Fix for the AI menu to respect the --cpu command line flag

This commit is contained in:
ebolam 2022-08-11 10:40:32 -04:00
parent 9016e29c66
commit 64664dc61e
1 changed file with 3 additions and 1 deletion

View File

@@ -1209,6 +1209,8 @@ def get_model_info(model, directory=""):
         url = True
     elif not utils.HAS_ACCELERATE and not torch.cuda.is_available():
         pass
+    elif args.cpu:
+        pass
     else:
         layer_count = get_layer_count(model, directory=directory)
         if layer_count is None:
@@ -3460,7 +3462,7 @@ def get_message(msg):
        else:
            filename = "settings/{}.breakmodel".format(vars.model.replace('/', '_'))
        f = open(filename, "w")
-       f.write(msg['gpu_layers'] + '\n' + msg['disk_layers'])
+       f.write(str(msg['gpu_layers']) + '\n' + str(msg['disk_layers']))
        f.close()
        vars.colaburl = msg['url'] + "/request"
        load_model(use_gpu=msg['use_gpu'], gpu_layers=msg['gpu_layers'], disk_layers=msg['disk_layers'], online_model=msg['online_model'])