Merge pull request #140 from ebolam/Web-UI

Fix for user selection of model folder before the web ui is loaded
This commit is contained in:
henk717 2022-06-08 08:31:45 +02:00 committed by GitHub
commit 6a324b0e75
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1 changed file with 13 additions and 8 deletions

View File

@@ -327,7 +327,10 @@ class Send_to_socketio(object):
def write(self, bar):
    print(bar, end="")
    time.sleep(0.01)
    try:
        emit('from_server', {'cmd': 'model_load_status', 'data': bar.replace(" ", "&nbsp;")}, broadcast=True)
    except:
        pass
# Set logging level to reduce chatter from Flask
import logging
@@ -932,6 +935,15 @@ def general_startup():
vars.aria2_port = args.aria2_port or 6799
#Now let's look to see if we are going to force a load of a model from a user selected folder
if(vars.model == "selectfolder"):
print("{0}Please choose the folder where pytorch_model.bin is located:{1}\n".format(colors.CYAN, colors.END))
modpath = fileops.getdirpath(getcwd() + "/models", "Select Model Folder")
if(modpath):
# Save directory to vars
vars.model = modpath
#==================================================================#
# Load Model
#==================================================================#
@@ -1132,13 +1144,6 @@ def load_model(use_gpu=True, gpu_layers=None, initial_load=False, online_model=""):
args.configname = vars.model + "/" + online_model
vars.oaiurl = vars.oaiengines + "/{0}/completions".format(online_model)
if(vars.model == "selectfolder"):
print("{0}Please choose the folder where pytorch_model.bin is located:{1}\n".format(colors.CYAN, colors.END))
modpath = fileops.getdirpath(getcwd() + "/models", "Select Model Folder")
if(modpath):
# Save directory to vars
vars.model = modpath
# If transformers model was selected & GPU available, ask to use CPU or GPU
if(vars.model not in ["InferKit", "Colab", "OAI", "GooseAI" , "ReadOnly", "TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX"]):