Mirror of https://github.com/KoboldAI/KoboldAI-Client.git, synced 2025-06-05 21:59:24 +02:00
--model selectfolder
@@ -1132,6 +1132,14 @@ def load_model(use_gpu=True, gpu_layers=None, initial_load=False, online_model="
             args.configname = vars.model + "/" + online_model
         vars.oaiurl = vars.oaiengines + "/{0}/completions".format(online_model)
 
+    if(vars.model == "selectfolder"):
+        print("{0}Please choose the folder where pytorch_model.bin is located:{1}\n".format(colors.CYAN, colors.END))
+        modpath = fileops.getdirpath(getcwd() + "/models", "Select Model Folder")
+
+        if(modpath):
+            # Save directory to vars
+            vars.model = modpath
+
     # If transformers model was selected & GPU available, ask to use CPU or GPU
     if(vars.model not in ["InferKit", "Colab", "OAI", "GooseAI" , "ReadOnly", "TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX"]):
         vars.allowsp = True
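With this change applied, starting the server with something like `python aiserver.py --model selectfolder` (assuming the usual aiserver.py entry point and its --model argument) should open a folder picker instead of requiring the model path up front. For readers without the rest of the file, the sketch below mirrors the added branch in isolation. It is a minimal stand-in, not the project's actual code: select_model_folder and the tkinter calls are illustrative substitutes for fileops.getdirpath.

    import os
    from tkinter import Tk, filedialog

    def select_model_folder(start_dir, title):
        # Illustrative stand-in for fileops.getdirpath(): show a native
        # directory picker rooted at start_dir and return the chosen path,
        # or an empty string if the user cancels the dialog.
        root = Tk()
        root.withdraw()  # no main window, just the dialog
        path = filedialog.askdirectory(initialdir=start_dir, title=title)
        root.destroy()
        return path

    model = "selectfolder"  # what --model selectfolder would set
    if model == "selectfolder":
        modpath = select_model_folder(os.path.join(os.getcwd(), "models"),
                                      "Select Model Folder")
        if modpath:
            # Save the chosen directory as the effective model name,
            # mirroring the added lines in the diff above.
            model = modpath
    print("Loading model from:", model)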