Mirror of https://github.com/KoboldAI/KoboldAI-Client.git (synced 2025-06-05 21:59:24 +02:00)

Commit: Minor update
@@ -6489,7 +6489,7 @@ def UI_2_select_model(data):
     if valid:
         logger.debug("Valid Loaders: {}".format(valid_loaders))
         emit("selected_model_info", valid_loaders)
-    if not valid:
+    if not valid and 'path' in data:
         #Get directories
         paths, breadcrumbs = get_folder_path_info(data['path'])
         output = []
@@ -6501,6 +6501,8 @@ def UI_2_select_model(data):
                     break
             output.append({'label': path[1], 'name': path[0], 'size': "", "menu": "Custom", 'path': path[0], 'isMenu': not valid})
         emit("open_model_load_menu", {"items": output+[{'label': 'Return to Main Menu', 'name':'mainmenu', 'size': "", "menu": "Custom", 'isMenu': True}], 'breadcrumbs': breadcrumbs})
+    elif not valid:
+        logger.error("Nothing can load the model: {}".format(valid_loaders))
 
     return
 
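Taken together, the two hunks above rework the fallback when no loader claims the selection: the folder-browsing branch now also requires a 'path' key in the incoming payload, and a new elif logs an error when nothing can load the model and there is no path to browse. A minimal sketch of the resulting routing follows; the function name and payload shape are assumptions for illustration, not code from the repo.

# Illustrative only: a simplified stand-in for the branching in UI_2_select_model.
# `valid`, `valid_loaders`, and the payload keys are assumed for this sketch.
def route_selection(valid, valid_loaders, data):
    if valid:
        return ("selected_model_info", valid_loaders)   # some loader accepts the model
    if not valid and 'path' in data:
        return ("open_model_load_menu", data['path'])   # let the user browse the folder
    # New fallback from this commit: nothing can load it and there is no path to browse.
    return ("error", "Nothing can load the model: {}".format(valid_loaders))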
@@ -6530,6 +6532,7 @@ def UI_2_select_model(data):
 def UI_2_load_model(data):
     logger.info("loading Model")
     logger.info(data)
+    model_loaders[data['plugin']].set_input_parameters(**data)
     #load_model(use_gpu=data['use_gpu'], gpu_layers=data['gpu_layers'], disk_layers=data['disk_layers'], online_model=data['online_model'], url=koboldai_vars.colaburl, use_8_bit=data['use_8_bit'])
 
 #==================================================================#
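The added line hands the whole payload to the loader plugin selected on the client. The sketch below shows the assumed shape of that dispatch; the real InferenceModel API is not visible in this diff, so the class and keys here are placeholders.

# Sketch under assumptions: model_loaders maps a plugin name to an object with
# set_input_parameters(**kwargs); the real signature may differ.
class ExampleLoader:
    def set_input_parameters(self, **kwargs):
        # Keep every UI field, including extras such as 'plugin', without failing.
        self.settings = kwargs

model_loaders = {"ExamplePlugin": ExampleLoader()}

data = {"plugin": "ExamplePlugin", "model": "example-model", "use_gpu": True}
model_loaders[data["plugin"]].set_input_parameters(**data)
print(model_loaders["ExamplePlugin"].settings["model"])  # example-model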
@@ -67,7 +67,7 @@ class model_loader(InferenceModel):
         "unit": "text",
         "label": "Model",
         "id": "model",
-        "default": "",
+        "default": model_name,
         "check": {"value": "", 'check': "!="},
         "tooltip": "Which model to use when running OpenAI/GooseAI.",
         "menu_path": "",
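The only change here is the "default" value: instead of always starting empty, the Model text field is pre-filled with model_name, while the "!=" check still rejects an empty value. Below is a small sketch of how such a descriptor could be evaluated; the check structure is copied from the entry above, but the helper and the concrete default are hypothetical.

# Hypothetical helper: evaluate the "check" rule of a settings descriptor like the one above.
def check_passes(descriptor, value):
    ref = descriptor["check"]["value"]
    op = descriptor["check"]["check"]
    return value != ref if op == "!=" else value == ref

field = {
    "unit": "text",
    "label": "Model",
    "id": "model",
    "default": "example-model",  # stands in for model_name; previously this was ""
    "check": {"value": "", "check": "!="},
}

print(check_passes(field, field["default"]))  # True: a non-empty default satisfies the check
print(check_passes(field, ""))                # False: an empty model name still fails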
@@ -1882,7 +1882,9 @@ function load_model() {
     for (const element of settings_area.querySelectorAll(".model_settings_input:not(.hidden)")) {
         data[element.id.split("|")[1].replace("_value", "")] = element.value;
     }
-    data = {...data, ...selected_model_data}
+    data = {...data, ...selected_model_data};
 
+    data['plugin'] = document.getElementById("modelplugin").value;
+
     socket.emit("load_model", data);
     closePopups();
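On the client side, each visible settings input contributes one key to the payload: the loop above derives the key from the element id by taking the part after the "|" and dropping a trailing "_value", and the new line adds the selected plugin under data['plugin'] before "load_model" is emitted (which UI_2_load_model consumes above). The key derivation, restated in Python with an assumed id format:

# The element id format ("<plugin>|<field>_value") is assumed for this example.
element_id = "ExamplePlugin|model_value"
key = element_id.split("|")[1].replace("_value", "")
print(key)  # "model": the payload key sent to the server in data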