From 5edf0797e85a36fa7c8c7314c764da852f7a3199 Mon Sep 17 00:00:00 2001 From: ebolam Date: Wed, 14 Sep 2022 15:10:42 -0400 Subject: [PATCH] UI2 support for Horde --- aiserver.py | 16 +++++++++------- static/koboldai.js | 19 +++++++++++++++++++ templates/popups.html | 2 +- 3 files changed, 29 insertions(+), 8 deletions(-) diff --git a/aiserver.py b/aiserver.py index 46b23403..9900f93e 100644 --- a/aiserver.py +++ b/aiserver.py @@ -1204,7 +1204,7 @@ def get_model_info(model, directory=""): key_value = js["oaiapikey"] if 'oaiurl' in js and js['oaiurl'] != "": default_url = js['oaiurl'] - get_cluster_models({'key': key_value, 'url': default_url}) + get_cluster_models({'model': model, 'key': key_value, 'url': default_url}) elif model in [x[1] for x in model_menu['apilist']]: if path.exists("settings/{}.v2_settings".format(model)): with open("settings/{}.v2_settings".format(model), "r") as file: @@ -1337,7 +1337,7 @@ def get_oai_models(data): emit('oai_engines', {'data': engines, 'online_model': online_model}, broadcast=False, room="UI_2") else: # Something went wrong, print the message and quit since we can't initialize an engine - print("{0}ERROR!{1}".format(colors.RED, colors.END), room="UI_1") + print("{0}ERROR!{1}".format(colors.RED, colors.END)) print(req.json()) emit('from_server', {'cmd': 'errmsg', 'data': req.json()}) @@ -1345,6 +1345,7 @@ def get_oai_models(data): def get_cluster_models(msg): koboldai_vars.oaiapikey = msg['key'] koboldai_vars.apikey = koboldai_vars.oaiapikey + model = msg['model'] url = msg['url'] @@ -1369,8 +1370,8 @@ def get_cluster_models(msg): # If the client settings file doesn't exist, create it # Write API key to file os.makedirs('settings', exist_ok=True) - if path.exists(get_config_filename(koboldai_vars.model_selected)): - with open(get_config_filename(koboldai_vars.model_selected), "r") as file: + if path.exists(get_config_filename(model)): + with open(get_config_filename(model), "r") as file: js = json.load(file) if 'online_model' in 
js: online_model = js['online_model'] @@ -1381,11 +1382,12 @@ def get_cluster_models(msg): changed=True if changed: js={} - with open(get_config_filename(koboldai_vars.model_selected), "w") as file: + with open(get_config_filename(model), "w") as file: js["apikey"] = koboldai_vars.oaiapikey file.write(json.dumps(js, indent=3)) - emit('from_server', {'cmd': 'oai_engines', 'data': engines, 'online_model': online_model}, broadcast=True) + emit('from_server', {'cmd': 'oai_engines', 'data': engines, 'online_model': online_model}, broadcast=True, room="UI_1") + emit('oai_engines', {'data': engines, 'online_model': online_model}, broadcast=False, room="UI_2") else: # Something went wrong, print the message and quit since we can't initialize an engine print("{0}ERROR!{1}".format(colors.RED, colors.END)) print(req.json()) emit('from_server', {'cmd': 'errmsg', 'data': req.json()}) @@ -3771,7 +3773,7 @@ def get_message(msg): elif(msg['cmd'] == 'OAI_Key_Update'): get_oai_models({'model': koboldai_vars.model, 'key': msg['key']}) elif(msg['cmd'] == 'Cluster_Key_Update'): - get_cluster_models(msg) + get_cluster_models({'model': koboldai_vars.model, 'key': msg['key'], 'url': msg['url']}) elif(msg['cmd'] == 'loadselect'): koboldai_vars.loadselect = msg["data"] elif(msg['cmd'] == 'spselect'): diff --git a/static/koboldai.js b/static/koboldai.js index 1da9de26..df97ac9a 100644 --- a/static/koboldai.js +++ b/static/koboldai.js @@ -1168,6 +1168,25 @@ function selected_model_info(data) { } else { document.getElementById("modelurl").classList.add("hidden"); } + + //change model loading on url if needed + if (data.models_on_url) { + document.getElementById("modelurl").onchange = function () {socket.emit('get_cluster_models', {'key': document.getElementById("modelkey").value, 'url': this.value});}; + document.getElementById("modelkey").onchange = function () {socket.emit('get_cluster_models', {'key': this.value, 'url': document.getElementById("modelurl").value});}; + } else { + document.getElementById("modelkey").onchange = function () {socket.emit('OAI_Key_Update', 
{'model': document.getElementById('btn_loadmodelaccept').getAttribute('selected_model'), 'key': this.value});}; + document.getElementById("modelurl").onchange = null; + } + + //Multiple Model Select? + if (data.multi_online_models) { + document.getElementById("oaimodel").setAttribute("multiple", ""); + document.getElementById("oaimodel").options[0].textContent = "All" + } else { + document.getElementById("oaimodel").removeAttribute("multiple"); + document.getElementById("oaimodel").options[0].textContent = "Select Model(s)" + } + //hide or unhide the use gpu checkbox if (data.gpu) { document.getElementById("use_gpu_div").classList.remove("hidden"); diff --git a/templates/popups.html b/templates/popups.html index 59b77565..4a0a353a 100644 --- a/templates/popups.html +++ b/templates/popups.html @@ -34,7 +34,7 @@