Fix for Horde mode
parent f38034bd2c
commit 397059cf2f
@@ -1431,6 +1431,7 @@ def get_model_info(model, directory=""):
     key_value = ""
     break_values = []
     url = False
+    default_url = None
     models_on_url = False
     multi_online_models = False
     gpu_count = torch.cuda.device_count()
@@ -1443,6 +1444,7 @@ def get_model_info(model, directory=""):
         models_on_url = True
         url = True
         key = True
+        default_url = 'https://koboldai.net'
         multi_online_models = True
         if path.exists("settings/{}.settings".format(model)):
             with open("settings/{}.settings".format(model), "r") as file:
@@ -1498,7 +1500,7 @@ def get_model_info(model, directory=""):
                         'gpu':gpu, 'layer_count':layer_count, 'breakmodel':breakmodel,
                         'disk_break_value': disk_blocks, 'accelerate': utils.HAS_ACCELERATE,
                         'break_values': break_values, 'gpu_count': gpu_count, 'multi_online_models': multi_online_models,
-                        'url': url, 'gpu_names': gpu_names, 'models_on_url': models_on_url}, broadcast=True)
+                        'url': url, 'default_url': default_url, 'gpu_names': gpu_names, 'models_on_url': models_on_url}, broadcast=True)
     if key_value != "":
         get_oai_models(key_value)
 
@@ -2105,7 +2107,7 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
             vars.oaiengines = "https://api.goose.ai/v1/engines"
             vars.model = "OAI"
             args.configname = "GooseAI" + "/" + online_model
-        else:
+        elif vars.model != "CLUSTER":
            args.configname = vars.model + "/" + online_model
        vars.oaiurl = vars.oaiengines + "/{0}/completions".format(online_model)
 
@@ -2944,6 +2944,9 @@ $(document).ready(function(){
 
         if (msg.url) {
             $("#modelurl").removeClass("hidden");
+            if (msg.default_url != null) {
+                $("#modelurl").value = msg.default_url;
+            }
         } else {
             $("#modelurl").addClass("hidden");
         }
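For context, a sketch of the message this handler receives. The key names are taken from the server-side emit in the -1498,7 hunk above (only a subset is shown); the concrete values are invented for illustration.

// Illustrative payload only: key names from the emit above, values made up.
var msg = {
    url: true,                            // show the #modelurl input
    default_url: 'https://koboldai.net',  // new field added by this commit
    models_on_url: true,
    multi_online_models: true,
    gpu_count: 1,
    break_values: []
};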
@@ -3304,7 +3307,11 @@ $(document).ready(function(){
             }
         }
         var disk_layers = $("#disk_layers").length > 0 ? $("#disk_layers")[0].value : 0;
-        message = {'cmd': 'load_model', 'use_gpu': $('#use_gpu')[0].checked, 'key': $('#modelkey')[0].value, 'gpu_layers': gpu_layers.slice(0, -1), 'disk_layers': disk_layers, 'url': $('#modelurl')[0].value, 'online_model': $('#oaimodel')[0].value};
+        models = getSelectedOptions(document.getElementById('oaimodel'));
+        if (models.length == 1) {
+            models = models[0];
+        }
+        message = {'cmd': 'load_model', 'use_gpu': $('#use_gpu')[0].checked, 'key': $('#modelkey')[0].value, 'gpu_layers': gpu_layers.slice(0, -1), 'disk_layers': disk_layers, 'url': $('#modelurl')[0].value, 'online_model': models};
         socket.send(message);
         loadmodelcontent.html("");
         hideLoadModelPopup();
@@ -3750,3 +3757,27 @@ function upload_file(file_box) {
     }
 }
 
+function getSelectedOptions(element) {
+    // validate element
+    if(!element || !element.options)
+        return []; //or null?
+
+    // return HTML5 implementation of selectedOptions instead.
+    if (element.selectedOptions) {
+        selectedOptions = element.selectedOptions;
+    } else {
+        // you are here because your browser doesn't have the HTML5 selectedOptions
+        var opts = element.options;
+        var selectedOptions = [];
+        for(var i = 0; i < opts.length; i++) {
+            if(opts[i].selected) {
+                selectedOptions.push(opts[i]);
+            }
+        }
+    }
+    output = []
+    for (item of selectedOptions) {
+        output.push(item.value);
+    }
+    return output;
+}
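For reference, a minimal usage sketch of the new getSelectedOptions helper. The multi-select markup is illustrative; only the element id oaimodel and the single-selection collapse come from the diff above.

// Assumes markup along the lines of: <select id="oaimodel" multiple>...</select>
var models = getSelectedOptions(document.getElementById('oaimodel'));
// models is an array of the selected option values, e.g. ["model-a", "model-b"].
// The load_model handler above collapses a single selection to a plain string:
if (models.length == 1) {
    models = models[0];
}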