Merge pull request #201 from ebolam/united

Fix for Horde mode
henk717 2022-09-06 01:50:10 +02:00 committed by GitHub
commit 8ed731daff
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 45 additions and 2 deletions

@@ -1444,6 +1444,7 @@ def get_model_info(model, directory=""):
key_value = ""
break_values = []
url = False
default_url = None
models_on_url = False
multi_online_models = False
gpu_count = torch.cuda.device_count()
@@ -1456,6 +1457,7 @@
models_on_url = True
url = True
key = True
default_url = 'https://koboldai.net'
multi_online_models = True
if path.exists(get_config_filename(model)):
with open(get_config_filename(model), "r") as file:
@@ -1511,7 +1513,7 @@ def get_model_info(model, directory=""):
'gpu':gpu, 'layer_count':layer_count, 'breakmodel':breakmodel,
'disk_break_value': disk_blocks, 'accelerate': utils.HAS_ACCELERATE,
'break_values': break_values, 'gpu_count': gpu_count, 'multi_online_models': multi_online_models,
'url': url, 'gpu_names': gpu_names, 'models_on_url': models_on_url}, broadcast=True)
'url': url, 'default_url': default_url, 'gpu_names': gpu_names, 'models_on_url': models_on_url}, broadcast=True)
if key_value != "":
get_oai_models(key_value)
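
The hunks above add a default_url field to the model-info payload that the server broadcasts to the UI. It stays None for most backends and is set to 'https://koboldai.net' so the browser can pre-fill the URL box, presumably when the CLUSTER backend (the Horde mode named in the commit message) is selected. A minimal sketch of that flow, assuming a CLUSTER check that sits above the excerpted hunk; the helper name and condition below are illustrative, not code from this diff:

# Sketch only: the surrounding emit call and backend check are not shown in
# the diff, so this helper name and its condition are assumptions.
def build_model_info_payload(model):
    url = False
    models_on_url = False
    multi_online_models = False
    default_url = None
    if model == "CLUSTER":                    # assumed Horde backend
        url = True
        models_on_url = True
        multi_online_models = True
        default_url = 'https://koboldai.net'  # pre-fills #modelurl in the UI
    return {'url': url, 'default_url': default_url,
            'models_on_url': models_on_url,
            'multi_online_models': multi_online_models}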
@@ -2129,6 +2131,16 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
if changed:
with open(get_config_filename(), "w") as file:
file.write(json.dumps(js, indent=3))
# Swap OAI Server if GooseAI was selected
if(vars.model == "GooseAI"):
vars.oaiengines = "https://api.goose.ai/v1/engines"
vars.model = "OAI"
args.configname = "GooseAI" + "/" + online_model
elif vars.model != "CLUSTER":
args.configname = vars.model + "/" + online_model
vars.oaiurl = vars.oaiengines + "/{0}/completions".format(online_model)
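
The block above swaps vars.oaiengines to the GooseAI endpoint when GooseAI is selected, skips the configname suffix for CLUSTER, and then builds the final completions URL from the selected online model. A small sketch of the resulting URL shapes; the model names are placeholders and the stock OpenAI engines URL is an assumption, neither is taken from the diff:

# Placeholder values for illustration; only the format string comes from the diff.
oaiengines = "https://api.goose.ai/v1/engines"   # after the GooseAI swap
online_model = "gpt-neo-20b"                     # hypothetical selection
oaiurl = oaiengines + "/{0}/completions".format(online_model)
print(oaiurl)  # https://api.goose.ai/v1/engines/gpt-neo-20b/completions
# With an unswapped engines URL (assumed to be the OpenAI endpoint) the same
# template would yield https://api.openai.com/v1/engines/<model>/completions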

@@ -2944,6 +2944,9 @@ $(document).ready(function(){
if (msg.url) {
$("#modelurl").removeClass("hidden");
if (msg.default_url != null) {
$("#modelurl").value = msg.default_url;
}
} else {
$("#modelurl").addClass("hidden");
}
@@ -3304,7 +3307,11 @@ $(document).ready(function(){
}
}
var disk_layers = $("#disk_layers").length > 0 ? $("#disk_layers")[0].value : 0;
message = {'cmd': 'load_model', 'use_gpu': $('#use_gpu')[0].checked, 'key': $('#modelkey')[0].value, 'gpu_layers': gpu_layers.slice(0, -1), 'disk_layers': disk_layers, 'url': $('#modelurl')[0].value, 'online_model': $('#oaimodel')[0].value};
var models = getSelectedOptions(document.getElementById('oaimodel'));
if (models.length == 1) {
models = models[0];
}
message = {'cmd': 'load_model', 'use_gpu': $('#use_gpu')[0].checked, 'key': $('#modelkey')[0].value, 'gpu_layers': gpu_layers.slice(0, -1), 'disk_layers': disk_layers, 'url': $('#modelurl')[0].value, 'online_model': models};
socket.send(message);
loadmodelcontent.html("");
hideLoadModelPopup();
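
With the getSelectedOptions change above, the client now sends online_model either as a single string (one selection) or as an array (several models selected). The server-side handler is not part of this diff; the sketch below is a hypothetical normalisation showing how both shapes could be accepted:

# Hypothetical receiver-side helper; the name and usage are assumptions, not
# code from this PR.
def normalise_online_model(value):
    # The UI sends a plain string for a single selection and a list otherwise;
    # always work with a list internally.
    if isinstance(value, str):
        return [value]
    return list(value)

normalise_online_model("gpt-neo-20b")           # ['gpt-neo-20b']
normalise_online_model(["model-a", "model-b"])  # ['model-a', 'model-b']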
@@ -3750,3 +3757,27 @@ function upload_file(file_box) {
}
}
function getSelectedOptions(element) {
// validate element
if(!element || !element.options)
return []; //or null?
// use the browser's native HTML5 selectedOptions collection when available.
if (element.selectedOptions) {
selectedOptions = element.selectedOptions;
} else {
// you are here because your browser doesn't have the HTML5 selectedOptions
var opts = element.options;
var selectedOptions = [];
for(var i = 0; i < opts.length; i++) {
if(opts[i].selected) {
selectedOptions.push(opts[i]);
}
}
}
var output = [];
for (var item of selectedOptions) {
output.push(item.value);
}
return output;
}