Mirror of https://github.com/KoboldAI/KoboldAI-Client.git (synced 2025-02-17 12:10:49 +01:00)

Commit 5e00f7daf0 (parent 2ddf45141b)
Next evolution of web ui model selection. Custom Paths not working quite right.
aiserver.py — 13 lines changed

@@ -260,6 +260,10 @@ utils.vars = vars
 # Function to get model selection at startup
 #==================================================================#
 def sendModelSelection(menu="mainmenu"):
-    emit('from_server', {'cmd': 'show_model_menu', 'data': model_menu[menu], 'menu': menu}, broadcast=True)
+    #If we send one of the manual load options, send back the list of model directories, otherwise send the menu
+    if menu in ('NeoCustom', 'GPT2Custom'):
+        emit('from_server', {'cmd': 'show_model_menu', 'data': [[folder, menu, "", False] for folder in next(os.walk('./models'))[1]], 'menu': 'custom'}, broadcast=True)
+    else:
+        emit('from_server', {'cmd': 'show_model_menu', 'data': model_menu[menu], 'menu': menu}, broadcast=True)
 
 def getModelSelection(modellist):
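For reference, a minimal standalone sketch (not part of the commit) of the payload the new 'custom' branch emits: next(os.walk('./models'))[1] is the list of immediate subdirectories of ./models, and each menu row is a four-element list that buildLoadModelList reads as ar[i][0]..ar[i][3].

# Sketch only: reproduces the 'custom' menu payload outside the server.
import os

def custom_menu_rows(menu, models_dir="./models"):
    # next(os.walk(dir))[1] -> names of the immediate subdirectories of dir
    try:
        folders = next(os.walk(models_dir))[1]
    except StopIteration:  # models_dir does not exist -> no rows
        folders = []
    # row shape: [display name, menu/action id, right-column text, has-submenu flag]
    return [[folder, menu, "", False] for folder in folders]

print({'cmd': 'show_model_menu', 'data': custom_menu_rows('NeoCustom'), 'menu': 'custom'})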
@@ -801,6 +805,8 @@ def load_model(use_gpu=True, key=''):
     global generator
     vars.noai = False
     set_aibusy(True)
+    print("Model: ".format(vars.model))
+    print("args.path: ".format(args.path))
     # If transformers model was selected & GPU available, ask to use CPU or GPU
     if(vars.model not in ["InferKit", "Colab", "OAI", "GooseAI" , "ReadOnly", "TPUMeshTransformerGPTJ"]):
         vars.allowsp = True
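Note that the two debug prints added above pass a value to str.format but the format string has no {} placeholder, so they always print just "Model: " and "args.path: ". A sketch of the presumably intended lines, using the same module globals as the diff:

# Presumed intent (the committed lines omit the {} placeholder, so the values are dropped):
print("Model: {}".format(vars.model))
print("args.path: {}".format(args.path))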
@@ -2532,7 +2538,13 @@ def get_message(msg):
     elif(msg['cmd'] == 'load_model'):
         load_model(use_gpu=msg['use_gpu'], key=msg['key'])
     elif(msg['cmd'] == 'selectmodel'):
+        if msg['data'] in ('NeoCustom', 'GPT2Custom') and 'path' not in msg:
+            sendModelSelection(menu=msg['data'])
         vars.model = msg['data']
+        if 'path' in msg:
+            args.path = msg['path']
+        print(vars.model)
+        print(args.path)
     elif(msg['cmd'] == 'loadselect'):
         vars.loadselect = msg["data"]
     elif(msg['cmd'] == 'spselect'):
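The 'selectmodel' branch now distinguishes two message shapes from the browser, matching the front-end change further below. A hedged sketch of both payloads (the folder name 'my-gpt-neo-model' is only an illustrative placeholder); note that 'path' carries the bare folder name picked from the ./models listing, not a full path.

# Clicking NeoCustom/GPT2Custom in the main menu (no 'path' key):
# get_message() answers by re-sending the directory listing via sendModelSelection().
open_custom_menu = {'cmd': 'selectmodel', 'data': 'NeoCustom'}

# Clicking a folder in the 'custom' menu ('path' key present):
# get_message() sets vars.model = 'NeoCustom' and args.path = 'my-gpt-neo-model'.
pick_folder = {'cmd': 'selectmodel', 'data': 'NeoCustom', 'path': 'my-gpt-neo-model'}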
@@ -5189,6 +5201,7 @@ if __name__ == "__main__":
 
     general_startup()
     #show_select_model_list()
-    vars.model = "ReadOnly"
+    if vars.model == "" or vars.model is None:
+        vars.model = "ReadOnly"
     load_model()
 
static/application.js

@@ -937,9 +937,10 @@ function hideUSPopup() {
 }
 
 
-function buildLoadModelList(ar) {
+function buildLoadModelList(ar, menu) {
     disableButtons([load_model_accept]);
     loadmodelcontent.html("");
+    console.log(menu);
     var i;
     for(i=0; i<ar.length; i++) {
         var html
@@ -951,7 +952,7 @@ function buildLoadModelList(ar) {
             html = html + "<div class=\"loadlistpadding\"></div>"
         }
         html = html + "<div class=\"loadlistpadding\"></div>\
-                <div class=\"loadlistitem\" id=\"loadmodel"+i+"\" name=\""+ar[i][1]+"\">\
+                <div class=\"loadlistitem\" id=\"loadmodel"+i+"\" name=\""+ar[i][1]+"\" pretty_name=\""+ar[i][0]+"\">\
                     <div>"+ar[i][0]+"</div>\
                     <div class=\"flex-push-right\">"+ar[i][2]+"</div>\
                 </div>\
@@ -960,10 +961,18 @@ function buildLoadModelList(ar) {
         if(ar[i][3]) {
             $("#loadmodel"+i).off("click").on("click", (function () {
                 return function () {
-                    socket.send({'cmd': 'list_model', 'data': $(this).attr("name")});
+                    socket.send({'cmd': 'list_model', 'data': $(this).attr("name"), 'pretty_name': $(this).attr("pretty_name")});
                     disableButtons([load_model_accept]);
                 }
             })(i));
+        } else if(menu == 'custom') {
+            $("#loadmodel"+i).off("click").on("click", (function () {
+                return function () {
+                    socket.send({'cmd': 'selectmodel', 'data': $(this).attr("name"), 'path': $(this).attr("pretty_name")});
+                    highlightLoadLine($(this));
+                    enableButtons([load_model_accept]);
+                }
+            })(i));
         } else {
             $("#loadmodel"+i).off("click").on("click", (function () {
                 return function () {
@@ -2382,7 +2391,7 @@ $(document).ready(function(){
             } else {
                 $("#modelkey").addClass("hidden")
             }
-            buildLoadModelList(msg.data);
+            buildLoadModelList(msg.data, msg.menu);
         }
     });
 