From 5e00f7daf09aca468de64e28e19c14c3bdbd8263 Mon Sep 17 00:00:00 2001 From: ebolam Date: Sun, 6 Mar 2022 20:55:11 -0500 Subject: [PATCH] Next evolution of web ui model selection. Custom Paths not working quite right. --- aiserver.py | 17 +++++++++++++++-- static/application.js | 17 +++++++++++++---- 2 files changed, 28 insertions(+), 6 deletions(-) diff --git a/aiserver.py b/aiserver.py index db507136..9da6b533 100644 --- a/aiserver.py +++ b/aiserver.py @@ -260,7 +260,11 @@ utils.vars = vars # Function to get model selection at startup #==================================================================# def sendModelSelection(menu="mainmenu"): - emit('from_server', {'cmd': 'show_model_menu', 'data': model_menu[menu], 'menu': menu}, broadcast=True) + #If we send one of the manual load options, send back the list of model directories, otherwise send the menu + if menu in ('NeoCustom', 'GPT2Custom'): + emit('from_server', {'cmd': 'show_model_menu', 'data': [[folder, menu, "", False] for folder in next(os.walk('./models'))[1]], 'menu': 'custom'}, broadcast=True) + else: + emit('from_server', {'cmd': 'show_model_menu', 'data': model_menu[menu], 'menu': menu}, broadcast=True) def getModelSelection(modellist): print(" # Model\t\t\t\t\t\tVRAM\n ========================================================") @@ -801,6 +805,8 @@ def load_model(use_gpu=True, key=''): global generator vars.noai = False set_aibusy(True) + print("Model: {}".format(vars.model)) + print("args.path: {}".format(args.path)) # If transformers model was selected & GPU available, ask to use CPU or GPU if(vars.model not in ["InferKit", "Colab", "OAI", "GooseAI" , "ReadOnly", "TPUMeshTransformerGPTJ"]): vars.allowsp = True @@ -2532,7 +2538,13 @@ def get_message(msg): elif(msg['cmd'] == 'load_model'): load_model(use_gpu=msg['use_gpu'], key=msg['key']) elif(msg['cmd'] == 'selectmodel'): + if msg['data'] in ('NeoCustom', 'GPT2Custom') and 'path' not in msg: + sendModelSelection(menu=msg['data']) vars.model = 
msg['data'] + if 'path' in msg: + args.path = msg['path'] + print(vars.model) + print(args.path) elif(msg['cmd'] == 'loadselect'): vars.loadselect = msg["data"] elif(msg['cmd'] == 'spselect'): @@ -5189,7 +5201,8 @@ if __name__ == "__main__": general_startup() #show_select_model_list() - vars.model = "ReadOnly" + if vars.model == "" or vars.model is None: + vars.model = "ReadOnly" load_model() # Start Flask/SocketIO (Blocking, so this must be last method!) diff --git a/static/application.js b/static/application.js index 14173e21..5b6b5cc4 100644 --- a/static/application.js +++ b/static/application.js @@ -937,9 +937,10 @@ function hideUSPopup() { } -function buildLoadModelList(ar) { +function buildLoadModelList(ar, menu) { disableButtons([load_model_accept]); loadmodelcontent.html(""); + console.log(menu); var i; for(i=0; i" } html = html + "
\ -
\ +
\
"+ar[i][0]+"
\
"+ar[i][2]+"
\
\ @@ -960,10 +961,18 @@ function buildLoadModelList(ar) { if(ar[i][3]) { $("#loadmodel"+i).off("click").on("click", (function () { return function () { - socket.send({'cmd': 'list_model', 'data': $(this).attr("name")}); + socket.send({'cmd': 'list_model', 'data': $(this).attr("name"), 'pretty_name': $(this).attr("pretty_name")}); disableButtons([load_model_accept]); } })(i)); + } else if(menu == 'custom') { + $("#loadmodel"+i).off("click").on("click", (function () { + return function () { + socket.send({'cmd': 'selectmodel', 'data': $(this).attr("name"), 'path': $(this).attr("pretty_name")}); + highlightLoadLine($(this)); + enableButtons([load_model_accept]); + } + })(i)); } else { $("#loadmodel"+i).off("click").on("click", (function () { return function () { @@ -2382,7 +2391,7 @@ $(document).ready(function(){ } else { $("#modelkey").addClass("hidden") } - buildLoadModelList(msg.data); + buildLoadModelList(msg.data, msg.menu); } });