Next evolution of web UI model selection. Custom paths are not working quite right yet.

This commit is contained in:
ebolam 2022-03-06 20:55:11 -05:00
parent 2ddf45141b
commit 5e00f7daf0
2 changed files with 28 additions and 6 deletions

View File

@ -260,7 +260,11 @@ utils.vars = vars
# Function to get model selection at startup
#==================================================================#
def sendModelSelection(menu="mainmenu"):
    """Send the model-selection menu to the connected client(s) via socket.io.

    For the manual-load menus ('NeoCustom', 'GPT2Custom') the client needs a
    list of local model directories instead of a predefined menu, so we walk
    ./models and send its immediate subdirectories. For every other menu name
    we send the predefined entries from model_menu.

    Args:
        menu: key into the module-level model_menu dict, or one of the
              custom-path menu names 'NeoCustom'/'GPT2Custom'.
    """
    # NOTE: the rendered diff showed an unconditional emit followed by the
    # conditional pair, which would double-send the menu; only the
    # conditional emit is kept here.
    if menu in ('NeoCustom', 'GPT2Custom'):
        # next(os.walk(...))[1] yields only the first-level directory names.
        # Each entry is [display_name, menu_id, vram_note, is_submenu].
        folders = next(os.walk('./models'))[1]
        emit('from_server',
             {'cmd': 'show_model_menu',
              'data': [[folder, menu, "", False] for folder in folders],
              'menu': 'custom'},
             broadcast=True)
    else:
        emit('from_server',
             {'cmd': 'show_model_menu', 'data': model_menu[menu], 'menu': menu},
             broadcast=True)
def getModelSelection(modellist):
print(" # Model\t\t\t\t\t\tVRAM\n ========================================================")
@ -801,6 +805,8 @@ def load_model(use_gpu=True, key=''):
global generator
vars.noai = False
set_aibusy(True)
print("Model: ".format(vars.model))
print("args.path: ".format(args.path))
# If transformers model was selected & GPU available, ask to use CPU or GPU
if(vars.model not in ["InferKit", "Colab", "OAI", "GooseAI" , "ReadOnly", "TPUMeshTransformerGPTJ"]):
vars.allowsp = True
@ -2532,7 +2538,13 @@ def get_message(msg):
elif(msg['cmd'] == 'load_model'):
load_model(use_gpu=msg['use_gpu'], key=msg['key'])
elif(msg['cmd'] == 'selectmodel'):
if msg['data'] in ('NeoCustom', 'GPT2Custom') and 'path' not in msg:
sendModelSelection(menu=msg['data'])
vars.model = msg['data']
if 'path' in msg:
args.path = msg['path']
print(vars.model)
print(args.path)
elif(msg['cmd'] == 'loadselect'):
vars.loadselect = msg["data"]
elif(msg['cmd'] == 'spselect'):
@ -5189,7 +5201,8 @@ if __name__ == "__main__":
general_startup()
#show_select_model_list()
vars.model = "ReadOnly"
if vars.model == "" or vars.model is None:
vars.model = "ReadOnly"
load_model()
# Start Flask/SocketIO (Blocking, so this must be last method!)

View File

@ -937,9 +937,10 @@ function hideUSPopup() {
}
function buildLoadModelList(ar) {
function buildLoadModelList(ar, menu) {
disableButtons([load_model_accept]);
loadmodelcontent.html("");
console.log(menu);
var i;
for(i=0; i<ar.length; i++) {
var html
@ -951,7 +952,7 @@ function buildLoadModelList(ar) {
html = html + "<div class=\"loadlistpadding\"></div>"
}
html = html + "<div class=\"loadlistpadding\"></div>\
<div class=\"loadlistitem\" id=\"loadmodel"+i+"\" name=\""+ar[i][1]+"\">\
<div class=\"loadlistitem\" id=\"loadmodel"+i+"\" name=\""+ar[i][1]+"\" pretty_name=\""+ar[i][0]+"\">\
<div>"+ar[i][0]+"</div>\
<div class=\"flex-push-right\">"+ar[i][2]+"</div>\
</div>\
@ -960,10 +961,18 @@ function buildLoadModelList(ar) {
if(ar[i][3]) {
$("#loadmodel"+i).off("click").on("click", (function () {
return function () {
socket.send({'cmd': 'list_model', 'data': $(this).attr("name")});
socket.send({'cmd': 'list_model', 'data': $(this).attr("name"), 'pretty_name': $(this).attr("pretty_name")});
disableButtons([load_model_accept]);
}
})(i));
} else if(menu == 'custom') {
$("#loadmodel"+i).off("click").on("click", (function () {
return function () {
socket.send({'cmd': 'selectmodel', 'data': $(this).attr("name"), 'path': $(this).attr("pretty_name")});
highlightLoadLine($(this));
enableButtons([load_model_accept]);
}
})(i));
} else {
$("#loadmodel"+i).off("click").on("click", (function () {
return function () {
@ -2382,7 +2391,7 @@ $(document).ready(function(){
} else {
$("#modelkey").addClass("hidden")
}
buildLoadModelList(msg.data);
buildLoadModelList(msg.data, msg.menu);
}
});