Merge branch 'united' into mkultra

vfbd 2022-08-20 23:24:03 -04:00
commit 4e88b277d4
5 changed files with 44 additions and 29 deletions

View File

@@ -11,11 +11,15 @@ IF EXIST "Uninstall\unins000.exe" (
 start Uninstall\unins000.exe
 exit
 ) ELSE (
-echo This will remove all KoboldAI folders that do not contain user data
+echo This will remove all KoboldAI folders that do not contain user data.
+echo DO NOT CONTINUE IF KOBOLDAI IS NOT IN ITS OWN FOLDER! OTHERWISE YOUR OTHER DATA IN THIS FOLDER WILL BE DELETED AS WELL!
 pause
-GOTO UNINSTALL
+set /P D=Type DELETE if you wish to continue the uninstallation:
 )
+IF %D%==DELETE GOTO UNINSTALL
+exit
 :UNINSTALL
 echo Uninstallation in progress, please wait...
 set DM=Y

View File

@@ -239,6 +239,7 @@ class vars:
 submission = "" # Same as above, but after applying input formatting
 lastctx = "" # The last context submitted to the generator
 model = "" # Model ID string chosen at startup
+model_selected = "" # selected model in UI
 model_type = "" # Model Type (Automatically taken from the model config)
 noai = False # Runs the script without starting up the transformers pipeline
 aibusy = False # Stops submissions while the AI is working
@@ -1038,6 +1039,8 @@ def savesettings():
 if(vars.seed_specified):
     js["seed"] = vars.seed
+else:
+    js["seed"] = None
 js["newlinemode"] = vars.newlinemode
@@ -1153,9 +1156,12 @@ def processsettings(js):
 if("seed" in js):
     vars.seed = js["seed"]
+    if(vars.seed is not None):
         vars.seed_specified = True
     else:
         vars.seed_specified = False
+else:
+    vars.seed_specified = False
 if("antemplate" in js):
     vars.setauthornotetemplate = js["antemplate"]
@ -1474,11 +1480,11 @@ def get_layer_count(model, directory=""):
else: else:
from transformers import AutoConfig from transformers import AutoConfig
if directory == "": if directory == "":
model_config = AutoConfig.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache") model_config = AutoConfig.from_pretrained(model, revision=vars.revision, cache_dir="cache")
elif(os.path.isdir(vars.custmodpth.replace('/', '_'))):
model_config = AutoConfig.from_pretrained(vars.custmodpth.replace('/', '_'), revision=vars.revision, cache_dir="cache")
elif(os.path.isdir(directory)): elif(os.path.isdir(directory)):
model_config = AutoConfig.from_pretrained(directory, revision=vars.revision, cache_dir="cache") model_config = AutoConfig.from_pretrained(directory, revision=vars.revision, cache_dir="cache")
elif(os.path.isdir(vars.custmodpth.replace('/', '_'))):
model_config = AutoConfig.from_pretrained(vars.custmodpth.replace('/', '_'), revision=vars.revision, cache_dir="cache")
else: else:
model_config = AutoConfig.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache") model_config = AutoConfig.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache")
@@ -1491,9 +1497,9 @@ def get_layer_count(model, directory=""):
 def get_oai_models(key):
     vars.oaiapikey = key
-    if vars.model == 'OAI':
+    if vars.model_selected == 'OAI':
         url = "https://api.openai.com/v1/engines"
-    elif vars.model == 'GooseAI':
+    elif vars.model_selected == 'GooseAI':
         url = "https://api.goose.ai/v1/engines"
     else:
         return
@@ -1522,8 +1528,8 @@ def get_oai_models(key):
 # If the client settings file doesn't exist, create it
 # Write API key to file
 os.makedirs('settings', exist_ok=True)
-if path.exists("settings/{}.settings".format(vars.model)):
-    with open("settings/{}.settings".format(vars.model), "r") as file:
+if path.exists("settings/{}.settings".format(vars.model_selected)):
+    with open("settings/{}.settings".format(vars.model_selected), "r") as file:
         js = json.load(file)
         if 'online_model' in js:
             online_model = js['online_model']
@@ -1531,7 +1537,7 @@ def get_oai_models(key):
 if js['apikey'] != key:
     changed=True
 if changed:
-    with open("settings/{}.settings".format(vars.model), "w") as file:
+    with open("settings/{}.settings".format(vars.model_selected), "w") as file:
         js["apikey"] = key
         file.write(json.dumps(js, indent=3))
@@ -3669,8 +3675,8 @@ def get_message(msg):
 changed = True
 if not utils.HAS_ACCELERATE:
     msg['disk_layers'] = "0"
-if os.path.exists("settings/" + vars.model.replace('/', '_') + ".breakmodel"):
-    with open("settings/" + vars.model.replace('/', '_') + ".breakmodel", "r") as file:
+if os.path.exists("settings/" + vars.model_selected.replace('/', '_') + ".breakmodel"):
+    with open("settings/" + vars.model_selected.replace('/', '_') + ".breakmodel", "r") as file:
         data = file.read().split('\n')[:2]
         if len(data) < 2:
             data.append("0")
@@ -3678,14 +3684,15 @@ def get_message(msg):
     if gpu_layers == msg['gpu_layers'] and disk_layers == msg['disk_layers']:
         changed = False
 if changed:
-    if vars.model in ["NeoCustom", "GPT2Custom"]:
+    if vars.model_selected in ["NeoCustom", "GPT2Custom"]:
         filename = "settings/{}.breakmodel".format(os.path.basename(os.path.normpath(vars.custmodpth)))
     else:
-        filename = "settings/{}.breakmodel".format(vars.model.replace('/', '_'))
+        filename = "settings/{}.breakmodel".format(vars.model_selected.replace('/', '_'))
     f = open(filename, "w")
     f.write(str(msg['gpu_layers']) + '\n' + str(msg['disk_layers']))
     f.close()
 vars.colaburl = msg['url'] + "/request"
+vars.model = vars.model_selected
 load_model(use_gpu=msg['use_gpu'], gpu_layers=msg['gpu_layers'], disk_layers=msg['disk_layers'], online_model=msg['online_model'])
 elif(msg['cmd'] == 'show_model'):
     print("Model Name: {}".format(getmodelname()))
@@ -3710,18 +3717,18 @@ def get_message(msg):
 elif msg['data'] in ('NeoCustom', 'GPT2Custom') and 'path_modelname' in msg:
     #Here the user entered custom text in the text box. This could be either a model name or a path.
     if check_if_dir_is_model(msg['path_modelname']):
-        vars.model = msg['data']
+        vars.model_selected = msg['data']
         vars.custmodpth = msg['path_modelname']
         get_model_info(msg['data'], directory=msg['path'])
     else:
-        vars.model = msg['path_modelname']
+        vars.model_selected = msg['path_modelname']
         try:
-            get_model_info(vars.model)
+            get_model_info(vars.model_selected)
         except:
             emit('from_server', {'cmd': 'errmsg', 'data': "The model entered doesn't exist."})
 elif msg['data'] in ('NeoCustom', 'GPT2Custom'):
     if check_if_dir_is_model(msg['path']):
-        vars.model = msg['data']
+        vars.model_selected = msg['data']
         vars.custmodpth = msg['path']
         get_model_info(msg['data'], directory=msg['path'])
     else:
@@ -3730,12 +3737,12 @@ def get_message(msg):
     else:
         sendModelSelection(menu=msg['data'], folder=msg['path'])
 else:
-    vars.model = msg['data']
+    vars.model_selected = msg['data']
     if 'path' in msg:
         vars.custmodpth = msg['path']
         get_model_info(msg['data'], directory=msg['path'])
     else:
-        get_model_info(vars.model)
+        get_model_info(vars.model_selected)
 elif(msg['cmd'] == 'delete_model'):
     if "{}/models".format(os.getcwd()) in os.path.abspath(msg['data']) or "{}\\models".format(os.getcwd()) in os.path.abspath(msg['data']):
         if check_if_dir_is_model(msg['data']):

View File

@@ -1085,11 +1085,12 @@ function buildLoadModelList(ar, menu, breadcrumbs, showdelete) {
 html = "<div class=\"flex\">\
     <div class=\"loadlistpadding\"></div>"
 //if the menu item is a link to another menu
-if(ar[i][3]) {
+console.log(ar[i]);
+if((ar[i][3]) || (['Load a model from its directory', 'Load an old GPT-2 model (eg CloverEdition)'].includes(ar[i][0]))) {
     html = html + "<span class=\"loadlisticon loadmodellisticon-folder oi oi-folder allowed\" aria-hidden=\"true\"></span>"
 } else {
     //this is a model
-    html = html + "<div class=\"loadlisticon oi oi-caret-right allowed\"></div>"
+    html = html + "<div class=\"loadlisticon oi oi-caret-right allowed\"></div>&nbsp;&nbsp;&nbsp;"
 }
 //now let's do the delete icon if applicable
@@ -2444,10 +2445,6 @@ $(document).ready(function(){
 } else if(msg.cmd == "updatechunk") {
     hideMessage();
     game_text.attr('contenteditable', allowedit);
-    if (typeof submit_start !== 'undefined') {
-        $("#runtime")[0].innerHTML = `Generation time: ${Math.round((Date.now() - submit_start)/1000)} sec`;
-        delete submit_start;
-    }
     var index = msg.data.index;
     var html = msg.data.html;
     var existingChunk = game_text.children('#n' + index);
@@ -2961,6 +2958,7 @@ $(document).ready(function(){
     $("#showmodelnamecontainer").removeClass("hidden");
 } else if(msg.cmd == 'hide_model_name') {
     $("#showmodelnamecontainer").addClass("hidden");
+    $(window).off('beforeunload');
     location.reload();
     //console.log("Closing window");
 } else if(msg.cmd == 'model_load_status') {

View File

@@ -2,6 +2,7 @@
var fav_icon2 = "data:image/x-icon;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAB+1BMVEUAAAAAAAAAAAAAAAAAAQAAAAAAAQAAAAAAAAASFhBBWD4iUyoFEwgFEwguUTM+VDoMFAwAAAA+elIudz8AAAAAAAA0MigyLyQAAAAbLh1LdElSbUoVMBkAAABAZ0M2fkUAAAABAQFMiGQraDkAAQANFxEGFQkLFg8EEAYAAAAsZDonZjUAAABCgVVAnFYrSjhEjFpFi1sdRScAAAAjOi8VMxx1dGOFgGYAAABOTEabmIdlYlQaGhgaGhddXFauqY5JRjoAAAAAAAABAQFGeExIl1lX0XRW0XRHi1RFe02vv5W31KFd1Hpc1Hpe1HvO1KvDvJlqZ1plYVOmoIVt1IFl1H7AuZp1cV9jX1AmSCw3Nzg7NmA1MTJuz4Bm1H5MST9HPl9BQEMgNiNXgWKiobFgXICDd5dfw3RZVnJiV3zGv9Bqf29Oj2G/v8hTTpGhl8dbxHVd0npiYoxhWJvIxtlcimZFn1lRclg9SkZNblZBeEpDbEZCa0ZBc0hLY1BAS1BdaV87j01Vx3FWynJSrGZOhlVasGtas2xatm1at21WnWJQm15WyXJQvmlavnBZrGlEYEJWe1RBWz9Um2BavXBgxn9XhllGY0RLaklXiFlTwG5OpmVSfFNMbUpGZEVLa0lShldEhVCChHiKiHvWz6/Kw6WWlZGAfmj///8kr0X+AAAARHRSTlMAASFrcAhxIjLb/vWvsPb+20b4+DFFyMkz2vf43CP9/m5y9vZysLGvsQn19mz+/tz4+NxHycr3+Ejb/vaxsPX+3TRtcBrzrrgAAAABYktHRKhQCDaSAAAAB3RJTUUH5gYJFyQy3tftxgAAAQBJREFUGNNjYGBgYGRiZmFlZWNmZ2SAAA5OLm4eXj5+AQ6ogKCQi6ubu4ensCCIxygiKubl7ePr6+cfIC4owcjAJCkVGBQc4usbGhYeIS0jy8AsFxkVHRPr6xsXn5CYJK/AoKiUnJKalg5UkZGZla2swsCqmpObl1/g61tYVFxSqsbKwKpeVl5RWVVdU1tX39CoocnAotXU3NLa1t7R2dXd06utwqCj6+vb1z9h4sRJk6f4+uopMLDrG0z1nTZ94sQZM31nGRrJMjBKGJvMnjN3wrz5CxaaCnKAvSNqtmjxkqXLlptbQP0iYmllbWNrZ+/gCBVgZHdS1GR1VpAFqQcApI0/jqlZOvEAAAAldEVYdGRhdGU6Y3JlYXRlADIwMjItMDYtMDlUMjM6MzY6NTArMDA6MDDi0xr+AAAAJXRFWHRkYXRlOm1vZGlmeQAyMDIyLTA2LTA5VDIzOjM2OjUwKzAwOjAwk46iQgAAAABJRU5ErkJggg=="; var fav_icon2 = "data:image/x-icon;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAB+1BMVEUAAAAAAAAAAAAAAAAAAQAAAAAAAQAAAAAAAAASFhBBWD4iUyoFEwgFEwguUTM+VDoMFAwAAAA+elIudz8AAAAAAAA0MigyLyQAAAAbLh1LdElSbUoVMBkAAABAZ0M2fkUAAAABAQFMiGQraDkAAQANFxEGFQkLFg8EEAYAAAAsZDonZjUAAABCgVVAnFYrSjhEjFpFi1sdRScAAAAjOi8VMxx1dGOFgGYAAABOTEabmIdlYlQaGhgaGhddXFauqY5JRjoAAAAAAAABAQFGeExIl1lX0XRW0XRHi1RFe02vv5W31KFd1Hpc1Hpe1HvO1KvDvJlqZ1plYVOmoIVt1IFl1H7AuZp1cV9jX1AmSCw3Nzg7NmA1MTJuz4Bm1H5MST9HPl9BQEMgNiNXgWKiobFgXICDd5dfw3RZVnJiV3zGv9Bqf29Oj2G/v8hTTpGhl8dbxHVd0npiYoxhWJvIxtlcimZFn1lRclg9SkZNblZBeEpDbEZCa0ZBc0hLY1BAS1BdaV87j01Vx3FWynJSrGZOhlVasGtas2xatm1at21WnWJQm15WyXJQvmlavnBZrGlEYEJWe1RBWz9Um2BavXBgxn9XhllGY0RLaklXiFlTwG5OpmVSfFNMbUpGZEVLa0lShldEhVCChHiKiHvWz6/Kw6WWlZGAfmj///8kr0X+AAAARHRSTlMAASFrcAhxIjLb/vWvsPb+20b4+DFFyMkz2vf43CP9/m5y9vZysLGvsQn19mz+/tz4+NxHycr3+Ejb/vaxsPX+3TRtcBrzrrgAAAABYktHRKhQCDaSAAAAB3RJTUUH5gYJFyQy3tftxgAAAQBJREFUGNNjYGBgYGRiZmFlZWNmZ2SAAA5OLm4eXj5+AQ6ogKCQi6ubu4ensCCIxygiKubl7ePr6+cfIC4owcjAJCkVGBQc4usbGhYeIS0jy8AsFxkVHRPr6xsXn5CYJK/AoKiUnJKalg5UkZGZla2swsCqmpObl1/g61tYVFxSqsbKwKpeVl5RWVVdU1tX39CoocnAotXU3NLa1t7R2dXd06utwqCj6+vb1z9h4sRJk6f4+uopMLDrG0z1nTZ94sQZM31nGRrJMjBKGJvMnjN3wrz5CxaaCnKAvSNqtmjxkqXLlptbQP0iYmllbWNrZ+/gCBVgZHdS1GR1VpAFqQcApI0/jqlZOvEAAAAldEVYdGRhdGU6Y3JlYXRlADIwMjItMDYtMDlUMjM6MzY6NTArMDA6MDDi0xr+AAAAJXRFWHRkYXRlOm1vZGlmeQAyMDIyLTA2LTA5VDIzOjM2OjUwKzAwOjAwk46iQgAAAABJRU5ErkJggg==";
var fav_icon1 = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAB+FBMVEUAAAAAAAAAAAAAAAAAAAEAAAAAAQEAAAAAAAAUFRlLVGYrSWgHEBoHEBk3S19HUGMOExkAAABOcos7apIAAAAAAAA2Ly01KyoAAAAgKzdVaX9bZHIaKzwAAABKYHhDcZgAAAABAQFfgJY2XX0AAQEQFhoIEhwOFRgGDRUAAAAAAQE3W3cyWnwAAABSeJJRjLs1R1FVgaFWgJ4lPlMAAAAsOD4aLj55bm2Md3QAAABPSkmfko9pXlsbGRkbGRlfWlm1oJxMQkAAAAAAAAABAQFTb4tYibFtvPpWgKNScpC6s7nExtNzwPp1wPnZx8jMsKtuZGFoXVutmJODwfJ7wfbHr6p5a2hnW1gtQlI4ODk7N2A2LzWDvet8wPZPRkRHPl9CQUQlMTthe4+ko7RhXYGEeJhzsuJaVXRjWHzIwtNwfYddhqLCwcpTTpGimMhvsuVzv/djYpBgWJvLydxlgptVirdZbX1ASFZUaXtOb4xOZX1OZHxNa4ZRX21DSV5gaG9Je6lqsepstO1knclcfJxtoc5tpNFuptVup9ZnkbdgjrVss+xjpuBvrd9snspOW29jdI5LVmlkj7Vvrd54t+RlfptQXXJWZHtlf51oruNgmMFfdJBYZn1RXnRWZXthfZxSeZiGgYGOhYLdxb/RubWZlpWFd3T////2kwjgAAAARXRSTlMAASFrcAhxIjLb/vWvsPb+20b4+DFFyMkz2vf43CP9/m5y9vZysLGvsQlw9fZs/v7c+PjcR8nK9/hI2/72sbD1/t00bXBAFktiAAAAAWJLR0SnwLcrAwAAAAd0SU1FB+YGCRchHQhxJNoAAAD/SURBVBjTY2BgYGBkYmZhZWVjZmdkgAAOTi5uHl4+fgEOqICgkKubu7uHp7AgiMcoIirm5e3j4+Pr5y8uKMHIwCQpFRAYFOzjExIaFi4tI8vALBcRGRUd4+MTGxefkCivwKColJSckpoGVJGekZmlrMLAqpqdk5uX7+NTUFhUXKLGysCqXlpWXlFZVV1TW1ffoKHJoKXd2NTc0trW3tHZ1d2jo8Kgq+fj09vXP2HCxEmTfXz0FRjYDQyn+EydNmHC9Bk+M42MZRkYJUxMZ82e0z933vwFZoIcYO+Imi9ctHjJ0mUWllC/iFhZ29ja2Ts4OkEFGNmdFTVZXRRkQeoBhkE/Yj5NSZ4AAAAldEVYdGRhdGU6Y3JlYXRlADIwMjItMDYtMDlUMjM6MzM6MjgrMDA6MDA90JbEAAAAJXRFWHRkYXRlOm1vZGlmeQAyMDIyLTA2LTA5VDIzOjMzOjI4KzAwOjAwTI0ueAAAAABJRU5ErkJggg=="; var fav_icon1 = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAB+FBMVEUAAAAAAAAAAAAAAAAAAAEAAAAAAQEAAAAAAAAUFRlLVGYrSWgHEBoHEBk3S19HUGMOExkAAABOcos7apIAAAAAAAA2Ly01KyoAAAAgKzdVaX9bZHIaKzwAAABKYHhDcZgAAAABAQFfgJY2XX0AAQEQFhoIEhwOFRgGDRUAAAAAAQE3W3cyWnwAAABSeJJRjLs1R1FVgaFWgJ4lPlMAAAAsOD4aLj55bm2Md3QAAABPSkmfko9pXlsbGRkbGRlfWlm1oJxMQkAAAAAAAAABAQFTb4tYibFtvPpWgKNScpC6s7nExtNzwPp1wPnZx8jMsKtuZGFoXVutmJODwfJ7wfbHr6p5a2hnW1gtQlI4ODk7N2A2LzWDvet8wPZPRkRHPl9CQUQlMTthe4+ko7RhXYGEeJhzsuJaVXRjWHzIwtNwfYddhqLCwcpTTpGimMhvsuVzv/djYpBgWJvLydxlgptVirdZbX1ASFZUaXtOb4xOZX1OZHxNa4ZRX21DSV5gaG9Je6lqsepstO1knclcfJxtoc5tpNFuptVup9ZnkbdgjrVss+xjpuBvrd9snspOW29jdI5LVmlkj7Vvrd54t+RlfptQXXJWZHtlf51oruNgmMFfdJBYZn1RXnRWZXthfZxSeZiGgYGOhYLdxb/RubWZlpWFd3T////2kwjgAAAARXRSTlMAASFrcAhxIjLb/vWvsPb+20b4+DFFyMkz2vf43CP9/m5y9vZysLGvsQlw9fZs/v7c+PjcR8nK9/hI2/72sbD1/t00bXBAFktiAAAAAWJLR0SnwLcrAwAAAAd0SU1FB+YGCRchHQhxJNoAAAD/SURBVBjTY2BgYGBkYmZhZWVjZmdkgAAOTi5uHl4+fgEOqICgkKubu7uHp7AgiMcoIirm5e3j4+Pr5y8uKMHIwCQpFRAYFOzjExIaFi4tI8vALBcRGRUd4+MTGxefkCivwKColJSckpoGVJGekZmlrMLAqpqdk5uX7+NTUFhUXKLGysCqXlpWXlFZVV1TW1ffoKHJoKXd2NTc0trW3tHZ1d2jo8Kgq+fj09vXP2HCxEmTfXz0FRjYDQyn+EydNmHC9Bk+M42MZRkYJUxMZ82e0z933vwFZoIcYO+Imi9ctHjJ0mUWllC/iFhZ29ja2Ts4OkEFGNmdFTVZXRRkQeoBhkE/Yj5NSZ4AAAAldEVYdGRhdGU6Y3JlYXRlADIwMjItMDYtMDlUMjM6MzM6MjgrMDA6MDA90JbEAAAAJXRFWHRkYXRlOm1vZGlmeQAyMDIyLTA2LTA5VDIzOjMzOjI4KzAwOjAwTI0ueAAAAABJRU5ErkJggg==";
var fav_icon = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAB8lBMVEUAAAAAAAAAAAAAAAABAAAAAAABAAAAAAAAAAAdEBB0Pz5rKCgaBwcZBwdkMzJxPDocDAwAAACLTU6SOzsAAAAAAAA9Mic/LyEAAAA6HByQUUaIVEY+GBgAAACAQkKaQUIAAAABAQGWXl9+NjYBAAAaEBAcCAgZDQ0WBQUAAAB3Nzd9MjIAAACTUVK7UVJRNTWhVVaeVldTJSUAAAA+LC0+GhuGcmCgf2EAAABUTESrl4NzYlEdGhcdGhdiXFbIqIhWRjcAAAAAAAABAQGUSkq1VVX6bW6oUVGXS0vmro7+uJn6c3T6dXX/yqPnu5F3aFhxYVG/oH/7gHv6enjeuJOEcFtzX01VLCs4ODk7NmA5MTH1gHr6e3hWSTxHPl9CQUQ/JCKPYGGko7RhXYGEeJjmcW9cVnFjWH3IwtOHb3CjXV3CwcpTTpGimMjlb3D4c3RmYI1gWJvLydybZWW+T0x+V1hRP0Z7U1WTSEiHRUWGRUSORkZuTlBRQVBwX2CvRkXtaGjvamrNYWKmU1PVZ2fXaGjbaWncaWnAX1+7W1vkYF/ja2zRZWV9QkGeVFN2Pz69XV3ia2zkeHmpWFd/REOJSUirWVjjaGjBYGCeUlKMSkl8QkGBRUSoVlWeUE2QgXeWiHr1zqjmw5+bl5KVe2T///8NZLRGAAAARHRSTlMAASFrcAhxIjLb/vWvsPb+20b4+DFFyMkz2vf43CP9/m5y9vZysLGvsQn19mz+/tz4+NxHycr3+Ejb/vaxsPX+3TRtcBrzrrgAAAABYktHRKUuuUovAAAAB3RJTUUH5gYJFzsfVlK/LQAAAP9JREFUGNNjYGBgYGRiZmFlZWNmZ2SAAA5OLm4eXj5+AQ6ogKCQi6ubm7uHsCCIxygiKubp5e3t7ePrJy4owcjAJCnlHxAY5O0dHBIaJi0jy8AsFx4RGRXt7R0TGxefIK/AoKiUmJSckgpUkZaekamswsCqmpWdk5vn7Z1fUFhUrMbKwKpeUlpWXlFZVV1TW1evocnAotXQ2NTc0trW3tHZ2KWtwqCj6+3d3dPb19c/YaK3t54CA7u+wSTvyVP6+qZO855uaCTLwChhbDJj5qzZc6bOnWcqyAH2jqjZ/AULFy1eYm4B9YuIpZW1ja2dvYMjVICR3UlRk9VZQRakHgAlRz6K4dvoSgAAACV0RVh0ZGF0ZTpjcmVhdGUAMjAyMi0wNi0wOVQyMzo1OTozMSswMDowMJt1iQMAAAAldEVYdGRhdGU6bW9kaWZ5ADIwMjItMDYtMDlUMjM6NTk6MzErMDA6MDDqKDG/AAAAAElFTkSuQmCC" var fav_icon = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAB8lBMVEUAAAAAAAAAAAAAAAABAAAAAAABAAAAAAAAAAAdEBB0Pz5rKCgaBwcZBwdkMzJxPDocDAwAAACLTU6SOzsAAAAAAAA9Mic/LyEAAAA6HByQUUaIVEY+GBgAAACAQkKaQUIAAAABAQGWXl9+NjYBAAAaEBAcCAgZDQ0WBQUAAAB3Nzd9MjIAAACTUVK7UVJRNTWhVVaeVldTJSUAAAA+LC0+GhuGcmCgf2EAAABUTESrl4NzYlEdGhcdGhdiXFbIqIhWRjcAAAAAAAABAQGUSkq1VVX6bW6oUVGXS0vmro7+uJn6c3T6dXX/yqPnu5F3aFhxYVG/oH/7gHv6enjeuJOEcFtzX01VLCs4ODk7NmA5MTH1gHr6e3hWSTxHPl9CQUQ/JCKPYGGko7RhXYGEeJjmcW9cVnFjWH3IwtOHb3CjXV3CwcpTTpGimMjlb3D4c3RmYI1gWJvLydybZWW+T0x+V1hRP0Z7U1WTSEiHRUWGRUSORkZuTlBRQVBwX2CvRkXtaGjvamrNYWKmU1PVZ2fXaGjbaWncaWnAX1+7W1vkYF/ja2zRZWV9QkGeVFN2Pz69XV3ia2zkeHmpWFd/REOJSUirWVjjaGjBYGCeUlKMSkl8QkGBRUSoVlWeUE2QgXeWiHr1zqjmw5+bl5KVe2T///8NZLRGAAAARHRSTlMAASFrcAhxIjLb/vWvsPb+20b4+DFFyMkz2vf43CP9/m5y9vZysLGvsQn19mz+/tz4+NxHycr3+Ejb/vaxsPX+3TRtcBrzrrgAAAABYktHRKUuuUovAAAAB3RJTUUH5gYJFzsfVlK/LQAAAP9JREFUGNNjYGBgYGRiZmFlZWNmZ2SAAA5OLm4eXj5+AQ6ogKCQi6ubm7uHsCCIxygiKubp5e3t7ePrJy4owcjAJCnlHxAY5O0dHBIaJi0jy8AsFx4RGRXt7R0TGxefIK/AoKiUmJSckgpUkZaekamswsCqmpWdk5vn7Z1fUFhUrMbKwKpeUlpWXlFZVV1TW1evocnAotXQ2NTc0trW3tHZ2KWtwqCj6+3d3dPb19c/YaK3t54CA7u+wSTvyVP6+qZO855uaCTLwChhbDJj5qzZc6bOnWcqyAH2jqjZ/AULFy1eYm4B9YuIpZW1ja2dvYMjVICR3UlRk9VZQRakHgAlRz6K4dvoSgAAACV0RVh0ZGF0ZTpjcmVhdGUAMjAyMi0wNi0wOVQyMzo1OTozMSswMDowMJt1iQMAAAAldEVYdGRhdGU6bW9kaWZ5ADIwMjItMDYtMDlUMjM6NTk6MzErMDA6MDDqKDG/AAAAAElFTkSuQmCC"
+var submit_start;
 var favicon = {
@@ -53,11 +54,16 @@ var favicon = {
 start_swap: function() {
     this.run = true;
     this.auto_swap();
+    submit_start = Date.now();
 },
 stop_swap: function() {
     this.run = false;
     this.change(fav_icon);
+    if (typeof submit_start !== 'undefined') {
+        $("#runtime")[0].innerHTML = `Execution time: ${Math.round((Date.now() - submit_start)/1000)} sec`;
+        delete submit_start;
+    }
 },
 docHead:document.getElementsByTagName("head")[0]

View File

@@ -175,9 +175,9 @@ def num_layers(config):
 from flask_socketio import emit
 class Send_to_socketio(object):
     def write(self, bar):
-        print("should be emitting: ", bar, end="")
         time.sleep(0.01)
         try:
+            print(bar)
             emit('from_server', {'cmd': 'model_load_status', 'data': bar.replace(" ", "&nbsp;")}, broadcast=True)
         except:
             pass