Bug fix: loading a model after another model had already been loaded duplicated the settings menu until the website was refreshed

Fixed escaping warnings
Added back/redo unit test
This commit is contained in:
ebolam
2022-06-10 14:47:52 -04:00
parent c432051fe3
commit cfd1147d5a
3 changed files with 65 additions and 9 deletions

View File

@@ -281,7 +281,7 @@ class vars:
colaburl = "" # Ngrok url for Google Colab mode
apikey = "" # API key to use for InferKit API calls
oaiapikey = "" # API key to use for OpenAI API calls
savedir = getcwd()+"\stories"
savedir = getcwd()+"\\stories"
hascuda = False # Whether torch has detected CUDA on the system
usegpu = False # Whether to launch pipeline with GPU support
custmodpth = "" # Filesystem location of custom model to run
@@ -1072,6 +1072,10 @@ def get_model_info(model, directory=""):
else:
break_values = [layer_count]
break_values += [0] * (gpu_count - len(break_values))
#print("Model_info: {}".format({'cmd': 'selected_model_info', 'key_value': key_value, 'key':key,
# 'gpu':gpu, 'layer_count':layer_count, 'breakmodel':breakmodel,
# 'break_values': break_values, 'gpu_count': gpu_count,
# 'url': url, 'gpu_names': gpu_names}))
emit('from_server', {'cmd': 'selected_model_info', 'key_value': key_value, 'key':key,
'gpu':gpu, 'layer_count':layer_count, 'breakmodel':breakmodel,
'break_values': break_values, 'gpu_count': gpu_count,
@@ -3160,7 +3164,7 @@ def sendUSStatItems():
# KoboldAI Markup Formatting (Mixture of Markdown and sanitized html)
#==================================================================#
def kml(txt):
txt = txt.replace('\>', '>')
txt = txt.replace('\\>', '>')
txt = bleach.clean(markdown.markdown(txt), tags = ['p', 'em', 'strong', 'code', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'li', 'ul', 'b', 'i', 'a', 'span', 'button'], styles = ['color', 'font-weight'], attributes=['id', 'class', 'style', 'href'])
return txt
@@ -3184,6 +3188,7 @@ def setStartState():
#==================================================================#
def sendsettings():
# Send settings for selected AI type
emit('from_server', {'cmd': 'reset_menus'})
if(vars.model != "InferKit"):
for set in gensettings.gensettingstf:
emit('from_server', {'cmd': 'addsetting', 'data': set})
@@ -5195,6 +5200,7 @@ def loadRequest(loadpath, filename=None):
vars.lastact = ""
vars.submission = ""
vars.lastctx = ""
vars.genseqs = []
del vars.actions
vars.actions = structures.KoboldStoryRegister()
@@ -5456,7 +5462,7 @@ def importgame():
vars.importjs = {}
# Reset current save
vars.savedir = getcwd()+"\stories"
vars.savedir = getcwd()+"\\stories"
# Refresh game screen
vars.laststory = None
@@ -5538,7 +5544,7 @@ def importAidgRequest(id):
vars.worldinfo_i = [wi for wi in vars.worldinfo if wi["init"]]
# Reset current save
vars.savedir = getcwd()+"\stories"
vars.savedir = getcwd()+"\\stories"
# Refresh game screen
vars.laststory = None
@@ -5631,7 +5637,7 @@ def newGameRequest():
vars.lastctx = ""
# Reset current save
vars.savedir = getcwd()+"\stories"
vars.savedir = getcwd()+"\\stories"
# Refresh game screen
vars.laststory = None
@@ -5769,7 +5775,7 @@ if __name__ == "__main__":
while attempts < 10:
try:
cloudflare = str(localtunnel.stdout.readline())
cloudflare = (re.search("(?P<url>https?:\/\/[^\s]+loca.lt)", cloudflare).group("url"))
cloudflare = (re.search("(?P<url>https?://[^\\s]+loca.lt)", cloudflare).group("url"))
break
except:
attempts += 1