diff --git a/aiserver.py b/aiserver.py
index e2d98f49..0396faec 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -876,61 +876,14 @@ def loadmodelsettings():
#==================================================================#
def savesettings():
# Build json to write
- js = {}
- js["apikey"] = koboldai_vars.apikey
- js["andepth"] = koboldai_vars.andepth
- js["sampler_order"] = koboldai_vars.sampler_order
- js["temp"] = koboldai_vars.temp
- js["top_p"] = koboldai_vars.top_p
- js["top_k"] = koboldai_vars.top_k
- js["tfs"] = koboldai_vars.tfs
- js["typical"] = koboldai_vars.typical
- js["top_a"] = koboldai_vars.top_a
- js["rep_pen"] = koboldai_vars.rep_pen
- js["rep_pen_slope"] = koboldai_vars.rep_pen_slope
- js["rep_pen_range"] = koboldai_vars.rep_pen_range
- js["genamt"] = koboldai_vars.genamt
- js["max_length"] = koboldai_vars.max_length
- js["ikgen"] = koboldai_vars.ikgen
- js["formatoptns"] = {'frmttriminc': koboldai_vars.frmttriminc, 'frmtrmblln': koboldai_vars.frmtrmblln,
- 'frmtrmspch': koboldai_vars.frmtrmspch, 'frmtadsnsp': koboldai_vars.frmtadsnsp, 'singleline': koboldai_vars.singleline}
- js["numseqs"] = koboldai_vars.numseqs
- js["widepth"] = koboldai_vars.widepth
- js["useprompt"] = koboldai_vars.useprompt
- js["adventure"] = koboldai_vars.adventure
- js["chatmode"] = koboldai_vars.chatmode
- js["chatname"] = koboldai_vars.chatname
- js["dynamicscan"] = koboldai_vars.dynamicscan
- js["nopromptgen"] = koboldai_vars.nopromptgen
- js["rngpersist"] = koboldai_vars.rngpersist
- js["nogenmod"] = koboldai_vars.nogenmod
- js["fulldeterminism"] = koboldai_vars.full_determinism
- js["autosave"] = koboldai_vars.autosave
- js["welcome"] = koboldai_vars.welcome
- js["output_streaming"] = koboldai_vars.output_streaming
- js["show_probs"] = koboldai_vars.show_probs
-
- if(koboldai_vars.seed_specified):
- js["seed"] = koboldai_vars.seed
- else:
- js["seed"] = None
-
- js["newlinemode"] = koboldai_vars.newlinemode
-
- js["antemplate"] = koboldai_vars.setauthornotetemplate
-
- js["userscripts"] = koboldai_vars.userscripts
- js["corescript"] = koboldai_vars.corescript
- js["softprompt"] = koboldai_vars.spfilename
-
- # Write it
- if not os.path.exists('settings'):
- os.mkdir('settings')
- file = open("settings/" + getmodelname().replace('/', '_') + ".settings", "w")
- try:
- file.write(json.dumps(js, indent=3))
- finally:
- file.close()
+    # Make sure the settings directory exists before writing (the old code created it with os.mkdir)
+    os.makedirs('settings', exist_ok=True)
+    for setting in ['model_settings', 'user_settings', 'system_settings']:
+        name = koboldai_vars.model.replace("/", "_") if setting == "model_settings" else setting
+        filename = "settings/{}.v2_settings".format(name)
+        with open(filename, "w") as settings_file:
+            settings_file.write(getattr(koboldai_vars, "_{}".format(setting)).to_json())
+
#==================================================================#
# Don't save settings unless 2 seconds have passed without modification
@@ -945,121 +898,10 @@ def settingschanged():
#==================================================================#
def loadsettings():
- if(path.exists("defaults/" + getmodelname().replace('/', '_') + ".settings")):
- # Read file contents into JSON object
- file = open("defaults/" + getmodelname().replace('/', '_') + ".settings", "r")
- js = json.load(file)
+ if(path.exists("settings/" + getmodelname().replace('/', '_') + ".v2_settings")):
+ with open("settings/" + getmodelname().replace('/', '_') + ".v2_settings", "r") as file:
+ getattr(koboldai_vars, "_model_settings").from_json(file.read())
- processsettings(js)
- file.close()
- if(path.exists("settings/" + getmodelname().replace('/', '_') + ".settings")):
- # Read file contents into JSON object
- file = open("settings/" + getmodelname().replace('/', '_') + ".settings", "r")
- js = json.load(file)
-
- processsettings(js)
- file.close()
-
-def processsettings(js):
-# Copy file contents to koboldai_vars
- if("apikey" in js):
- koboldai_vars.apikey = js["apikey"]
- if("andepth" in js):
- koboldai_vars.andepth = js["andepth"]
- if("sampler_order" in js):
- sampler_order = koboldai_vars.sampler_order
- if(len(sampler_order) < 7):
- sampler_order = [6] + sampler_order
- koboldai_vars.sampler_order = sampler_order
- if("temp" in js):
- koboldai_vars.temp = js["temp"]
- if("top_p" in js):
- koboldai_vars.top_p = js["top_p"]
- if("top_k" in js):
- koboldai_vars.top_k = js["top_k"]
- if("tfs" in js):
- koboldai_vars.tfs = js["tfs"]
- if("typical" in js):
- koboldai_vars.typical = js["typical"]
- if("top_a" in js):
- koboldai_vars.top_a = js["top_a"]
- if("rep_pen" in js):
- koboldai_vars.rep_pen = js["rep_pen"]
- if("rep_pen_slope" in js):
- koboldai_vars.rep_pen_slope = js["rep_pen_slope"]
- if("rep_pen_range" in js):
- koboldai_vars.rep_pen_range = js["rep_pen_range"]
- if("genamt" in js):
- koboldai_vars.genamt = js["genamt"]
- if("max_length" in js):
- koboldai_vars.max_length = js["max_length"]
- if("ikgen" in js):
- koboldai_vars.ikgen = js["ikgen"]
- if("formatoptns" in js):
- for setting in ['frmttriminc', 'frmtrmblln', 'frmtrmspch', 'frmtadsnsp', 'singleline']:
- if setting in js["formatoptns"]:
- setattr(koboldai_vars, setting, js["formatoptns"][setting])
- if("numseqs" in js):
- koboldai_vars.numseqs = js["numseqs"]
- if("widepth" in js):
- koboldai_vars.widepth = js["widepth"]
- if("useprompt" in js):
- koboldai_vars.useprompt = js["useprompt"]
- if("adventure" in js):
- koboldai_vars.adventure = js["adventure"]
- if("chatmode" in js):
- koboldai_vars.chatmode = js["chatmode"]
- if("chatname" in js):
- koboldai_vars.chatname = js["chatname"]
- if("dynamicscan" in js):
- koboldai_vars.dynamicscan = js["dynamicscan"]
- if("nopromptgen" in js):
- koboldai_vars.nopromptgen = js["nopromptgen"]
- if("rngpersist" in js):
- koboldai_vars.rngpersist = js["rngpersist"]
- if("nogenmod" in js):
- koboldai_vars.nogenmod = js["nogenmod"]
- if("fulldeterminism" in js):
- koboldai_vars.full_determinism = js["fulldeterminism"]
- if("autosave" in js):
- koboldai_vars.autosave = js["autosave"]
- if("newlinemode" in js):
- koboldai_vars.newlinemode = js["newlinemode"]
- if("welcome" in js):
- koboldai_vars.welcome = js["welcome"]
- if("output_streaming" in js):
- koboldai_vars.output_streaming = js["output_streaming"]
- if("show_probs" in js):
- koboldai_vars.show_probs = js["show_probs"]
-
- if("seed" in js):
- koboldai_vars.seed = js["seed"]
- if(koboldai_vars.seed is not None):
- koboldai_vars.seed_specified = True
- else:
- koboldai_vars.seed_specified = False
- else:
- koboldai_vars.seed_specified = False
-
- if("antemplate" in js):
- koboldai_vars.setauthornotetemplate = js["antemplate"]
- if(not koboldai_vars.gamestarted):
- koboldai_vars.authornotetemplate = koboldai_vars.setauthornotetemplate
-
- if("userscripts" in js):
- koboldai_vars.userscripts = []
- for userscript in js["userscripts"]:
- if type(userscript) is not str:
- continue
- userscript = userscript.strip()
- if len(userscript) != 0 and all(q not in userscript for q in ("..", ":")) and all(userscript[0] not in q for q in ("/", "\\")) and os.path.exists(fileops.uspath(userscript)):
- koboldai_vars.userscripts.append(userscript)
-
- if("corescript" in js and type(js["corescript"]) is str and all(q not in js["corescript"] for q in ("..", ":")) and all(js["corescript"][0] not in q for q in ("/", "\\"))):
- koboldai_vars.corescript = js["corescript"]
- else:
- koboldai_vars.corescript = "default.lua"
-
#==================================================================#
# Load a soft prompt from a file
#==================================================================#
@@ -1270,6 +1112,12 @@ def general_startup(override_args=None):
#setup socketio relay queue
koboldai_settings.queue = multiprocessing.Queue()
socketio.start_background_task(socket_io_relay, koboldai_settings.queue, socketio)
+
+ #load system and user settings
+ for setting in ['user_settings', 'system_settings']:
+ if os.path.exists("settings/{}.v2_settings".format(setting)):
+ with open("settings/{}.v2_settings".format(setting), "r") as settings_file:
+ getattr(koboldai_vars, "_{}".format(setting)).from_json(settings_file.read())
#==================================================================#
# Load Model
@@ -1315,8 +1163,8 @@ def get_model_info(model, directory=""):
if model in ['Colab', 'API']:
url = True
elif model in [x[1] for x in model_menu['apilist']]:
- if path.exists("settings/{}.settings".format(model)):
- with open("settings/{}.settings".format(model), "r") as file:
+ if path.exists("settings/{}.v2_settings".format(model)):
+ with open("settings/{}.v2_settings".format(model), "r") as file:
# Check if API key exists
js = json.load(file)
if("apikey" in js and js["apikey"] != ""):
@@ -1427,8 +1275,8 @@ def get_oai_models(data):
# If the client settings file doesn't exist, create it
# Write API key to file
os.makedirs('settings', exist_ok=True)
- if path.exists("settings/{}.settings".format(model)):
- with open("settings/{}.settings".format(model), "r") as file:
+ if path.exists("settings/{}.v2_settings".format(model)):
+ with open("settings/{}.v2_settings".format(model), "r") as file:
js = json.load(file)
if 'online_model' in js:
online_model = js['online_model']
@@ -1436,7 +1284,7 @@ def get_oai_models(data):
if js['apikey'] != key:
changed=True
if changed:
- with open("settings/{}.settings".format(model), "w") as file:
+ with open("settings/{}.v2_settings".format(model), "w") as file:
js["apikey"] = key
file.write(json.dumps(js, indent=3))
@@ -1876,7 +1724,7 @@ def patch_transformers():
tail = input_ids[..., -koboldai_vars.generated_tkns:]
for i, t in enumerate(tail):
decoded = utils.decodenewlines(tokenizer.decode(t))
- _, found = checkworldinfo(decoded, force_use_txt=True, actions=koboldai_vars._actions)
+ _, found = checkworldinfo(decoded, force_use_txt=True, actions=koboldai_vars.actions)
found -= self.excluded_world_info[i]
if(len(found) != 0):
self.regeneration_required = True
@@ -1981,9 +1829,9 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
#Let's set the GooseAI or OpenAI server URLs if that's applicable
if online_model != "":
- if path.exists("settings/{}.settings".format(koboldai_vars.model)):
+ if path.exists("settings/{}.v2_settings".format(koboldai_vars.model)):
changed=False
- with open("settings/{}.settings".format(koboldai_vars.model), "r") as file:
+ with open("settings/{}.v2_settings".format(koboldai_vars.model), "r") as file:
# Check if API key exists
js = json.load(file)
if 'online_model' in js:
@@ -1994,7 +1842,7 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
changed=True
js['online_model'] = online_model
if changed:
- with open("settings/{}.settings".format(koboldai_vars.model), "w") as file:
+ with open("settings/{}.v2_settings".format(koboldai_vars.model), "w") as file:
file.write(json.dumps(js, indent=3))
# Swap OAI Server if GooseAI was selected
if(koboldai_vars.model == "GooseAI"):
@@ -2525,7 +2373,7 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
for i, t in enumerate(generated):
decoded = utils.decodenewlines(tokenizer.decode(past[i])) + utils.decodenewlines(tokenizer.decode(t[tpu_mtj_backend.params["seq"] : tpu_mtj_backend.params["seq"] + n_generated]))
- _, found = checkworldinfo(decoded, force_use_txt=True, actions=koboldai_vars._actions)
+ _, found = checkworldinfo(decoded, force_use_txt=True, actions=koboldai_vars.actions)
found -= excluded_world_info[i]
if(len(found) != 0):
regeneration_required = True
@@ -2541,7 +2389,7 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
koboldai_vars.compiling = False
def tpumtjgenerate_settings_callback() -> dict:
- sampler_order = vars.sampler_order[:]
+ sampler_order = koboldai_vars.sampler_order[:]
if len(sampler_order) < 7: # Add repetition penalty at beginning if it's not present
sampler_order = [6] + sampler_order
return {
@@ -2656,6 +2504,9 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
koboldai_vars.presets = to_use
koboldai_vars.aibusy = False
koboldai_vars.splist = [[f, get_softprompt_desc(os.path.join("./softprompts", f),None,True)] for f in os.listdir("./softprompts") if os.path.isfile(os.path.join("./softprompts", f)) and valid_softprompt(os.path.join("./softprompts", f))]
+ if initial_load and koboldai_vars.cloudflare_link != "":
+ print(format(colors.GREEN) + "KoboldAI has finished loading and is available at the following link for UI 1: " + koboldai_vars.cloudflare_link + format(colors.END))
+ print(format(colors.GREEN) + "KoboldAI has finished loading and is available at the following link for UI 2: " + koboldai_vars.cloudflare_link + "/new_ui" + format(colors.END))
# Set up Flask routes
@app.route('/')
@@ -2728,22 +2579,22 @@ def lua_startup():
global _bridged
global F
global bridged
- if(path.exists("settings/" + getmodelname().replace('/', '_') + ".settings")):
- file = open("settings/" + getmodelname().replace('/', '_') + ".settings", "r")
- js = json.load(file)
- if("userscripts" in js):
- koboldai_vars.userscripts = []
- for userscript in js["userscripts"]:
- if type(userscript) is not str:
- continue
- userscript = userscript.strip()
- if len(userscript) != 0 and all(q not in userscript for q in ("..", ":")) and all(userscript[0] not in q for q in ("/", "\\")) and os.path.exists(fileops.uspath(userscript)):
- koboldai_vars.userscripts.append(userscript)
- if("corescript" in js and type(js["corescript"]) is str and all(q not in js["corescript"] for q in ("..", ":")) and all(js["corescript"][0] not in q for q in ("/", "\\"))):
- koboldai_vars.corescript = js["corescript"]
- else:
- koboldai_vars.corescript = "default.lua"
- file.close()
+ #if(path.exists("settings/" + getmodelname().replace('/', '_') + ".settings")):
+ # file = open("settings/" + getmodelname().replace('/', '_') + ".settings", "r")
+ # js = json.load(file)
+ # if("userscripts" in js):
+ # koboldai_vars.userscripts = []
+ # for userscript in js["userscripts"]:
+ # if type(userscript) is not str:
+ # continue
+ # userscript = userscript.strip()
+ # if len(userscript) != 0 and all(q not in userscript for q in ("..", ":")) and all(userscript[0] not in q for q in ("/", "\\")) and os.path.exists(fileops.uspath(userscript)):
+ # koboldai_vars.userscripts.append(userscript)
+ # if("corescript" in js and type(js["corescript"]) is str and all(q not in js["corescript"] for q in ("..", ":")) and all(js["corescript"][0] not in q for q in ("/", "\\"))):
+ # koboldai_vars.corescript = js["corescript"]
+ # else:
+ # koboldai_vars.corescript = "default.lua"
+ # file.close()
#==================================================================#
# Lua runtime startup
@@ -2894,7 +2745,7 @@ def lua_compute_context(submission, entries, folders, kwargs):
assert type(submission) is str
if(kwargs is None):
kwargs = koboldai_vars.lua_state.table()
- actions = koboldai_vars._actions if koboldai_vars.lua_koboldbridge.userstate == "genmod" else koboldai_vars.actions
+ actions = koboldai_vars.actions
allowed_entries = None
allowed_folders = None
if(entries is not None):
@@ -3220,13 +3071,9 @@ def lua_set_chunk(k, v):
if(len(v) == 0):
print(colors.GREEN + f"{lua_log_format_name(koboldai_vars.lua_koboldbridge.logging_name)} deleted story chunk {k}" + colors.END)
chunk = int(k)
- if(koboldai_vars.lua_koboldbridge.userstate == "genmod"):
- del koboldai_vars._actions[chunk-1]
+ koboldai_vars.actions.delete_action(chunk-1)
koboldai_vars.lua_deleted.add(chunk)
- if(not hasattr(koboldai_vars, "_actions") or koboldai_vars._actions is not koboldai_vars.actions):
- #Instead of deleting we'll blank out the text. This way our actions and actions_metadata stay in sync and we can restore the chunk on an undo
- koboldai_vars.actions[chunk-1] = ""
- send_debug()
+ send_debug()
else:
if(k == 0):
print(colors.GREEN + f"{lua_log_format_name(koboldai_vars.lua_koboldbridge.logging_name)} edited prompt chunk" + colors.END)
@@ -3239,8 +3086,6 @@ def lua_set_chunk(k, v):
koboldai_vars.lua_edited.add(chunk)
koboldai_vars.prompt = v
else:
- if(koboldai_vars.lua_koboldbridge.userstate == "genmod"):
- koboldai_vars._actions[chunk-1] = v
koboldai_vars.lua_edited.add(chunk)
koboldai_vars.actions[chunk-1] = v
send_debug()
@@ -3741,7 +3586,7 @@ def get_message(msg):
f.write(str(msg['gpu_layers']) + '\n' + str(msg['disk_layers']))
f.close()
koboldai_vars.colaburl = msg['url'] + "/request"
- vars.model = vars.model_selected
+ koboldai_vars.model = koboldai_vars.model_selected
load_model(use_gpu=msg['use_gpu'], gpu_layers=msg['gpu_layers'], disk_layers=msg['disk_layers'], online_model=msg['online_model'])
elif(msg['cmd'] == 'show_model'):
print("Model Name: {}".format(getmodelname()))
@@ -4213,10 +4058,7 @@ def apiactionsubmit_tpumtjgenerate(txt, minimum, maximum):
if not koboldai_vars.quiet:
print("{0}Min:{1}, Max:{2}, Txt:{3}{4}".format(colors.YELLOW, minimum, maximum, utils.decodenewlines(tokenizer.decode(txt)), colors.END))
- koboldai_vars._actions = koboldai_vars.actions
koboldai_vars._prompt = koboldai_vars.prompt
- if(koboldai_vars.dynamicscan):
- koboldai_vars._actions = koboldai_vars._actions.copy()
# Submit input text to generator
soft_tokens = tpumtjgetsofttokens()
@@ -4622,10 +4464,7 @@ def _generate(txt, minimum, maximum, found_entries):
model.kai_scanner_excluded_world_info = found_entries
- koboldai_vars._actions = koboldai_vars.actions
koboldai_vars._prompt = koboldai_vars.prompt
- if(koboldai_vars.dynamicscan):
- koboldai_vars._actions = [x for x in koboldai_vars.actions]
with torch.no_grad():
already_generated = 0
@@ -4657,13 +4496,13 @@ def _generate(txt, minimum, maximum, found_entries):
encoded = []
for i in range(koboldai_vars.numseqs):
txt = utils.decodenewlines(tokenizer.decode(genout[i, -already_generated:]))
- winfo, mem, anotetxt, _found_entries = calcsubmitbudgetheader(txt, force_use_txt=True, actions=koboldai_vars._actions)
+ winfo, mem, anotetxt, _found_entries = calcsubmitbudgetheader(txt, force_use_txt=True, actions=koboldai_vars.actions)
found_entries[i].update(_found_entries)
if koboldai_vars.alt_gen:
txt, _, _ = koboldai_vars.calc_ai_text(submitted_text=txt)
print("Using Alt Gen: {}".format(tokenizer.decode(txt)))
else:
- txt, _, _ = calcsubmitbudget(len(koboldai_vars._actions), winfo, mem, anotetxt, koboldai_vars._actions, submission=txt)
+ txt, _, _ = calcsubmitbudget(len(koboldai_vars.actions), winfo, mem, anotetxt, koboldai_vars.actions, submission=txt)
encoded.append(torch.tensor(txt, dtype=torch.long, device=genout.device))
max_length = len(max(encoded, key=len))
encoded = torch.stack(tuple(torch.nn.functional.pad(e, (max_length - len(e), 0), value=model.config.pad_token_id or model.config.eos_token_id) for e in encoded))
@@ -5018,10 +4857,7 @@ def tpumtjgenerate(txt, minimum, maximum, found_entries=None):
if not koboldai_vars.quiet:
print("{0}Min:{1}, Max:{2}, Txt:{3}{4}".format(colors.YELLOW, minimum, maximum, utils.decodenewlines(tokenizer.decode(txt)), colors.END))
- koboldai_vars._actions = koboldai_vars.actions
koboldai_vars._prompt = koboldai_vars.prompt
- if(koboldai_vars.dynamicscan):
- koboldai_vars._actions = koboldai_vars._actions.copy()
# Submit input text to generator
try:
@@ -5060,13 +4896,13 @@ def tpumtjgenerate(txt, minimum, maximum, found_entries=None):
encoded = []
for i in range(koboldai_vars.numseqs):
txt = utils.decodenewlines(tokenizer.decode(past[i]))
- winfo, mem, anotetxt, _found_entries = calcsubmitbudgetheader(txt, force_use_txt=True, actions=koboldai_vars._actions)
+ winfo, mem, anotetxt, _found_entries = calcsubmitbudgetheader(txt, force_use_txt=True, actions=koboldai_vars.actions)
found_entries[i].update(_found_entries)
if koboldai_vars.alt_gen:
txt, _, _ = koboldai_vars.calc_ai_text(submitted_text=txt)
print("Using Alt Gen: {}".format(tokenizer.decode(txt)))
else:
- txt, _, _ = calcsubmitbudget(len(koboldai_vars._actions), winfo, mem, anotetxt, koboldai_vars._actions, submission=txt)
+ txt, _, _ = calcsubmitbudget(len(koboldai_vars.actions), winfo, mem, anotetxt, koboldai_vars.actions, submission=txt)
encoded.append(np.array(txt, dtype=np.uint32))
max_length = len(max(encoded, key=len))
encoded = np.stack(tuple(np.pad(e, (max_length - len(e), 0), constant_values=tpu_mtj_backend.pad_token_id) for e in encoded))
@@ -5660,15 +5496,14 @@ def checkworldinfo(txt, allowed_entries=None, allowed_folders=None, force_use_tx
depth += 1
if(ln > 0):
- chunks = collections.deque()
- i = 0
- for key in reversed(actions):
- chunk = actions[key]
- chunks.appendleft(chunk)
- i += 1
- if(i == depth):
- break
-
+ chunks = actions[-depth:]
+ #i = 0
+ #for key in reversed(actions):
+ # chunk = actions[key]
+ # chunks.appendleft(chunk)
+ # i += 1
+ # if(i == depth):
+ # break
if(ln >= depth):
txt = "".join(chunks)
elif(ln > 0):
@@ -6592,8 +6427,8 @@ def final_startup():
threading.Thread(target=__preempt_tokenizer).start()
# Load soft prompt specified by the settings file, if applicable
- if(path.exists("settings/" + getmodelname().replace('/', '_') + ".settings")):
- file = open("settings/" + getmodelname().replace('/', '_') + ".settings", "r")
+ if(path.exists("settings/" + getmodelname().replace('/', '_') + ".v2_settings")):
+ file = open("settings/" + getmodelname().replace('/', '_') + ".v2_settings", "r")
js = json.load(file)
if(koboldai_vars.allowsp and "softprompt" in js and type(js["softprompt"]) is str and all(q not in js["softprompt"] for q in ("..", ":")) and (len(js["softprompt"]) != 0 and all(js["softprompt"][0] not in q for q in ("/", "\\")))):
if valid_softprompt("softprompts/"+js["softprompt"]):
@@ -6861,8 +6696,7 @@ def file_popup(popup_title, starting_folder, return_event, upload=True, jailed=T
editable=False, show_breadcrumbs=True, item_check=None, show_hidden=False,
valid_only=False, hide_extention=False, extra_parameter_function=None,
column_names=['File Name'], show_filename=True, show_folders=True,
- column_widths=["100%"],
- sort="Modified", desc=False):
+ column_widths=["100%"], sort="Modified", advanced_sort=None, desc=False):
#starting_folder = The folder we're going to get folders and/or items from
#return_event = the socketio event that will be emitted when the load button is clicked
#jailed = if set to true will look for the session variable jailed_folder and prevent navigation outside of that folder
@@ -6895,6 +6729,7 @@ def file_popup(popup_title, starting_folder, return_event, upload=True, jailed=T
session['sort'] = sort
session['desc'] = desc
session['show_folders'] = show_folders
+ session['advanced_sort'] = advanced_sort
socketio.emit("load_popup", {"popup_title": popup_title, "call_back": return_event, "renameable": renameable, "deleteable": deleteable, "editable": editable, 'upload': upload}, broadcast=False, room="UI_2")
socketio.emit("load_popup", {"popup_title": popup_title, "call_back": return_event, "renameable": renameable, "deleteable": deleteable, "editable": editable, 'upload': upload}, broadcast=True, room="UI_1")
@@ -6917,6 +6752,7 @@ def get_files_folders(starting_folder):
sort = session['sort']
desc = session['desc']
show_folders = session['show_folders']
+ advanced_sort = session['advanced_sort']
if starting_folder == 'This PC':
breadcrumbs = [['This PC', 'This PC']]
@@ -6943,7 +6779,11 @@ def get_files_folders(starting_folder):
folders = []
files = []
base_path = os.path.abspath(starting_folder).replace("\\", "/")
- for item in get_files_sorted(base_path, sort, desc=desc):
+ if advanced_sort is not None:
+ files_to_check = advanced_sort(base_path, desc=desc)
+ else:
+ files_to_check = get_files_sorted(base_path, sort, desc=desc)
+ for item in files_to_check:
item_full_path = os.path.join(base_path, item).replace("\\", "/")
if hasattr(os.stat(item_full_path), "st_file_attributes"):
hidden = bool(os.stat(item_full_path).st_file_attributes & stat.FILE_ATTRIBUTE_HIDDEN)
@@ -7238,12 +7078,12 @@ def UI_2_load_story_list(data):
deleteable=True, show_breadcrumbs=True, item_check=valid_story,
valid_only=True, hide_extention=True, extra_parameter_function=get_story_length,
column_names=['Story Name', 'Action Count'], show_filename=False,
- column_widths=['auto', '100px'],
+ column_widths=['auto', '100px'], advanced_sort=story_sort,
sort="Modified", desc=True)
def get_story_length(item_full_path, item, valid_selection):
if not valid_selection:
- return [""]
+ return ["", ""]
with open(item_full_path, "r") as f:
js = json.load(f)
title = js['story_name'] if 'story_name' in js else ".".join(item.split(".")[:-1])
@@ -7251,7 +7091,7 @@ def get_story_length(item_full_path, item, valid_selection):
return [title, len(js['actions'])]
if js['file_version'] == 1:
return [title, len(js['actions'])]
- return [0 if js['actions']['action_count'] == -1 else js['actions']['action_count'] ]
+ return [title, 0 if js['actions']['action_count'] == -1 else js['actions']['action_count'] ]
def valid_story(file):
@@ -7265,6 +7105,24 @@ def valid_story(file):
return 'actions' in js
+def story_sort(base_path, desc=False):
+ files = {}
+ for file in os.scandir(path=base_path):
+ if file.name.endswith(".json"):
+ filename = os.path.join(base_path, file.name).replace("\\", "/")
+ with open(filename, "r") as f:
+ try:
+ js = json.load(f)
+                except Exception:
+                    js = {}
+
+ if 'story_name' in js and js['story_name'] in koboldai_vars.story_loads:
+ files[file.name] = datetime.datetime.strptime(koboldai_vars.story_loads[js['story_name']], "%m/%d/%Y, %H:%M:%S")
+ else:
+ files[file.name] = datetime.datetime.fromtimestamp(file.stat().st_mtime)
+ return [key[0] for key in sorted(files.items(), key=lambda kv: (kv[1], kv[0]), reverse=desc)]
+
+
#==================================================================#
# Event triggered on load story
#==================================================================#
@@ -8097,18 +7955,18 @@ def put_model(body: ModelSelectionSchema):
{api_validation_error_response}
{api_server_busy_response}
"""
- if vars.aibusy or vars.genseqs:
+ if koboldai_vars.aibusy or koboldai_vars.genseqs:
abort(Response(json.dumps({"detail": {
"msg": "Server is busy; please try again later.",
"type": "service_unavailable",
}}), mimetype="application/json", status=503))
set_aibusy(1)
- old_model = vars.model
- vars.model = body.model.strip()
+ old_model = koboldai_vars.model
+ koboldai_vars.model = body.model.strip()
try:
load_model(use_breakmodel_args=True, breakmodel_args_default_to_cpu=True)
except Exception as e:
- vars.model = old_model
+ koboldai_vars.model = old_model
raise e
set_aibusy(0)
return {}
@@ -10322,7 +10180,6 @@ for schema in config_endpoint_schemas:
#==================================================================#
# Final startup commands to launch Flask app
#==================================================================#
-@app.before_first_request
def startup():
if koboldai_vars.model == "" or koboldai_vars.model is None:
koboldai_vars.model = "ReadOnly"
@@ -10334,6 +10191,7 @@ if __name__ == "__main__":
general_startup()
patch_transformers()
+ startup()
# Start Flask/SocketIO (Blocking, so this must be last method!)
port = args.port if "port" in args and args.port is not None else 5000
koboldai_vars.port = port
@@ -10365,6 +10223,7 @@ if __name__ == "__main__":
if(args.localtunnel or args.ngrok or args.remote):
with open('cloudflare.log', 'w') as cloudflarelog:
cloudflarelog.write("KoboldAI has finished loading and is available at the following link : " + cloudflare)
+ koboldai_vars.cloudflare_link = cloudflare
print(format(colors.GREEN) + "KoboldAI has finished loading and is available at the following link for UI 1: " + cloudflare + format(colors.END))
print(format(colors.GREEN) + "KoboldAI has finished loading and is available at the following link for UI 2: " + cloudflare + "/new_ui" + format(colors.END))
else:
diff --git a/bridge.lua b/bridge.lua
index 014426a8..9173302a 100644
--- a/bridge.lua
+++ b/bridge.lua
@@ -730,7 +730,7 @@ return function(_python, _bridged)
return prompt
end
end
- local actions = koboldbridge.userstate == "genmod" and bridged.koboldai_vars._actions or bridged.koboldai_vars.actions
+            local actions = bridged.koboldai_vars.actions
return _python.as_attrgetter(actions).get(math.tointeger(rawget(t, "_num")) - 1)
end
end
@@ -752,7 +752,7 @@ return function(_python, _bridged)
error("Attempted to set the prompt chunk's content to the empty string; this is not allowed")
return
end
- local actions = koboldbridge.userstate == "genmod" and bridged.koboldai_vars._actions or bridged.koboldai_vars.actions
+            local actions = bridged.koboldai_vars.actions
if _k ~= 0 and _python.as_attrgetter(actions).get(_k-1) == nil then
return
end
@@ -777,7 +777,7 @@ return function(_python, _bridged)
---@return fun(): KoboldStoryChunk, table, nil
function KoboldStory:forward_iter()
- local actions = koboldbridge.userstate == "genmod" and bridged.koboldai_vars._actions or bridged.koboldai_vars.actions
+        local actions = bridged.koboldai_vars.actions
local nxt, iterator = _python.iter(actions)
local run_once = false
local function f()
@@ -805,7 +805,7 @@ return function(_python, _bridged)
---@return fun(): KoboldStoryChunk, table, nil
function KoboldStory:reverse_iter()
- local actions = koboldbridge.userstate == "genmod" and bridged.koboldai_vars._actions or bridged.koboldai_vars.actions
+        local actions = bridged.koboldai_vars.actions
local nxt, iterator = _python.iter(_python.builtins.reversed(actions))
local last_run = false
local function f()
diff --git a/gensettings.py b/gensettings.py
index 0f3f239b..c071264a 100644
--- a/gensettings.py
+++ b/gensettings.py
@@ -506,6 +506,22 @@ gensettingstf = [
"default": 0,
"tooltip": "Shows token usage when typing in relevant text boxes. May lag slower devices."
},
+    {
+        "UI_V2_Only": True,
+        "uitype": "toggle",
+        "unit": "bool",
+        "label": "Beep on Complete",
+        "id": "beep_on_complete",
+        "min": 0,
+        "max": 1,
+        "step": 1,
+        "default": 0,
+        "tooltip": "When enabled the UI will beep when completing an action such as generation or model loading.",
+        "menu_path": "Interface",
+        "sub_path": "UI",
+        "classname": "user",
+        "name": "beep_on_complete"
+    },
]
gensettingsik =[{
diff --git a/koboldai_settings.py b/koboldai_settings.py
index 614bf508..0b4b571a 100644
--- a/koboldai_settings.py
+++ b/koboldai_settings.py
@@ -13,6 +13,8 @@ def clean_var_for_emit(value):
return value.to_json()
elif isinstance(value, set):
return list(value)
+ elif isinstance(value, datetime.datetime):
+ return str(value)
else:
return value
@@ -59,12 +61,21 @@ class koboldai_vars(object):
def load_story(self, story_name, json_data):
#Story name here is intended for multiple users on multiple stories. Now always uses default
#If we can figure out a way to get flask sessions into/through the lua bridge we could re-enable
+ original_story_name = story_name
story_name = 'default'
if story_name in self._story_settings:
self._story_settings[story_name].socketio.emit("reset_story", {}, broadcast=True, room="UI_2")
+ self._story_settings[story_name].no_save = True
self._story_settings[story_name].from_json(json_data)
+ self._story_settings[story_name].no_save = False
else:
+ self._story_settings[story_name].no_save = True
self.create_story(story_name, json_data=json_data)
+ self._story_settings[story_name].no_save = False
+ self._system_settings.story_loads[original_story_name] = datetime.datetime.now().strftime("%m/%d/%Y, %H:%M:%S")
+ with open("settings/system_settings.v2_settings", "w") as settings_file:
+ settings_file.write(self._system_settings.to_json())
+
def save_story(self):
self._story_settings['default'].save_story()
@@ -110,7 +121,7 @@ class koboldai_vars(object):
# TODO: We may want to replace the "text" variable with a list-type
# class of context blocks, the class having a __str__ function.
- if self.sp:
+ if self.sp_length > 0:
context.append({"type": "soft_prompt", "text": f"<{self.sp_length} tokens of Soft Prompt.>"})
# Header is never used?
# if koboldai_vars.model not in ("Colab", "API", "OAI") and self.tokenizer._koboldai_header:
@@ -320,6 +331,8 @@ class settings(object):
return data.to_json()
elif isinstance(data, KoboldWorldInfo):
return data.to_json()
+ elif isinstance(data, datetime.datetime):
+ return str(data)
output = BytesIO()
pickle.dump(data, output)
output.seek(0)
@@ -331,8 +344,16 @@ class settings(object):
json_data = json.loads(data)
else:
json_data = data
- for key, value in data.items():
+ #since loading will trigger the autosave, we need to disable it
+ if 'no_save' in self.__dict__:
+ setattr(self, 'no_save', True)
+ for key, value in json_data.items():
if key in self.__dict__:
+ if key == 'sampler_order':
+ if(len(value) < 7):
+ value = [6] + value
+ if key == 'autosave':
+ autosave = value
if isinstance(value, str):
if value[:7] == 'base64:':
value = pickle.loads(base64.b64decode(value[7:]))
@@ -352,6 +373,9 @@ class settings(object):
else:
setattr(self, key, value)
+ if 'no_save' in self.__dict__:
+ setattr(self, 'no_save', False)
+
def send_to_ui(self):
for (name, value) in vars(self).items():
if name not in self.local_only_variables and name[0] != "_":
@@ -478,8 +502,8 @@ class model_settings(settings):
process_variable_changes(self.socketio, self.__class__.__name__.replace("_settings", ""), name, value, old_value)
class story_settings(settings):
- local_only_variables = ['socketio', 'tokenizer', 'koboldai_vars']
- no_save_variables = ['socketio', 'tokenizer', 'koboldai_vars', 'context']
+ local_only_variables = ['socketio', 'tokenizer', 'koboldai_vars', 'no_save']
+ no_save_variables = ['socketio', 'tokenizer', 'koboldai_vars', 'context', 'no_save']
settings_name = "story"
def __init__(self, socketio, koboldai_vars, tokenizer=None):
self.socketio = socketio
@@ -492,6 +516,7 @@ class story_settings(settings):
self.gamestarted = False # Whether the game has started (disables UI elements)
self.gamesaved = True # Whether or not current game is saved
self.autosave = False # Whether or not to automatically save after each action
+ self.no_save = False #Temporary disable save (doesn't save with the file)
self.prompt = "" # Prompt
self.memory = "" # Text submitted to memory field
self.authornote = "" # Text submitted to Author's Note field
@@ -539,27 +564,30 @@ class story_settings(settings):
self.max_authornote_length = 512
self.prompt_in_ai = False
self.context = []
+ self.last_story_load = None
def save_story(self):
- print("Saving")
- save_name = self.story_name if self.story_name != "" else "untitled"
- adder = ""
- while True:
- if os.path.exists("stories/{}{}_v2.json".format(save_name, adder)):
- with open("stories/{}{}_v2.json".format(save_name, adder), "r") as f:
- temp = json.load(f)
- if 'story_id' in temp:
- if self.story_id != temp['story_id']:
- adder = 0 if adder == "" else adder+1
+ if not self.no_save:
+ if self.prompt != "" or self.memory != "" or self.authornote != "" or len(self.actions) > 0 or len(self.worldinfo_v2) > 0:
+ print("Saving")
+ save_name = self.story_name if self.story_name != "" else "untitled"
+ adder = ""
+ while True:
+ if os.path.exists("stories/{}{}_v2.json".format(save_name, adder)):
+ with open("stories/{}{}_v2.json".format(save_name, adder), "r") as f:
+ temp = json.load(f)
+ if 'story_id' in temp:
+ if self.story_id != temp['story_id']:
+ adder = 0 if adder == "" else adder+1
+ else:
+ break
+ else:
+ adder = 0 if adder == "" else adder+1
else:
break
- else:
- adder = 0 if adder == "" else adder+1
- else:
- break
- with open("stories/{}{}_v2.json".format(save_name, adder), "w") as settings_file:
- settings_file.write(self.to_json())
- self.gamesaved = True
+ with open("stories/{}{}_v2.json".format(save_name, adder), "w") as settings_file:
+ settings_file.write(self.to_json())
+ self.gamesaved = True
def reset(self):
self.socketio.emit("reset_story", {}, broadcast=True, room="UI_2")
@@ -659,6 +687,7 @@ class user_settings(settings):
self.debug = False # If set to true, will send debug information to the client for display
self.output_streaming = True
self.show_probs = False # Whether or not to show token probabilities
+ self.beep_on_complete = False
def __setattr__(self, name, value):
@@ -731,6 +760,8 @@ class system_settings(settings):
self.seed = None # The current RNG seed (as an int), or None if unknown
self.alt_gen = False # Use the calc_ai_text method for generating text to go to the AI
self.theme_list = [".".join(f.split(".")[:-1]) for f in os.listdir("./themes") if os.path.isfile(os.path.join("./themes", f))]
+ self.cloudflare_link = ""
+ self.story_loads = {} #dict of when each story was last loaded
self.port = 5000
self.on_colab = 'google.colab' in sys.modules
self.horde_share = False
@@ -790,7 +821,7 @@ class KoboldStoryRegister(object):
self.append(item)
def reset(self, sequence=[]):
- self.__init__(self.socketio, self.story_settings, sequence=sequence, tokenizer=self.tokenizer)
+ self.__init__(self.socketio, self.story_settings, self.koboldai_vars, sequence=sequence, tokenizer=self.tokenizer)
def __str__(self):
return "".join([x['Selected Text'] for ignore, x in sorted(self.actions.items())])
@@ -810,7 +841,11 @@ class KoboldStoryRegister(object):
raise StopIteration
def __getitem__(self, i):
- return self.actions[i]["Selected Text"]
+ if isinstance(i, slice):
+ temp = [self.actions[x]["Selected Text"] for x in list(self.actions)[i]]
+ return temp
+ else:
+ return self.actions[i]["Selected Text"]
def __setitem__(self, i, text):
if i in self.actions:
@@ -873,6 +908,8 @@ class KoboldStoryRegister(object):
self.actions = temp
self.set_game_saved()
self.recalc_token_length()
+ self.story_settings.save_story()
+
def append(self, text):
self.clear_unused_options()
@@ -1096,23 +1133,36 @@ class KoboldStoryRegister(object):
self.actions[self.action_count+1] = {"Selected Text": "", "Selected Text Length": 0, "Options": []}
for i in range(len(text_list)):
self.actions[self.action_count+1]['Options'].append({"text": text_list[i], "Pinned": False, "Previous Selection": False, "Edited": False, "Probabilities": [], "stream_id": i})
-
- process_variable_changes(self.socketio, "actions", "Options", {"id": self.action_count+1, "options": self.actions[self.action_count+1]["Options"]}, {"id": self.action_count+1, "options": None})
- process_variable_changes(self.socketio, "story", 'actions', {"id": self.action_count+1, 'action': self.actions[self.action_count+1]}, None)
+
+ #We need to see if this is the last token being streamed. If so due to the rely it will come in AFTER the actual trimmed final text overwriting it in the UI
+ if self.tokenizer is not None:
+ if len(self.tokenizer.encode(self.actions[self.action_count+1]["Options"][0]['text'])) != self.koboldai_vars.genamt:
+ #process_variable_changes(self.socketio, "actions", "Options", {"id": self.action_count+1, "options": self.actions[self.action_count+1]["Options"]}, {"id": self.action_count+1, "options": None})
+ process_variable_changes(self.socketio, "story", 'actions', {"id": self.action_count+1, 'action': self.actions[self.action_count+1]}, None)
else:
#We're streaming single options so our output is our selected
- if self.tokenizer is not None:
- selected_text_length = len(self.tokenizer.encode(text_list[0]))
- else:
- selected_text_length = 0
- if self.action_count+1 in self.actions:
- self.actions[self.action_count+1]['Selected Text'] = "{}{}".format(self.actions[self.action_count+1]['Selected Text'], text_list[0])
- else:
- self.actions[self.action_count+1] = {"Selected Text": text_list[0], "Selected Text Length": selected_text_length, "Options": []}
-
- process_variable_changes(self.socketio, "actions", "Selected Text", {"id": self.action_count+1, "text": self.actions[self.action_count+1]['Selected Text']}, None)
- process_variable_changes(self.socketio, "actions", 'Selected Text Length', {"id": self.action_count+1, 'length': self.actions[self.action_count+1]['Selected Text Length']}, {"id": self.action_count, 'length': 0})
- process_variable_changes(self.socketio, "story", 'actions', {"id": self.action_count+1, 'action': self.actions[self.action_count+1]}, None)
+ #First we need to see if this is actually the prompt. If so we'll just not do streaming:
+ if self.story_settings.prompt != "":
+ if self.action_count+1 in self.actions:
+ if self.tokenizer is not None:
+ selected_text_length = len(self.tokenizer.encode(self.actions[self.action_count+1]['Selected Text']))
+ else:
+ selected_text_length = 0
+ self.actions[self.action_count+1]['Selected Text'] = "{}{}".format(self.actions[self.action_count+1]['Selected Text'], text_list[0])
+ self.actions[self.action_count+1]['Selected Text Length'] = selected_text_length
+ else:
+ if self.tokenizer is not None:
+ selected_text_length = len(self.tokenizer.encode(text_list[0]))
+ else:
+ selected_text_length = 0
+ self.actions[self.action_count+1] = {"Selected Text": text_list[0], "Selected Text Length": selected_text_length, "Options": []}
+
+
+
+ if self.tokenizer is not None:
+ if len(self.tokenizer.encode(self.actions[self.action_count+1]['Selected Text'])) != self.koboldai_vars.genamt:
+ #process_variable_changes(self.socketio, "actions", "Options", {"id": self.action_count+1, "options": self.actions[self.action_count+1]["Options"]}, {"id": self.action_count+1, "options": None})
+ process_variable_changes(self.socketio, "story", 'actions', {"id": self.action_count+1, 'action': self.actions[self.action_count+1]}, None)
def set_probabilites(self, probabilities, action_id=None):
if action_id is None:
@@ -1175,6 +1225,9 @@ class KoboldWorldInfo(object):
def __getitem__(self, i):
return self.self.world_info[i].copy()
+ def __len__(self):
+ return len(self.world_info)
+
def recalc_token_length(self):
if self.tokenizer is not None:
for uid in self.world_info:
@@ -1401,8 +1454,8 @@ class KoboldWorldInfo(object):
#Make sure we have all the appropriate variables:
for item in self.world_info:
for column in ["uid","title","key","keysecondary","folder","constant","content","comment","token_length","selective","used_in_game"]:
- if column not in item:
- item[column] = None
+ if column not in self.world_info[item]:
+ self.world_info[item][column] = None
try:
self.sync_world_info_to_old_format()
except:
diff --git a/static/koboldai.css b/static/koboldai.css
index e06ccdb7..931661e8 100644
--- a/static/koboldai.css
+++ b/static/koboldai.css
@@ -205,13 +205,17 @@ input[type="range"]::-ms-fill-upper {
"item item"
"minlabel maxlabel";
grid-template-rows: 20px 23px 20px;
- grid-template-columns: 288px 60px;
+ grid-template-columns: 310px 60px;
row-gap: 0.2em;
background-color: var(--setting_background);
color: var(--setting_text);
border-radius: var(--radius_settings_background);
padding: 3px;
- margin: 2px;
+ margin-top: 5px;
+}
+
+#story_menu_author .setting_tile_area{
+ padding: 0;
}
@@ -350,6 +354,10 @@ input[type="range"]::-ms-fill-upper {
display: inline-block;
transition: left 0.5s;
cursor: pointer;
+ filter: brightness(85%);
+}
+.pinned .menu_pin {
+ filter: brightness(200%);
}
}
@@ -478,13 +486,35 @@ input[type="range"]::-ms-fill-upper {
.settings_button {
color: var(--button_text);
background: var(--button_background);
- border-color: white;
+ border-color: var(--button_text);
border-width: 1px;
border-radius: var(--radius_settings_button);
border-style:solid;
height: 35px;
}
+
+.settings_button > .button_label {
+ position: relative;
+ bottom: 3px;
+ right: 4px;
+}
+
+.settings_button > .material-icons-outlined {
+ position: relative;
+ top: 1px;
+ right: 2px;
+}
+
+.Model_Info .settings_button {
+ transform: translateY(7%);
+ margin: -5px;
+}
+
+#import_story_button {
+ transform: translateY(22%);
+}
+
.settings_button[story_gamesaved="true"] {
filter: brightness(40%);
}
@@ -657,6 +687,13 @@ input[type="range"]::-ms-fill-upper {
#palette_area {
width: 100%;
text-align: center;
+ border: 2px solid var(--palette_card_text);
+ border-radius: var(--radius_palette_card);
+ box-shadow: var(--palette_card_shadow);
+ margin: 5px;
+ padding: 5px 0 10px 0;
+ background: var(--palette_card_background);
+ color: var(--palette_card_text);
}
#save_theme_area {
@@ -667,10 +704,15 @@ input[type="range"]::-ms-fill-upper {
#save_theme_name {
width: 88%;
margin: 0 0 5px -25px;
+ background-color: var(--palette_card_background);
+ filter: brightness(90%);
+ border-color: var(--palette_card_text);
+ color: var(--palette_card_text);
}
#palette_area .material-icons-outlined {
position: absolute;
+ margin-left: 2px;
}
#Palette {
@@ -678,17 +720,22 @@ input[type="range"]::-ms-fill-upper {
}
#Palette_Table{
width: 100%;
+ color: var(--palette_card_text);
}
-.Theme_Input {
+#Palette .Theme_Input {
width: 100%;
+ background-color: var(--palette_card_background);
+ border-color: var(--palette_card_text);
+ border-width: 0px;
}
.advanced_theme {
margin: 10px 10px 0 auto;
padding: 2px 4px 2px 4px;
- border-radius: var(--radius_wi_card);
- background-color: var(--wi_tag_color);
+ border-radius: var(--radius_alternate_button);
+ background-color: var(--alternate_button_background);
+ color: var(--alternate_button_text);
}
.advanced_theme:hover {
filter: brightness(85%);
@@ -793,6 +840,10 @@ input[type="range"]::-ms-fill-upper {
display: inline-block;
transition: left 0.5s;
cursor: pointer;
+ filter: brightness(85%);
+}
+.pinned .story_menu_pin {
+ filter: brightness(200%);
}
}
@@ -912,8 +963,9 @@ td.server_vars {
.wi_add_button{
margin: 0 0 0 10px;
padding: 4px 4px 4px 4px;
- border-radius: var(--radius_wi_card);
- background-color: var(--wi_tag_color);
+ border-radius: var(--radius_alternate_button);
+ background-color: var(--alternate_button_background);
+ color: var(--alternate_button_text);
}
.wi_add_button .material-icons-outlined{
@@ -932,10 +984,11 @@ td.server_vars {
border: 2px outset var(--wi_card_border_color);
background-color: var(--wi_card_bg_color);
color:var(--wi_card_text_color);
+ box-shadow: var(--wi_card_shadow);
margin: 10px;
}
-.world_info_card.world_info_included {
+.world_info_card.used_in_game {
border: 2px outset var(--wi_card_border_color_to_ai);
}
@@ -990,7 +1043,7 @@ td.server_vars {
}
.oi[folder] {
- margin-right: 5px;
+ margin-left: 5px;
}
.oi[data-glyph="folder"] {
@@ -1499,8 +1552,8 @@ body {
}
#error_message.popup {
- background-color: var(--error_palette);
- color: var(--on_error_palette);
+ background-color: var(--error);
+ color: var(--error_text);
height: 30vh;
top: 35vh;
overflow: hidden;
@@ -1508,22 +1561,22 @@ body {
#error_message .title {
width: 100%;
- background-color: var(--error_container_palette);
- color: var(--on_error_container_palette);
+ background-color: var(--error_title);
+ color: var(--error_title_text);
text-align: center;
font-size: calc(1.3em + var(--font_size_adjustment));
}
#error_message.popup .btn-primary {
- background-color: var(--error_palette);
- color: var(--on_error_palette);
- border-color: var(--on_error_palette);
+ background-color: var(--error);
+ color: var(--error_text);
+ border-color: var(--error_text);
}
#error_message .popup_load_cancel{
- background-color: var(--error_container_palette);
- color: var(--on_error_container_palette);
+ background-color: var(--error_title);
+ color: var(--error_title_text);
}
@@ -1533,8 +1586,8 @@ body {
flex-grow: 1;
flex-shrink: 1;
flex-basis: auto;
- background-color: var(--error_container_palette);
- color: var(--on_error_container_palette);
+ background-color: var(--error);
+ color: var(--error_text);
}
.breadcrumbitem {
@@ -1658,7 +1711,7 @@ body {
width: 50%;
height: 75%;
padding-bottom: 10px;
- background-color: var(--layer1_palette);
+ background-color: var(--popup_background_color);
}
#context-viewer-header {
@@ -1667,7 +1720,7 @@ body {
padding: 5px;
- background-color: var(--background);
+ background-color: var(--popup_background_color);
margin-bottom: 3px;
}
@@ -1737,6 +1790,81 @@ body {
font-size: 50vh;
opacity: 0.7;
}
+
+/* Finder */
+
+#finder-container {
+ display: flex;
+ justify-content: center;
+ align-items: center;
+
+ position: absolute;
+ left: 0px;
+ top: 0px;
+
+ background-color: rgba(0, 0, 0, 0.5);
+ z-index: 99999999;
+
+ width: 100vw;
+ height: 100vh;
+}
+
+#finder {
+ width: 25%;
+ background-color: var(--flyout_background_pinned);
+ padding: 10px;
+ border-radius: 4px;
+}
+
+#finder-input {
+ width: 100%;
+ padding: 5px;
+ border: none;
+}
+
+.finder-result {
+ display: flex;
+ flex-direction: row;
+ justify-content: space-between;
+
+ padding: 3px;
+ margin-top: 5px;
+ background-color: var(--input_background);
+ cursor: pointer;
+}
+
+.result-selected { background-color: #273b48; }
+
+.result-title {
+ display: flex !important;
+ align-items: center;
+ white-space: pre;
+
+ font-weight: bold;
+ font-size: 15px;
+}
+
+.result-details {
+ margin-left: 15px;
+ opacity: 0.7;
+
+ /* Limit to 2 lines of text */
+
+ display: -webkit-box;
+ -webkit-line-clamp: 2;
+ -webkit-box-orient: vertical;
+ overflow: hidden;
+}
+
+.result-highlight {
+ background-color: rgb(112, 112, 31);
+}
+
+.result-icon {
+ display: flex !important;
+ align-items: center;
+}
+
/*---------------------------------- Global ------------------------------------------------*/
.hidden {
display: none;
@@ -1768,6 +1896,7 @@ input {
color: var(--enabled_button_text);
background-color: var(--enabled_button_background_color);
border-color: var(--enabled_button_border_color);
+ border-radius: var(--radius_button);
}
.action_button:hover {
@@ -1904,6 +2033,7 @@ h2 .material-icons-outlined {
.collapsable_header,
.section_header,
.help_text,
+.sample_order,
.noselect {
-webkit-touch-callout: none;
-webkit-user-select: none;
@@ -1934,3 +2064,22 @@ h2 .material-icons-outlined {
.rightSideMenu{
color: var(--flyout_text);
}
+
+
+@media (pointer: coarse), (hover: none) {
+ [title] {
+ position: relative;
+ display: inline-flex;
+ justify-content: center;
+ }
+ [title]:focus::after {
+ content: attr(title);
+ position: absolute;
+ top: 90%;
+ color: #000;
+ background-color: #fff;
+ border: 1px solid;
+ width: fit-content;
+ padding: 3px;
+ }
+}
diff --git a/static/koboldai.js b/static/koboldai.js
index f2c1f7df..8cad91fc 100644
--- a/static/koboldai.js
+++ b/static/koboldai.js
@@ -37,6 +37,27 @@ var shift_down = false;
var world_info_data = {};
var world_info_folder_data = {};
var saved_settings = {};
+var finder_selection_index = -1;
+
+// name, desc, icon, func
+const finder_actions = [
+ {name: "Load Model", icon: "folder_open", func: function() { socket.emit('load_model_button', {}); }},
+ {name: "New Story", icon: "description", func: function() { socket.emit('new_story', ''); }},
+ {name: "Load Story", icon: "folder_open", func: function() { socket.emit('load_story_list', ''); }},
+ {name: "Save Story", icon: "save", func: function() { socket.emit("save_story", null, (response) => {save_as_story(response);}); }},
+ {name: "Download Story", icon: "file_download", func: function() { document.getElementById('download_iframe').src = 'json'; }},
+
+ // Locations
+ {name: "Setting Presets", icon: "open_in_new", func: function() { highlightEl(".var_sync_model_selected_preset") }},
+ {name: "Memory", icon: "open_in_new", func: function() { highlightEl("#memory") }},
+ {name: "Author's Note", icon: "open_in_new", func: function() { highlightEl("#authors_notes") }},
+ {name: "Notes", icon: "open_in_new", func: function() { highlightEl(".var_sync_story_notes") }},
+ {name: "World Info", icon: "open_in_new", func: function() { highlightEl("#WI_Area") }},
+
+ // TODO: Direct theme selection
+ // {name: "", icon: "palette", func: function() { highlightEl("#biasing") }},
+];
+
const map1 = new Map()
map1.set('Top K Sampling', 0)
map1.set('Top A Sampling', 1)
@@ -56,6 +77,7 @@ map2.set(6, 'Repetition Penalty')
var calc_token_usage_timeout;
var game_text_scroll_timeout;
var var_processing_time = 0;
+var finder_last_input;
//-----------------------------------Server to UI Functions-----------------------------------------------
function connect() {
console.log("connected");
@@ -120,14 +142,11 @@ function create_options(data) {
//Set all options before the next chunk to hidden
var option_container = document.getElementById("Select Options");
var current_chunk = parseInt(document.getElementById("action_count").textContent)+1;
- var children = option_container.children;
- for (var i = 0; i < children.length; i++) {
- var chunk = children[i];
- if (chunk.id == "Select Options Chunk " + current_chunk) {
- chunk.classList.remove("hidden");
- } else {
- chunk.classList.add("hidden");
- }
+ if (document.getElementById("Select Options Chunk " + current_chunk)) {
+ document.getElementById("Select Options Chunk " + current_chunk).classList.remove("hidden")
+ }
+ if (document.getElementById("Select Options Chunk " + (current_chunk-1))) {
+ document.getElementById("Select Options Chunk " + (current_chunk-1)).classList.add("hidden")
}
if (document.getElementById("Select Options Chunk "+data.value.id)) {
@@ -410,6 +429,10 @@ function do_ai_busy(data) {
if (data.value) {
ai_busy_start = Date.now();
favicon.start_swap()
+ current_chunk = parseInt(document.getElementById("action_count").textContent)+1;
+ if (document.getElementById("Select Options Chunk " + current_chunk)) {
+ document.getElementById("Select Options Chunk " + current_chunk).classList.add("hidden")
+ }
} else {
runtime = Date.now() - ai_busy_start;
if (document.getElementById("Execution Time")) {
@@ -420,6 +443,9 @@ function do_ai_busy(data) {
for (item of document.getElementsByClassName('statusbar_outer')) {
item.classList.add("hidden");
}
+ if (document.getElementById("user_beep_on_complete").checked) {
+ beep();
+ }
}
}
@@ -776,10 +802,12 @@ function popup_items(data) {
var accept = document.getElementById("popup_accept");
if (this.getAttribute("valid") == "true") {
accept.classList.remove("disabled");
+ accept.disabled = false;
accept.setAttribute("selected_value", this.id);
} else {
accept.setAttribute("selected_value", "");
accept.classList.add("disabled");
+ accept.disabled = true;
if (this.getAttribute("folder") == "true") {
socket.emit("popup_change_folder", this.id);
}
@@ -805,10 +833,12 @@ function popup_items(data) {
var accept = document.getElementById("popup_accept");
if (this.getAttribute("valid") == "true") {
accept.classList.remove("disabled");
+ accept.disabled = false;
accept.setAttribute("selected_value", this.id);
} else {
accept.setAttribute("selected_value", "");
accept.classList.add("disabled");
+ accept.disabled = true;
if (this.getAttribute("folder") == "true") {
socket.emit("popup_change_folder", this.id);
}
@@ -817,7 +847,7 @@ function popup_items(data) {
for (item of popup_list) {
item.classList.remove("selected");
}
- this.classList.add("selected");
+ this.parentElement.classList.add("selected");
};
tr.append(td);
}
@@ -997,6 +1027,8 @@ function show_model_menu(data) {
model_list.append(list_item);
}
+ var accept = document.getElementById("btn_loadmodelaccept");
+ accept.disabled = true;
}
@@ -1153,6 +1185,7 @@ function selected_model_info(data) {
document.getElementById("modellayers").classList.add("hidden");
accept.classList.remove("disabled");
}
+ accept.disabled = false;
}
@@ -2945,6 +2978,23 @@ function detect_key_up(e) {
}
}
+function selectTab(tab) {
+ let tabTarget = document.getElementById(tab.getAttribute("tab-target"));
+ let tabClass = Array.from(tab.classList).filter((c) => c.startsWith("tab-"))[0];
+ let targetClass = Array.from(tabTarget.classList).filter((c) => c.startsWith("tab-target-"))[0];
+
+ $(`.${tabClass}`).removeClass("selected");
+ tab.classList.add("selected");
+
+ $(`.${targetClass}`).addClass("hidden");
+ tabTarget.classList.remove("hidden");
+}
+
+function beep() {
+ var snd = new Audio("data:audio/wav;base64,//uQRAAAAWMSLwUIYAAsYkXgoQwAEaYLWfkWgAI0wWs/ItAAAGDgYtAgAyN+QWaAAihwMWm4G8QQRDiMcCBcH3Cc+CDv/7xA4Tvh9Rz/y8QADBwMWgQAZG/ILNAARQ4GLTcDeIIIhxGOBAuD7hOfBB3/94gcJ3w+o5/5eIAIAAAVwWgQAVQ2ORaIQwEMAJiDg95G4nQL7mQVWI6GwRcfsZAcsKkJvxgxEjzFUgfHoSQ9Qq7KNwqHwuB13MA4a1q/DmBrHgPcmjiGoh//EwC5nGPEmS4RcfkVKOhJf+WOgoxJclFz3kgn//dBA+ya1GhurNn8zb//9NNutNuhz31f////9vt///z+IdAEAAAK4LQIAKobHItEIYCGAExBwe8jcToF9zIKrEdDYIuP2MgOWFSE34wYiR5iqQPj0JIeoVdlG4VD4XA67mAcNa1fhzA1jwHuTRxDUQ//iYBczjHiTJcIuPyKlHQkv/LHQUYkuSi57yQT//uggfZNajQ3Vmz+Zt//+mm3Wm3Q576v////+32///5/EOgAAADVghQAAAAA//uQZAUAB1WI0PZugAAAAAoQwAAAEk3nRd2qAAAAACiDgAAAAAAABCqEEQRLCgwpBGMlJkIz8jKhGvj4k6jzRnqasNKIeoh5gI7BJaC1A1AoNBjJgbyApVS4IDlZgDU5WUAxEKDNmmALHzZp0Fkz1FMTmGFl1FMEyodIavcCAUHDWrKAIA4aa2oCgILEBupZgHvAhEBcZ6joQBxS76AgccrFlczBvKLC0QI2cBoCFvfTDAo7eoOQInqDPBtvrDEZBNYN5xwNwxQRfw8ZQ5wQVLvO8OYU+mHvFLlDh05Mdg7BT6YrRPpCBznMB2r//xKJjyyOh+cImr2/4doscwD6neZjuZR4AgAABYAAAABy1xcdQtxYBYYZdifkUDgzzXaXn98Z0oi9ILU5mBjFANmRwlVJ3/6jYDAmxaiDG3/6xjQQCCKkRb/6kg/wW+kSJ5//rLobkLSiKmqP/0ikJuDaSaSf/6JiLYLEYnW/+kXg1WRVJL/9EmQ1YZIsv/6Qzwy5qk7/+tEU0nkls3/zIUMPKNX/6yZLf+kFgAfgGyLFAUwY//uQZAUABcd5UiNPVXAAAApAAAAAE0VZQKw9ISAAACgAAAAAVQIygIElVrFkBS+Jhi+EAuu+lKAkYUEIsmEAEoMeDmCETMvfSHTGkF5RWH7kz/ESHWPAq/kcCRhqBtMdokPdM7vil7RG98A2sc7zO6ZvTdM7pmOUAZTnJW+NXxqmd41dqJ6mLTXxrPpnV8avaIf5SvL7pndPvPpndJR9Kuu8fePvuiuhorgWjp7Mf/PRjxcFCPDkW31srioCExivv9lcwKEaHsf/7ow2Fl1T/9RkXgEhYElAoCLFtMArxwivDJJ+bR1HTKJdlEoTELCIqgEwVGSQ+hIm0NbK8WXcTEI0UPoa2NbG4y2K00JEWbZavJXkYaqo9CRHS55FcZTjKEk3NKoCYUnSQ0rWxrZbFKbKIhOKPZe1cJKzZSaQrIyULHDZmV5K4xySsDRKWOruanGtjLJXFEmwaIbDLX0hIPBUQPVFVkQkDoUNfSoDgQGKPekoxeGzA4DUvnn4bxzcZrtJyipKfPNy5w+9lnXwgqsiyHNeSVpemw4bWb9psYeq//uQZBoABQt4yMVxYAIAAAkQoAAAHvYpL5m6AAgAACXDAAAAD59jblTirQe9upFsmZbpMudy7Lz1X1DYsxOOSWpfPqNX2WqktK0DMvuGwlbNj44TleLPQ+Gsfb+GOWOKJoIrWb3cIMeeON6lz2umTqMXV8Mj30yWPpjoSa9ujK8SyeJP5y5mOW1D6hvLepeveEAEDo0mgCRClOEgANv3B9a6fikgUSu/DmAMATrGx7nng5p5iimPNZsfQLYB2sDLIkzRKZOHGAaUyDcpFBSLG9MCQALgAIg
Qs2YunOszLSAyQYPVC2YdGGeHD2dTdJk1pAHGAWDjnkcLKFymS3RQZTInzySoBwMG0QueC3gMsCEYxUqlrcxK6k1LQQcsmyYeQPdC2YfuGPASCBkcVMQQqpVJshui1tkXQJQV0OXGAZMXSOEEBRirXbVRQW7ugq7IM7rPWSZyDlM3IuNEkxzCOJ0ny2ThNkyRai1b6ev//3dzNGzNb//4uAvHT5sURcZCFcuKLhOFs8mLAAEAt4UWAAIABAAAAAB4qbHo0tIjVkUU//uQZAwABfSFz3ZqQAAAAAngwAAAE1HjMp2qAAAAACZDgAAAD5UkTE1UgZEUExqYynN1qZvqIOREEFmBcJQkwdxiFtw0qEOkGYfRDifBui9MQg4QAHAqWtAWHoCxu1Yf4VfWLPIM2mHDFsbQEVGwyqQoQcwnfHeIkNt9YnkiaS1oizycqJrx4KOQjahZxWbcZgztj2c49nKmkId44S71j0c8eV9yDK6uPRzx5X18eDvjvQ6yKo9ZSS6l//8elePK/Lf//IInrOF/FvDoADYAGBMGb7FtErm5MXMlmPAJQVgWta7Zx2go+8xJ0UiCb8LHHdftWyLJE0QIAIsI+UbXu67dZMjmgDGCGl1H+vpF4NSDckSIkk7Vd+sxEhBQMRU8j/12UIRhzSaUdQ+rQU5kGeFxm+hb1oh6pWWmv3uvmReDl0UnvtapVaIzo1jZbf/pD6ElLqSX+rUmOQNpJFa/r+sa4e/pBlAABoAAAAA3CUgShLdGIxsY7AUABPRrgCABdDuQ5GC7DqPQCgbbJUAoRSUj+NIEig0YfyWUho1VBBBA//uQZB4ABZx5zfMakeAAAAmwAAAAF5F3P0w9GtAAACfAAAAAwLhMDmAYWMgVEG1U0FIGCBgXBXAtfMH10000EEEEEECUBYln03TTTdNBDZopopYvrTTdNa325mImNg3TTPV9q3pmY0xoO6bv3r00y+IDGid/9aaaZTGMuj9mpu9Mpio1dXrr5HERTZSmqU36A3CumzN/9Robv/Xx4v9ijkSRSNLQhAWumap82WRSBUqXStV/YcS+XVLnSS+WLDroqArFkMEsAS+eWmrUzrO0oEmE40RlMZ5+ODIkAyKAGUwZ3mVKmcamcJnMW26MRPgUw6j+LkhyHGVGYjSUUKNpuJUQoOIAyDvEyG8S5yfK6dhZc0Tx1KI/gviKL6qvvFs1+bWtaz58uUNnryq6kt5RzOCkPWlVqVX2a/EEBUdU1KrXLf40GoiiFXK///qpoiDXrOgqDR38JB0bw7SoL+ZB9o1RCkQjQ2CBYZKd/+VJxZRRZlqSkKiws0WFxUyCwsKiMy7hUVFhIaCrNQsKkTIsLivwKKigsj8XYlwt/WKi2N4d//uQRCSAAjURNIHpMZBGYiaQPSYyAAABLAAAAAAAACWAAAAApUF/Mg+0aohSIRobBAsMlO//Kk4soosy1JSFRYWaLC4qZBYWFRGZdwqKiwkNBVmoWFSJkWFxX4FFRQWR+LsS4W/rFRb/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////VEFHAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAU291bmRib3kuZGUAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMjAwNGh0dHA6Ly93d3cuc291bmRib3kuZGUAAAAAAAAAACU=");
+ snd.play();
+}
+
function loadNAILorebook(data, filename) {
let lorebookVersion = data.lorebookVersion;
let wi_data = {folders: {[filename]: []}, entries: {}};
@@ -3028,6 +3078,120 @@ async function processDroppedFile(file) {
}
}
+function highlightEl(element) {
+ if (typeof element === "string") element = document.querySelector(element);
+ if (!element) {
+ console.error("Bad jump!")
+ return;
+ }
+
+ let area = $(element).closest(".tab-target")[0];
+
+ if (!area) {
+ console.error("No error? :^(");
+ return;
+ }
+
+ let tab = Array.from($(".tab")).filter((c) => c.getAttribute("tab-target") === area.id)[0];
+ tab.click();
+ element.scrollIntoView();
+}
+
+function addSearchListing(action, highlight) {
+ const finderContainer = document.getElementById("finder-container");
+ const finder = document.getElementById("finder");
+
+ let result = document.createElement("div");
+ result.classList.add("finder-result");
+ result.addEventListener("click", function(event) {
+ finderContainer.classList.add("hidden");
+ action.func();
+ });
+
+ let textblock = document.createElement("div");
+ textblock.classList.add("result-textbox");
+ result.appendChild(textblock);
+
+ let titleEl = document.createElement("span");
+ titleEl.classList.add("result-title");
+ titleEl.innerText = action.name;
+
+ // TODO: Sanitation
+ titleEl.innerHTML = titleEl.innerHTML.replace(
+ new RegExp(`(${highlight})`, "i"),
+ '$1'
+ );
+ textblock.appendChild(titleEl);
+
+ if (action.desc) {
+ let descriptionEl = document.createElement("span");
+ descriptionEl.classList.add("result-details");
+ descriptionEl.innerText = action.desc;
+ descriptionEl.innerHTML = descriptionEl.innerHTML.replace(
+ new RegExp(`(${highlight})`, "i"),
+ '$1'
+ );
+
+ // It can get cut off by CSS, so let's add a tooltip.
+ descriptionEl.setAttribute("title", action.desc);
+ textblock.appendChild(descriptionEl);
+ }
+
+ let icon = document.createElement("span");
+ icon.classList.add("result-icon");
+ icon.classList.add("material-icons-outlined");
+
+ // TODO: Change depending on what pressing enter does
+ icon.innerText = action.icon;
+ result.appendChild(icon)
+
+ finder.appendChild(result);
+
+ return result;
+}
+
+function updateSearchListings() {
+ const maxResultCount = 5;
+
+ if (this.value === finder_last_input) return;
+ finder_last_input = this.value;
+ finder_selection_index = -1;
+
+ let query = this.value.toLowerCase();
+
+ // TODO: Maybe reuse the element? Would it give better performance?
+ $(".finder-result").remove();
+
+ if (!query) return;
+
+ const actionMatches = {name: [], desc: []};
+
+ for (const action of finder_actions) {
+ if (action.name.toLowerCase().includes(query)) {
+ actionMatches.name.push(action);
+ } else if (action.desc && action.desc.toLowerCase().includes(query)) {
+ actionMatches.desc.push(action);
+ }
+ }
+
+ // Title matches over desc matches
+ const matchingActions = actionMatches.name.concat(actionMatches.desc);
+
+
+ for (let i=0;i= 0 ? finder_selection_index : 0;
+ actions[index].click();
+ } else if (event.key === "ArrowUp") {
+ delta = -1;
+ } else if (event.key === "ArrowDown") {
+ delta = 1
+ } else if (event.key === "Tab") {
+ delta = event.shiftKey ? -1 : 1;
+ } else {
+ return;
+ }
+
+ const actionsCount = actions.length;
+ let future = finder_selection_index + delta;
+
+ event.preventDefault();
+
+ if (future >= actionsCount) {
+ future = 0;
+ } else if (future < 0) {
+ future = actionsCount - 1;
+ }
+
+ finder_selection_index = future;
+ updateFinderSelection(delta);
+ });
+
+ finderContainer.addEventListener("click", function(e) {
+ finderContainer.classList.add("hidden");
+ });
+
+ finder.addEventListener("click", function(e) {
+ e.stopPropagation();
+ });
});
+
+document.addEventListener("keydown", function(event) {
+ const finderContainer = document.getElementById("finder-container");
+ if (event.key === "Escape") finderContainer.classList.add("hidden");
+
+ if (!event.ctrlKey) return;
+
+ switch (event.key) {
+ // TODO: Add other shortcuts
+ case "k":
+ const finderInput = document.getElementById("finder-input");
+ finderInput.value = "";
+ $(".finder-result").remove();
+ finder_selection_index = -1;
+
+ finderContainer.classList.remove("hidden");
+ finderInput.focus();
+
+ event.preventDefault();
+ break;
+}
+});
\ No newline at end of file
diff --git a/templates/index_new.html b/templates/index_new.html
index 3b4d77bb..a04ee9d1 100644
--- a/templates/index_new.html
+++ b/templates/index_new.html
@@ -143,5 +143,11 @@
upload_file
+
+