From 03d54364f4bcb3d24f0ca1128b572dfb28346d77 Mon Sep 17 00:00:00 2001 From: Ben Fox Date: Thu, 20 Jan 2022 15:18:43 -0500 Subject: [PATCH 01/28] Initial commit of the actions metadata variable population --- aiserver.py | 101 ++++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 98 insertions(+), 3 deletions(-) diff --git a/aiserver.py b/aiserver.py index 7c9598ce..c4d095cb 100644 --- a/aiserver.py +++ b/aiserver.py @@ -115,6 +115,12 @@ class vars: setauthornotetemplate = authornotetemplate # Saved author's note template in settings andepth = 3 # How far back in history to append author's note actions = structures.KoboldStoryRegister() # Actions submitted by user and AI + actions_metadata = [] # List of dictonaries, one dictonary for every action that contains information about the action like alternative options. + # Contains at least the same number of items as actions. Back action will remove an item from actions, but not actions_metadata + # Dictonary keys are: + # Selected Text: (text the user had selected. None when this is a newly generated action) + # Alternative Generated Text: {Text, Pinned, Previous Selection, Edited} + # worldinfo = [] # List of World Info key/value objects worldinfo_i = [] # List of World Info key/value objects sans uninitialized entries worldinfo_u = {} # Dictionary of World Info UID - key/value pairs @@ -1578,7 +1584,11 @@ def lua_set_chunk(k, v): del vars._actions[chunk-1] vars.lua_deleted.add(chunk) if(not hasattr(vars, "_actions") or vars._actions is not vars.actions): - del vars.actions[chunk-1] + #Instead of deleting we'll blank out the text. This way our actions and actions_metadata stay in sync and we can restore the chunk on an undo + vars.actions[chunk-1] = "" + vars.actions_metadata[chunk-1]['Alternative Text'] = [{"Text": vars.actions_metadata[chunk-1]['Selected Text'], "Pinned": False, "Editted": True})] + + vars.actions_metadata[chunk-1]['Alternative Text'] + vars.actions_metadata[chunk-1]['Selected Text'] = '' else: if(k == 0): print(colors.GREEN + f"{lua_log_format_name(vars.lua_koboldbridge.logging_name)} edited prompt chunk" + colors.END) @@ -1595,6 +1605,9 @@ def lua_set_chunk(k, v): vars._actions[chunk-1] = v vars.lua_edited.add(chunk) vars.actions[chunk-1] = v + vars.actions_metadata[chunk-1]['Alternative Text'] = [{"Text": vars.actions_metadata[chunk-1]['Selected Text'], "Pinned": False, "Editted": True})] + + vars.actions_metadata[chunk-1]['Alternative Text'] + vars.actions_metadata[chunk-1]['Selected Text'] = v #==================================================================# # Get model type as "gpt-2-xl", "gpt-neo-2.7B", etc. @@ -2427,6 +2440,18 @@ def actionsubmit(data, actionmode=0, force_submit=False, force_prompt_gen=False, vars.prompt = data else: vars.actions.append(data) + # we now need to update the actions_metadata + # we'll have two conditions. + # 1. This is totally new (user entered) + if len(vars.actions_metadata) < len(vars.actions): + vars.actions_metadata.append({"Selected Text": data, "Alternative Text": []}) + else: + # 2. 
We've selected a chunk of text that is was presented previously + alternatives = [item['Text'] for item in vars.actions_metadata[len(vars.actions)]["Alternative Text"]] + if data in alternatives: + alternatives = [item for item in vars.actions_metadata[len(vars.actions)]["Alternative Text"] if item['Text'] <> data] + vars.actions_metadata[len(vars.actions)]["Alternative Text"] = alternatives + vars.actions_metadata[len(vars.actions)]["Selected Text"] = data update_story_chunk('last') if(not vars.noai and vars.lua_koboldbridge.generating): @@ -2482,6 +2507,14 @@ def actionretry(data): # Remove last action if possible and resubmit if(vars.gamestarted if vars.useprompt else len(vars.actions) > 0): if(not vars.recentback and len(vars.actions) != 0 and len(vars.genseqs) == 0): # Don't pop if we're in the "Select sequence to keep" menu or if there are no non-prompt actions + # We are going to move the selected text to alternative text in the actions_metadata variable so we can redo this action + vars.actions_metadata[vars.actions]['Alternative Text'] = [{'Text': vars.actions_metadata[vars.actions]['Selected Text'], + 'Pinned': False, + "Previous Selection": True, + "Edited": False}] + vars.actions_metadata[vars.actions]['Alternative Text'] + vars.actions_metadata[vars.actions]['Selected Text'] = "" + + last_key = vars.actions.get_last_key() vars.actions.pop() remove_story_chunk(last_key + 1) @@ -2500,6 +2533,13 @@ def actionback(): return # Remove last index of actions and refresh game screen if(len(vars.genseqs) == 0 and len(vars.actions) > 0): + # We are going to move the selected text to alternative text in the actions_metadata variable so we can redo this action + vars.actions_metadata[vars.actions]['Alternative Text'] = [{'Text': vars.actions_metadata[vars.actions]['Selected Text'], + 'Pinned': False, + "Previous Selection": True, + "Edited": False}] + vars.actions_metadata[vars.actions]['Alternative Text'] + vars.actions_metadata[vars.actions]['Selected Text'] = "" + last_key = vars.actions.get_last_key() vars.actions.pop() vars.recentback = True @@ -2900,6 +2940,7 @@ def genresult(genout, flash=True): vars.prompt = genout else: vars.actions.append(genout) + vars.actions_metadata.append({'Selected Text': genout, 'Alternative Text': []}) update_story_chunk('last') if(flash): emit('from_server', {'cmd': 'texteffect', 'data': vars.actions.get_last_key() + 1 if len(vars.actions) else 0}, broadcast=True) @@ -2915,9 +2956,23 @@ def genselect(genout): print("{0}[Result {1}]\n{2}{3}".format(colors.CYAN, i, result["generated_text"], colors.END)) i += 1 + # Add the options to the actions metadata + # If we've already generated text for this action but haven't selected one we'll want to kill all non-pinned, non-previous selection, and non-edited options then add the new ones + if vars.actions_metadata[len(vars.actions)+1]['Selected Text'] == "": + vars.actions_metadata[len(vars.actions)]['Alternative Text'] = [{"Text": item['Text'], "Pinned": item['Pinned'], + "Previous Selection": item["Previous Selection"], + "Edited": item["Edited"]} for item in vars.actions_metadata[len(vars.actions)]['Alternative Text'] + if item['Pinned'] or item["Previous Selection"] or item["Edited"]] + + [{"Text": text["generated_text"], "Pinned": False, "Previous Selection": False, "Edited": False} for text in genout] + else: + vars.actions_metadata.append({'Selected Text': '', 'Alternative Text': [{"Text": text["generated_text"], "Pinned": False, "Previous Selection": False, "Edited": False} for text in genout]}) + + 
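
For reference, every element this patch appends to vars.actions_metadata has the shape sketched below (inferred from the hunks above, not code from the repository; the key names follow the code, which uses "Alternative Text" rather than the "Alternative Generated Text" mentioned in the class comment, and the sample strings are invented):

    example_entry = {
        "Selected Text": "The knight drew his sword.",  # text currently in the story; "" once undone or blanked
        "Alternative Text": [
            {
                "Text": "The knight sheathed his blade.",  # one candidate generation for this slot
                "Pinned": False,              # True keeps the option across regenerations
                "Previous Selection": False,  # True once the text was selected and later undone (enables redo)
                "Edited": False,              # True when an edit replaced this text
            },
        ],
    }
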
genout = [item['Text'] for item in vars.actions_metadata[len(vars.actions)+1]['Alternative Text'] if item['Pinned']] + [item["generated_text"] for item in genout] + # Store sequences in memory until selection is made vars.genseqs = genout + # Send sequences to UI for selection emit('from_server', {'cmd': 'genseqs', 'data': genout}, broadcast=True) @@ -2930,6 +2985,9 @@ def selectsequence(n): vars.lua_koboldbridge.feedback = vars.genseqs[int(n)]["generated_text"] if(len(vars.lua_koboldbridge.feedback) != 0): vars.actions.append(vars.lua_koboldbridge.feedback) + #We'll want to remove the option from the alternative text and put it in selected text + vars.actions_metadata[len(vars.actions)]['Alternative Text'] = [item for item in vars.actions_metadata[vars.actions]['Alternative Text'] if item['Text'] != vars.lua_koboldbridge.feedback] + vars.actions_metadata[chunk-1]['Selected Text'] = vars.lua_koboldbridge.feedback update_story_chunk('last') emit('from_server', {'cmd': 'texteffect', 'data': vars.actions.get_last_key() + 1 if len(vars.actions) else 0}, broadcast=True) emit('from_server', {'cmd': 'hidegenseqs', 'data': ''}, broadcast=True) @@ -3330,6 +3388,10 @@ def editsubmit(data): if(vars.editln == 0): vars.prompt = data else: + vars.actions_metadata[vars.editln-1]['Alternative Text'] = vars.actions_metadata[vars.editln-1]['Alternative Text'] + [{"Text": vars.actions[vars.editln-1], "Pinned": False, + "Previous Selection": False, + "Edited": True}] + vars.actions_metadata[vars.editln-1]['Selected Text'] = data vars.actions[vars.editln-1] = data vars.mode = "play" @@ -3347,7 +3409,10 @@ def deleterequest(): # Send error message pass else: - del vars.actions[vars.editln-1] + vars.actions_metadata[vars.editln-1]['Alternative Text'] = [{"Text": vars.actions[vars.editln-1], "Pinned": False, + "Previous Selection": True, "Edited": False}] + vars.actions_metadata[vars.editln-1]['Alternative Text'] + vars.actions_metadata[vars.editln-1]['Selected Text'] = '' + vars.actions[vars.editln-1] = '' vars.mode = "play" remove_story_chunk(vars.editln) emit('from_server', {'cmd': 'editmode', 'data': 'false'}) @@ -3364,6 +3429,10 @@ def inlineedit(chunk, data): vars.prompt = data else: if(chunk-1 in vars.actions): + vars.actions_metadata[chunk-1]['Alternative Text'] = vars.actions_metadata[chunk-1]['Alternative Text'] + [{"Text": vars.actions[chunk-1], "Pinned": False, + "Previous Selection": False, + "Edited": True}] + vars.actions_metadata[chunk-1]['Selected Text'] = data vars.actions[chunk-1] = data setgamesaved(False) @@ -3385,7 +3454,11 @@ def inlinedelete(chunk): emit('from_server', {'cmd': 'editmode', 'data': 'false'}, broadcast=True) else: if(chunk-1 in vars.actions): - del vars.actions[chunk-1] + vars.actions_metadata[chunk-1]['Alternative Text'] = [{"Text": vars.actions[chunk-1], "Pinned": False, + "Previous Selection": True, + "Edited": False}] + vars.actions_metadata[chunk-1]['Alternative Text'] + vars.actions_metadata[chunk-1]['Selected Text'] = '' + vars.actions[chunk-1] = '' setgamesaved(False) remove_story_chunk(chunk) emit('from_server', {'cmd': 'editmode', 'data': 'false'}, broadcast=True) @@ -3776,6 +3849,15 @@ def ikrequest(txt): print("{0}{1}{2}".format(colors.CYAN, genout, colors.END)) vars.actions.append(genout) + if len(vars.actions_metadata) < len(vars.actions): + vars.actions_metadata.append({"Selected Text": genout, "Alternative Text": []}) + else: + # 2. 
We've selected a chunk of text that is was presented previously + alternatives = [item['Text'] for item in vars.actions_metadata[len(vars.actions)]["Alternative Text"]] + if genout in alternatives: + alternatives = [item for item in vars.actions_metadata[len(vars.actions)]["Alternative Text"] if item['Text'] <> genout] + vars.actions_metadata[len(vars.actions)]["Alternative Text"] = alternatives + vars.actions_metadata[len(vars.actions)]["Selected Text"] = genout update_story_chunk('last') emit('from_server', {'cmd': 'texteffect', 'data': vars.actions.get_last_key() + 1 if len(vars.actions) else 0}, broadcast=True) @@ -3835,6 +3917,15 @@ def oairequest(txt, min, max): print("{0}{1}{2}".format(colors.CYAN, genout, colors.END)) vars.actions.append(genout) + if len(vars.actions_metadata) < len(vars.actions): + vars.actions_metadata.append({"Selected Text": genout, "Alternative Text": []}) + else: + # 2. We've selected a chunk of text that is was presented previously + alternatives = [item['Text'] for item in vars.actions_metadata[len(vars.actions)]["Alternative Text"]] + if genout in alternatives: + alternatives = [item for item in vars.actions_metadata[len(vars.actions)]["Alternative Text"] if item['Text'] <> genout] + vars.actions_metadata[len(vars.actions)]["Alternative Text"] = alternatives + vars.actions_metadata[len(vars.actions)]["Selected Text"] = genout update_story_chunk('last') emit('from_server', {'cmd': 'texteffect', 'data': vars.actions.get_last_key() + 1 if len(vars.actions) else 0}, broadcast=True) @@ -4603,6 +4694,10 @@ if(vars.model in ("TPUMeshTransformerGPTJ",)): }, ).start() +@app.route('/action_metadata') +def show_action_metadata(): + return vars.actions_metadata + #==================================================================# # Final startup commands to launch Flask app #==================================================================# From c9a99adde8908a64e771da108283c19743e1e140 Mon Sep 17 00:00:00 2001 From: ebolam Date: Fri, 21 Jan 2022 07:41:04 -0500 Subject: [PATCH 02/28] Add files via upload --- static/pin.svg | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 static/pin.svg diff --git a/static/pin.svg b/static/pin.svg new file mode 100644 index 00000000..88452ea0 --- /dev/null +++ b/static/pin.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file From d31fb278ce3801371e33d32f3921eeb97717cc89 Mon Sep 17 00:00:00 2001 From: ebolam Date: Fri, 21 Jan 2022 15:30:37 -0500 Subject: [PATCH 03/28] Working redo and pin options --- aiserver.py | 94 ++++++++++++++++++++++++++++++++----------- static/application.js | 27 ++++++++++++- templates/index.html | 1 + 3 files changed, 98 insertions(+), 24 deletions(-) diff --git a/aiserver.py b/aiserver.py index 0ae1469f..eef93ab9 100644 --- a/aiserver.py +++ b/aiserver.py @@ -1128,6 +1128,7 @@ def download(): js["authorsnote"] = vars.authornote js["anotetemplate"] = vars.authornotetemplate js["actions"] = tuple(vars.actions.values()) + js["actions_metadata"] = vars.actions_metadata js["worldinfo"] = [] # Extract only the important bits of WI @@ -1586,8 +1587,7 @@ def lua_set_chunk(k, v): if(not hasattr(vars, "_actions") or vars._actions is not vars.actions): #Instead of deleting we'll blank out the text. 
This way our actions and actions_metadata stay in sync and we can restore the chunk on an undo vars.actions[chunk-1] = "" - vars.actions_metadata[chunk-1]['Alternative Text'] = [{"Text": vars.actions_metadata[chunk-1]['Selected Text'], "Pinned": False, "Editted": True})] + - vars.actions_metadata[chunk-1]['Alternative Text'] + vars.actions_metadata[chunk-1]['Alternative Text'] = [{"Text": vars.actions_metadata[chunk-1]['Selected Text'], "Pinned": False, "Editted": True}] + vars.actions_metadata[chunk-1]['Alternative Text'] vars.actions_metadata[chunk-1]['Selected Text'] = '' else: if(k == 0): @@ -1605,8 +1605,7 @@ def lua_set_chunk(k, v): vars._actions[chunk-1] = v vars.lua_edited.add(chunk) vars.actions[chunk-1] = v - vars.actions_metadata[chunk-1]['Alternative Text'] = [{"Text": vars.actions_metadata[chunk-1]['Selected Text'], "Pinned": False, "Editted": True})] + - vars.actions_metadata[chunk-1]['Alternative Text'] + vars.actions_metadata[chunk-1]['Alternative Text'] = [{"Text": vars.actions_metadata[chunk-1]['Selected Text'], "Pinned": False, "Editted": True}] + vars.actions_metadata[chunk-1]['Alternative Text'] vars.actions_metadata[chunk-1]['Selected Text'] = v #==================================================================# @@ -1836,6 +1835,9 @@ def get_message(msg): # Back/Undo Action elif(msg['cmd'] == 'back'): actionback() + # Forward/Redo Action + elif(msg['cmd'] == 'redo'): + actionredo() # EditMode Action (old) elif(msg['cmd'] == 'edit'): if(vars.mode == "play"): @@ -2063,6 +2065,8 @@ def get_message(msg): vars.saveow = False elif(msg['cmd'] == 'seqsel'): selectsequence(msg['data']) + elif(msg['cmd'] == 'seqpin'): + pinsequence(msg['data']) elif(msg['cmd'] == 'setnumseq'): vars.numseqs = int(msg['data']) emit('from_server', {'cmd': 'setlabelnumseq', 'data': msg['data']}) @@ -2449,7 +2453,7 @@ def actionsubmit(data, actionmode=0, force_submit=False, force_prompt_gen=False, # 2. 
We've selected a chunk of text that is was presented previously alternatives = [item['Text'] for item in vars.actions_metadata[len(vars.actions)]["Alternative Text"]] if data in alternatives: - alternatives = [item for item in vars.actions_metadata[len(vars.actions)]["Alternative Text"] if item['Text'] <> data] + alternatives = [item for item in vars.actions_metadata[len(vars.actions)]["Alternative Text"] if item['Text'] != data] vars.actions_metadata[len(vars.actions)]["Alternative Text"] = alternatives vars.actions_metadata[len(vars.actions)]["Selected Text"] = data update_story_chunk('last') @@ -2534,11 +2538,11 @@ def actionback(): # Remove last index of actions and refresh game screen if(len(vars.genseqs) == 0 and len(vars.actions) > 0): # We are going to move the selected text to alternative text in the actions_metadata variable so we can redo this action - vars.actions_metadata[vars.actions]['Alternative Text'] = [{'Text': vars.actions_metadata[vars.actions]['Selected Text'], + vars.actions_metadata[len(vars.actions)-1]['Alternative Text'] = [{'Text': vars.actions_metadata[len(vars.actions)-1]['Selected Text'], 'Pinned': False, "Previous Selection": True, - "Edited": False}] + vars.actions_metadata[vars.actions]['Alternative Text'] - vars.actions_metadata[vars.actions]['Selected Text'] = "" + "Edited": False}] + vars.actions_metadata[len(vars.actions)-1]['Alternative Text'] + vars.actions_metadata[len(vars.actions)-1]['Selected Text'] = "" last_key = vars.actions.get_last_key() vars.actions.pop() @@ -2548,6 +2552,19 @@ def actionback(): emit('from_server', {'cmd': 'errmsg', 'data': "Cannot delete the prompt."}) else: vars.genseqs = [] + +def actionredo(): + i = 0 + + genout = [{"generated_text": item['Text']} for item in vars.actions_metadata[len(vars.actions)]['Alternative Text'] if (item["Previous Selection"]==True)] + + # Store sequences in memory until selection is made + vars.genseqs = genout + + + # Send sequences to UI for selection + genout = [[item['Text'], True] for item in vars.actions_metadata[len(vars.actions)]['Alternative Text'] if (item["Previous Selection"]==True)] + emit('from_server', {'cmd': 'genseqs', 'data': genout}, broadcast=True) #==================================================================# # @@ -2940,7 +2957,10 @@ def genresult(genout, flash=True): vars.prompt = genout else: vars.actions.append(genout) - vars.actions_metadata.append({'Selected Text': genout, 'Alternative Text': []}) + if len(vars.actions) > len(vars.actions_metadata): + vars.actions_metadata.append({'Selected Text': genout, 'Alternative Text': []}) + else: + vars.actions_metadata[len(vars.actions)-1]['Selected Text'] = genout update_story_chunk('last') if(flash): emit('from_server', {'cmd': 'texteffect', 'data': vars.actions.get_last_key() + 1 if len(vars.actions) else 0}, broadcast=True) @@ -2958,21 +2978,25 @@ def genselect(genout): # Add the options to the actions metadata # If we've already generated text for this action but haven't selected one we'll want to kill all non-pinned, non-previous selection, and non-edited options then add the new ones - if vars.actions_metadata[len(vars.actions)+1]['Selected Text'] == "": - vars.actions_metadata[len(vars.actions)]['Alternative Text'] = [{"Text": item['Text'], "Pinned": item['Pinned'], - "Previous Selection": item["Previous Selection"], - "Edited": item["Edited"]} for item in vars.actions_metadata[len(vars.actions)]['Alternative Text'] - if item['Pinned'] or item["Previous Selection"] or item["Edited"]] + - [{"Text": 
text["generated_text"], "Pinned": False, "Previous Selection": False, "Edited": False} for text in genout] + if (len(vars.actions_metadata) > len(vars.actions)): + if (vars.actions_metadata[len(vars.actions)]['Selected Text'] == ""): + vars.actions_metadata[len(vars.actions)]['Alternative Text'] = [{"Text": item['Text'], "Pinned": item['Pinned'], + "Previous Selection": item["Previous Selection"], + "Edited": item["Edited"]} for item in vars.actions_metadata[len(vars.actions)]['Alternative Text'] + if item['Pinned'] or item["Previous Selection"] or item["Edited"]] + [{"Text": text["generated_text"], + "Pinned": False, "Previous Selection": False, "Edited": False} for text in genout] + else: + vars.actions_metadata.append({'Selected Text': '', 'Alternative Text': [{"Text": text["generated_text"], "Pinned": False, "Previous Selection": False, "Edited": False} for text in genout]}) else: vars.actions_metadata.append({'Selected Text': '', 'Alternative Text': [{"Text": text["generated_text"], "Pinned": False, "Previous Selection": False, "Edited": False} for text in genout]}) - genout = [item['Text'] for item in vars.actions_metadata[len(vars.actions)+1]['Alternative Text'] if item['Pinned']] + [item["generated_text"] for item in genout] - + genout = [{"generated_text": item['Text']} for item in vars.actions_metadata[len(vars.actions)]['Alternative Text'] if (item["Previous Selection"]==False) and (item["Edited"]==False)] + # Store sequences in memory until selection is made vars.genseqs = genout - + genout = [[item['Text'], item['Pinned']] for item in vars.actions_metadata[len(vars.actions)]['Alternative Text'] if (item["Previous Selection"]==False) and (item["Edited"]==False)] + print(genout) # Send sequences to UI for selection emit('from_server', {'cmd': 'genseqs', 'data': genout}, broadcast=True) @@ -2986,8 +3010,8 @@ def selectsequence(n): if(len(vars.lua_koboldbridge.feedback) != 0): vars.actions.append(vars.lua_koboldbridge.feedback) #We'll want to remove the option from the alternative text and put it in selected text - vars.actions_metadata[len(vars.actions)]['Alternative Text'] = [item for item in vars.actions_metadata[vars.actions]['Alternative Text'] if item['Text'] != vars.lua_koboldbridge.feedback] - vars.actions_metadata[chunk-1]['Selected Text'] = vars.lua_koboldbridge.feedback + vars.actions_metadata[len(vars.actions)-1]['Alternative Text'] = [item for item in vars.actions_metadata[len(vars.actions)-1]['Alternative Text'] if item['Text'] != vars.lua_koboldbridge.feedback] + vars.actions_metadata[len(vars.actions)-1]['Selected Text'] = vars.lua_koboldbridge.feedback update_story_chunk('last') emit('from_server', {'cmd': 'texteffect', 'data': vars.actions.get_last_key() + 1 if len(vars.actions) else 0}, broadcast=True) emit('from_server', {'cmd': 'hidegenseqs', 'data': ''}, broadcast=True) @@ -2996,6 +3020,21 @@ def selectsequence(n): if(vars.lua_koboldbridge.restart_sequence is not None): actionsubmit("", actionmode=vars.actionmode, force_submit=True, disable_recentrng=True) +#==================================================================# +# Pin/Unpin the selected sequence +#==================================================================# +def pinsequence(n): + if n.isnumeric(): + text = vars.genseqs[int(n)]['generated_text'] + if text in [item['Text'] for item in vars.actions_metadata[len(vars.actions)]['Alternative Text']]: + alternatives = vars.actions_metadata[len(vars.actions)]['Alternative Text'] + for i in range(len(alternatives)): + if alternatives[i]['Text'] == 
text: + alternatives[i]['Pinned'] = not alternatives[i]['Pinned'] + break + vars.actions_metadata[len(vars.actions)]['Alternative Text'] = alternatives + + #==================================================================# # Send transformers-style request to ngrok/colab host #==================================================================# @@ -3855,7 +3894,7 @@ def ikrequest(txt): # 2. We've selected a chunk of text that is was presented previously alternatives = [item['Text'] for item in vars.actions_metadata[len(vars.actions)]["Alternative Text"]] if genout in alternatives: - alternatives = [item for item in vars.actions_metadata[len(vars.actions)]["Alternative Text"] if item['Text'] <> genout] + alternatives = [item for item in vars.actions_metadata[len(vars.actions)]["Alternative Text"] if item['Text'] != genout] vars.actions_metadata[len(vars.actions)]["Alternative Text"] = alternatives vars.actions_metadata[len(vars.actions)]["Selected Text"] = genout update_story_chunk('last') @@ -3923,7 +3962,7 @@ def oairequest(txt, min, max): # 2. We've selected a chunk of text that is was presented previously alternatives = [item['Text'] for item in vars.actions_metadata[len(vars.actions)]["Alternative Text"]] if genout in alternatives: - alternatives = [item for item in vars.actions_metadata[len(vars.actions)]["Alternative Text"] if item['Text'] <> genout] + alternatives = [item for item in vars.actions_metadata[len(vars.actions)]["Alternative Text"] if item['Text'] != genout] vars.actions_metadata[len(vars.actions)]["Alternative Text"] = alternatives vars.actions_metadata[len(vars.actions)]["Selected Text"] = genout update_story_chunk('last') @@ -4053,6 +4092,7 @@ def saveRequest(savpath): js["authorsnote"] = vars.authornote js["anotetemplate"] = vars.authornotetemplate js["actions"] = tuple(vars.actions.values()) + js["actions_metadata"] = vars.actions_metadata js["worldinfo"] = [] js["wifolders_d"] = vars.wifolders_d js["wifolders_l"] = vars.wifolders_l @@ -4178,6 +4218,14 @@ def loadRequest(loadpath, filename=None): vars.actions = structures.KoboldStoryRegister() actions = collections.deque(js["actions"]) + if "actions_metadata" in js: + vars.actions_metadata = js["actions_metadata"] + else: + print(js["actions"]) + print([{'Selected Text': text, 'Alternative Text': []} for text in js["actions"]]) + vars.actions_metadata = [{'Selected Text': text, 'Alternative Text': []} for text in js["actions"]] + + if(len(vars.prompt.strip()) == 0): while(len(actions)): action = actions.popleft() @@ -4699,7 +4747,7 @@ if(vars.model in ("TPUMeshTransformerGPTJ",)): @app.route('/action_metadata') def show_action_metadata(): - return vars.actions_metadata + return "{}".format(vars.actions_metadata) #==================================================================# # Final startup commands to launch Flask app diff --git a/static/application.js b/static/application.js index 14d7de37..07829de1 100644 --- a/static/application.js +++ b/static/application.js @@ -25,6 +25,7 @@ var button_mode_label; var button_send; var button_actmem; var button_actback; +var button_actfwd; var button_actretry; var button_actwi; var game_text; @@ -1160,10 +1161,27 @@ function parsegenseqs(seqs) { seqselcontents.html(""); var i; for(i=0; i"+seqs[i].generated_text+""); + if (seqs[i][1]) { + color = "white" + } else { + color = "grey" + } + seqselcontents.append("
"+seqs[i][0]+"
"); $("#seqsel"+i).on("click", function () { socket.send({'cmd': 'seqsel', 'data': $(this).attr("n")}); }); + $("#seqselpin"+i).on("click", function () { + socket.send({'cmd': 'seqpin', 'data': $(this).attr("n")}); + if ($(this).attr("style") == "color: grey") { + console.log($(this).attr("style")); + $(this).css({"color": "white"}); + console.log($(this).attr("style")); + } else { + console.log($(this).attr("style")); + $(this).css({"color": "grey"}); + console.log($(this).attr("style")); + } + }); } $('#seqselmenu').slideDown("slow"); } @@ -1741,6 +1759,7 @@ $(document).ready(function(){ button_send = $('#btnsend'); button_actmem = $('#btn_actmem'); button_actback = $('#btn_actundo'); + button_actfwd = $('#btn_actredo'); button_actretry = $('#btn_actretry'); button_actwi = $('#btn_actwi'); game_text = $('#gametext'); @@ -2316,6 +2335,12 @@ $(document).ready(function(){ hidegenseqs(); }); + button_actfwd.on("click", function(ev) { + hideMessage(); + socket.send({'cmd': 'redo', 'data': ''}); + hidegenseqs(); + }); + button_actmem.on("click", function(ev) { socket.send({'cmd': 'memory', 'data': ''}); }); diff --git a/templates/index.html b/templates/index.html index be53a03d..9c08fa10 100644 --- a/templates/index.html +++ b/templates/index.html @@ -123,6 +123,7 @@ + From 2010e7b9bc56c4aae9c198b28218e0400e9e1c44 Mon Sep 17 00:00:00 2001 From: ebolam Date: Fri, 21 Jan 2022 19:02:56 -0500 Subject: [PATCH 04/28] Added saveas option for saving without metadata information Fixed redo on an empty story erroring Fixed redo when you're at the current end of a chain causing an error --- aiserver.py | 37 ++++++++++++++++++++++--------------- static/application.js | 20 +++++++++++--------- templates/index.html | 4 +++- 3 files changed, 36 insertions(+), 25 deletions(-) diff --git a/aiserver.py b/aiserver.py index eef93ab9..e3c9df71 100644 --- a/aiserver.py +++ b/aiserver.py @@ -2555,16 +2555,19 @@ def actionback(): def actionredo(): i = 0 - - genout = [{"generated_text": item['Text']} for item in vars.actions_metadata[len(vars.actions)]['Alternative Text'] if (item["Previous Selection"]==True)] - - # Store sequences in memory until selection is made - vars.genseqs = genout - - - # Send sequences to UI for selection - genout = [[item['Text'], True] for item in vars.actions_metadata[len(vars.actions)]['Alternative Text'] if (item["Previous Selection"]==True)] - emit('from_server', {'cmd': 'genseqs', 'data': genout}, broadcast=True) + if len(vars.actions) < len(vars.actions_metadata): + genout = [{"generated_text": item['Text']} for item in vars.actions_metadata[len(vars.actions)]['Alternative Text'] if (item["Previous Selection"]==True)] + genout = genout + [{"generated_text": item['Text']} for item in vars.actions_metadata[len(vars.actions)]['Alternative Text'] if (item["Pinned"]==True) and (item["Previous Selection"]==False)] + + # Store sequences in memory until selection is made + vars.genseqs = genout + + + # Send sequences to UI for selection + genout = [[item['Text'], True] for item in vars.actions_metadata[len(vars.actions)]['Alternative Text'] if (item["Previous Selection"]==True)] + emit('from_server', {'cmd': 'genseqs', 'data': genout}, broadcast=True) + else: + emit('from_server', {'cmd': 'popuperror', 'data': "There's nothing to undo"}, broadcast=True) #==================================================================# # @@ -2996,7 +2999,7 @@ def genselect(genout): vars.genseqs = genout genout = [[item['Text'], item['Pinned']] for item in 
vars.actions_metadata[len(vars.actions)]['Alternative Text'] if (item["Previous Selection"]==False) and (item["Edited"]==False)] - print(genout) + # Send sequences to UI for selection emit('from_server', {'cmd': 'genseqs', 'data': genout}, broadcast=True) @@ -3995,12 +3998,15 @@ def exitModes(): #==================================================================# # Launch in-browser save prompt #==================================================================# -def saveas(name): +def saveas(data): + + name = data['name'] + savepins = data['pins'] # Check if filename exists already name = utils.cleanfilename(name) if(not fileops.saveexists(name) or (vars.saveow and vars.svowname == name)): # All clear to save - e = saveRequest(fileops.storypath(name)) + e = saveRequest(fileops.storypath(name), savepins=savepins) vars.saveow = False vars.svowname = "" if(e is None): @@ -4076,7 +4082,7 @@ def savetofile(): #==================================================================# # Save the story to specified path #==================================================================# -def saveRequest(savpath): +def saveRequest(savpath, savepins=True): if(savpath): # Leave Edit/Memory mode before continuing exitModes() @@ -4092,7 +4098,8 @@ def saveRequest(savpath): js["authorsnote"] = vars.authornote js["anotetemplate"] = vars.authornotetemplate js["actions"] = tuple(vars.actions.values()) - js["actions_metadata"] = vars.actions_metadata + if savepins: + js["actions_metadata"] = vars.actions_metadata js["worldinfo"] = [] js["wifolders_d"] = vars.wifolders_d js["wifolders_l"] = vars.wifolders_l diff --git a/static/application.js b/static/application.js index 07829de1..e7268167 100644 --- a/static/application.js +++ b/static/application.js @@ -50,6 +50,7 @@ var aidg_accept; var aidg_close; var saveaspopup; var saveasinput; +var savepins; var topic; var saveas_accept; var saveas_close; @@ -749,7 +750,7 @@ function enterMemoryMode() { setchatnamevisibility(false); showMessage("Edit the memory to be sent with each request to the AI."); button_actmem.html("Cancel"); - hide([button_actback, button_actretry, button_actwi]); + hide([button_actback, button_actfwd, button_actretry, button_actwi]); // Display Author's Note field anote_menu.slideDown("fast"); } @@ -760,7 +761,7 @@ function exitMemoryMode() { setchatnamevisibility(chatmode); hideMessage(); button_actmem.html("Memory"); - show([button_actback, button_actretry, button_actwi]); + show([button_actback, button_actfwd, button_actretry, button_actwi]); input_text.val(""); // Hide Author's Note field anote_menu.slideUp("fast"); @@ -769,7 +770,7 @@ function exitMemoryMode() { function enterWiMode() { showMessage("World Info will be added to memory only when the key appears in submitted text or the last action."); button_actwi.html("Accept"); - hide([button_actback, button_actmem, button_actretry, game_text]); + hide([button_actback, button_actfwd, button_actmem, button_actretry, game_text]); setchatnamevisibility(false); show([wi_menu]); disableSendBtn(); @@ -781,7 +782,7 @@ function exitWiMode() { button_actwi.html("W Info"); hide([wi_menu]); setchatnamevisibility(chatmode); - show([button_actback, button_actmem, button_actretry, game_text]); + show([button_actback, button_actfwd, button_actmem, button_actretry, game_text]); enableSendBtn(); $("#gamescreen").removeClass("wigamescreen"); } @@ -885,7 +886,7 @@ function hideSaveAsPopup() { } function sendSaveAsRequest() { - socket.send({'cmd': 'saveasrequest', 'data': saveasinput.val()}); + 
socket.send({'cmd': 'saveasrequest', 'data': {"name": saveasinput.val(), "pins": savepins.val()}); } function showLoadPopup() { @@ -1143,9 +1144,9 @@ function updateSPStatItems(items) { function setStartState() { enableSendBtn(); enableButtons([button_actmem, button_actwi]); - disableButtons([button_actback, button_actretry]); + disableButtons([button_actback, button_actfwd, button_actretry]); hide([wi_menu]); - show([game_text, button_actmem, button_actwi, button_actback, button_actretry]); + show([game_text, button_actmem, button_actwi, button_actback, button_actfwd, button_actretry]); hideMessage(); hideWaitAnimation(); button_actmem.html("Memory"); @@ -1784,6 +1785,7 @@ $(document).ready(function(){ aidg_close = $("#btn_aidgpopupclose"); saveaspopup = $("#saveascontainer"); saveasinput = $("#savename"); + savepins = $("#savepins"); topic = $("#topic"); saveas_accept = $("#btn_saveasaccept"); saveas_close = $("#btn_saveasclose"); @@ -1932,13 +1934,13 @@ $(document).ready(function(){ // Enable or Disable buttons if(msg.data == "ready") { enableSendBtn(); - enableButtons([button_actmem, button_actwi, button_actback, button_actretry]); + enableButtons([button_actmem, button_actwi, button_actback, button_actfwd, button_actretry]); hideWaitAnimation(); gamestate = "ready"; } else if(msg.data == "wait") { gamestate = "wait"; disableSendBtn(); - disableButtons([button_actmem, button_actwi, button_actback, button_actretry]); + disableButtons([button_actmem, button_actwi, button_actback, button_actfwd, button_actretry]); showWaitAnimation(); } else if(msg.data == "start") { setStartState(); diff --git a/templates/index.html b/templates/index.html index 9c08fa10..8afbf9af 100644 --- a/templates/index.html +++ b/templates/index.html @@ -229,7 +229,9 @@
Enter Name For Save
Save Pin Information
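
Saves written with pins disabled (or by older builds) simply lack the "actions_metadata" key, and the loader added in PATCH 03 rebuilds a minimal version on load, so the two formats stay compatible. Condensed from the loadRequest hunk above (js is the parsed save file; the debug prints are dropped here):

    if "actions_metadata" in js:
        vars.actions_metadata = js["actions_metadata"]
    else:
        # Legacy/pinless save: each action becomes an entry with no alternatives.
        vars.actions_metadata = [{'Selected Text': text, 'Alternative Text': []}
                                 for text in js["actions"]]
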
+ " + + //Now do the icon (pin/redo) + + if (seqs[i][1] == "redo") { + text_data = text_data + "" + } else if (seqs[i][1] == "pinned") { + text_data = text_data + "" } else { - color = "grey" + text_data = text_data + "" } - seqselcontents.append("
"+seqs[i][0]+"
"); + text_data = text_data + "" + seqselcontents.append(text_data); + + //setup on-click actions $("#seqsel"+i).on("click", function () { socket.send({'cmd': 'seqsel', 'data': $(this).attr("n")}); }); - $("#seqselpin"+i).on("click", function () { - socket.send({'cmd': 'seqpin', 'data': $(this).attr("n")}); - if ($(this).attr("style") == "color: grey") { - console.log($(this).attr("style")); - $(this).css({"color": "white"}); - console.log($(this).attr("style")); - } else { - console.log($(this).attr("style")); - $(this).css({"color": "grey"}); - console.log($(this).attr("style")); - } - }); + + //onclick for pin only + if (seqs[i][1] != "redo") { + $("#seqselpin"+i).on("click", function () { + socket.send({'cmd': 'seqpin', 'data': $(this).attr("n")}); + if ($(this).attr("style") == "color: grey") { + console.log($(this).attr("style")); + $(this).css({"color": "white"}); + console.log($(this).attr("style")); + } else { + console.log($(this).attr("style")); + $(this).css({"color": "grey"}); + console.log($(this).attr("style")); + } + }); + } } $('#seqselmenu').slideDown("slow"); } From 5534fc9800792404bdc56b04eedf457b9756631a Mon Sep 17 00:00:00 2001 From: ebolam Date: Thu, 3 Feb 2022 08:25:51 -0500 Subject: [PATCH 25/28] Moved build script into the docker folder --- build_docker_images.sh => docker-cuda/build_docker_images.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename build_docker_images.sh => docker-cuda/build_docker_images.sh (100%) diff --git a/build_docker_images.sh b/docker-cuda/build_docker_images.sh similarity index 100% rename from build_docker_images.sh rename to docker-cuda/build_docker_images.sh From 9e17ea9636542a72254a6bd6c6d0c09732989de9 Mon Sep 17 00:00:00 2001 From: ebolam Date: Sun, 6 Feb 2022 13:42:46 -0500 Subject: [PATCH 26/28] Fixed model downloading problem where models were downloaded multiple times --- aiserver.py | 41 ++++++++++++++++++++++++----------------- 1 file changed, 24 insertions(+), 17 deletions(-) diff --git a/aiserver.py b/aiserver.py index 3b75a98d..2ce35317 100644 --- a/aiserver.py +++ b/aiserver.py @@ -970,43 +970,50 @@ if(not vars.model in ["InferKit", "Colab", "OAI", "ReadOnly", "TPUMeshTransforme lowmem = {} # Download model from Huggingface if it does not exist, otherwise load locally + + #If we specify a model and it's in the root directory, we need to move it to the models directory (legacy folder structure to new) + if os.path.isdir(vars.model.replace('/', '_')): + import shutil + shutil.move(vars.model.replace('/', '_'), "models/{}".format(vars.model.replace('/', '_'))) if(os.path.isdir(vars.custmodpth)): with(maybe_use_float16()): try: - tokenizer = AutoTokenizer.from_pretrained(vars.custmodpth, cache_dir="cache/") + tokenizer = AutoTokenizer.from_pretrained(vars.custmodpth, cache_dir="cache") except ValueError as e: - tokenizer = GPT2TokenizerFast.from_pretrained(vars.custmodpth, cache_dir="cache/") + tokenizer = GPT2TokenizerFast.from_pretrained(vars.custmodpth, cache_dir="cache") try: - model = AutoModelForCausalLM.from_pretrained(vars.custmodpth, cache_dir="cache/", **lowmem) + model = AutoModelForCausalLM.from_pretrained(vars.custmodpth, cache_dir="cache", **lowmem) except ValueError as e: - model = GPTNeoForCausalLM.from_pretrained(vars.custmodpth, cache_dir="cache/", **lowmem) - elif(os.path.isdir(vars.model.replace('/', '_'))): - with(maybe_use_float16()): + model = GPTNeoForCausalLM.from_pretrained(vars.custmodpth, cache_dir="cache", **lowmem) + elif(os.path.isdir("models/{}".format(vars.model.replace('/', '_')))): + 
with(maybe_use_float16()): try: - tokenizer = AutoTokenizer.from_pretrained(vars.model.replace('/', '_'), cache_dir="cache/") + tokenizer = AutoTokenizer.from_pretrained("models/{}".format(vars.model.replace('/', '_')), cache_dir="cache") except ValueError as e: - tokenizer = GPT2TokenizerFast.from_pretrained(vars.model.replace('/', '_'), cache_dir="cache/") + tokenizer = GPT2TokenizerFast.from_pretrained("models/{}".format(vars.model.replace('/', '_')), cache_dir="cache") try: - model = AutoModelForCausalLM.from_pretrained(vars.model.replace('/', '_'), cache_dir="cache/", **lowmem) + model = AutoModelForCausalLM.from_pretrained("models/{}".format(vars.model.replace('/', '_')), cache_dir="cache", **lowmem) except ValueError as e: - model = GPTNeoForCausalLM.from_pretrained(vars.model.replace('/', '_'), cache_dir="cache/", **lowmem) + model = GPTNeoForCausalLM.from_pretrained("models/{}".format(vars.model.replace('/', '_')), cache_dir="cache", **lowmem) else: try: - tokenizer = AutoTokenizer.from_pretrained(vars.model, cache_dir="cache/") + tokenizer = AutoTokenizer.from_pretrained(vars.model, cache_dir="cache") except ValueError as e: - tokenizer = GPT2TokenizerFast.from_pretrained(vars.model, cache_dir="cache/") + tokenizer = GPT2TokenizerFast.from_pretrained(vars.model, cache_dir="cache") with(maybe_use_float16()): try: - model = AutoModelForCausalLM.from_pretrained(vars.model, cache_dir="cache/", **lowmem) + model = AutoModelForCausalLM.from_pretrained(vars.model, cache_dir="cache", **lowmem) except ValueError as e: - model = GPTNeoForCausalLM.from_pretrained(vars.model, cache_dir="cache/", **lowmem) + model = GPTNeoForCausalLM.from_pretrained(vars.model, cache_dir="cache", **lowmem) if not args.colab: - model = model.half() + print("Trying to save model") import shutil shutil.rmtree("cache/") - model.save_pretrained("/models/{}".format(vars.model.replace('/', '_'))) - tokenizer.save_pretrained("/models/{}".format(vars.model.replace('/', '_'))) + model = model.half() + model.save_pretrained("models/{}".format(vars.model.replace('/', '_'))) + tokenizer.save_pretrained("models/{}".format(vars.model.replace('/', '_'))) + print("Saved") if(vars.hascuda): if(vars.usegpu): From 586b989582a2f67c2de6b750d3f667073b578dca Mon Sep 17 00:00:00 2001 From: ebolam Date: Sun, 6 Feb 2022 18:53:24 -0500 Subject: [PATCH 27/28] Redo bug fix --- aiserver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiserver.py b/aiserver.py index 0f55c5db..7058eb9f 100644 --- a/aiserver.py +++ b/aiserver.py @@ -2606,7 +2606,7 @@ def actionretry(data): if(vars.gamestarted if vars.useprompt else len(vars.actions) > 0): if(not vars.recentback and len(vars.actions) != 0 and len(vars.genseqs) == 0): # Don't pop if we're in the "Select sequence to keep" menu or if there are no non-prompt actions # We are going to move the selected text to alternative text in the actions_metadata variable so we can redo this action - vars.actions_metadata[vars.actions]['Alternative Text'] = [{'Text': vars.actions_metadata[vars.actions]['Selected Text'], + vars.actions_metadata[vars.actions]['Alternative Text'] = [{'Text': vars.actions_metadata[len(vars.actions)]['Selected Text'], 'Pinned': False, "Previous Selection": True, "Edited": False}] + vars.actions_metadata[vars.actions]['Alternative Text'] From c0bbe9f810c80b887f8d5b6fe17ae9524d463e31 Mon Sep 17 00:00:00 2001 From: ebolam Date: Sun, 6 Feb 2022 19:04:13 -0500 Subject: [PATCH 28/28] Reverted docker-cuda to mainline version. 
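
One note on PATCH 27 above: the fix updates only the right-hand side of the assignment, and both statements still subscript vars.actions_metadata with vars.actions (the KoboldStoryRegister object) rather than an integer. Judging from the equivalent bookkeeping in actionback(), the intended index is presumably len(vars.actions)-1, i.e. roughly:

    # Sketch of what actionretry's bookkeeping presumably intends
    # (mirrors actionback(); not what the patch as written does):
    idx = len(vars.actions) - 1
    meta = vars.actions_metadata[idx]
    meta['Alternative Text'] = [{'Text': meta['Selected Text'],
                                 'Pinned': False,
                                 'Previous Selection': True,
                                 'Edited': False}] + meta['Alternative Text']
    meta['Selected Text'] = ""
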
--- docker-cuda/Dockerfile | 8 +++++--- docker-cuda/Dockerfile_base | 10 ---------- docker-cuda/Dockerfile_base_finetune | 6 ------ docker-cuda/Dockerfile_base_huggingface | 7 ------- docker-cuda/Dockerfile_finetune | 7 ------- docker-cuda/build_docker_images.sh | 5 ----- 6 files changed, 5 insertions(+), 38 deletions(-) delete mode 100644 docker-cuda/Dockerfile_base delete mode 100644 docker-cuda/Dockerfile_base_finetune delete mode 100644 docker-cuda/Dockerfile_base_huggingface delete mode 100644 docker-cuda/Dockerfile_finetune delete mode 100644 docker-cuda/build_docker_images.sh diff --git a/docker-cuda/Dockerfile b/docker-cuda/Dockerfile index 36dac8e1..49ec3c8a 100644 --- a/docker-cuda/Dockerfile +++ b/docker-cuda/Dockerfile @@ -1,7 +1,9 @@ # This dockerfile is meant to serve as a rocm base image. It registers the debian rocm package repository, and # installs the rocm-dev package. -FROM ebolam/koboldai_base +FROM mambaorg/micromamba WORKDIR /content/ -COPY . /content/ -CMD python aiserver.py +COPY env.yml /home/micromamba/env.yml +RUN micromamba install -y -n base -f /home/micromamba/env.yml +USER root +RUN apt update && apt install xorg -y \ No newline at end of file diff --git a/docker-cuda/Dockerfile_base b/docker-cuda/Dockerfile_base deleted file mode 100644 index 8f7e2f63..00000000 --- a/docker-cuda/Dockerfile_base +++ /dev/null @@ -1,10 +0,0 @@ -# This dockerfile is meant to serve as a rocm base image. It registers the debian rocm package repository, and -# installs the rocm-dev package. - - -FROM mambaorg/micromamba -WORKDIR /content/ -COPY environments /content/environments -RUN micromamba install -y -n base -f /content/environments/base.yml -USER root -RUN apt update && apt install xorg -y diff --git a/docker-cuda/Dockerfile_base_finetune b/docker-cuda/Dockerfile_base_finetune deleted file mode 100644 index 843fdb06..00000000 --- a/docker-cuda/Dockerfile_base_finetune +++ /dev/null @@ -1,6 +0,0 @@ -# This dockerfile is meant to serve as a rocm base image. It registers the debian rocm package repository, and -# installs the rocm-dev package. - -FROM ebolam/koboldai_base:bare -WORKDIR /content/ -RUN micromamba update -y -n base -f /content/environments/finetuneanon.yml diff --git a/docker-cuda/Dockerfile_base_huggingface b/docker-cuda/Dockerfile_base_huggingface deleted file mode 100644 index c3fb8833..00000000 --- a/docker-cuda/Dockerfile_base_huggingface +++ /dev/null @@ -1,7 +0,0 @@ -# This dockerfile is meant to serve as a rocm base image. It registers the debian rocm package repository, and -# installs the rocm-dev package. - - -FROM ebolam/koboldai_base:bare -WORKDIR /content/ -RUN micromamba update -y -n base -f /content/environments/huggingface.yml diff --git a/docker-cuda/Dockerfile_finetune b/docker-cuda/Dockerfile_finetune deleted file mode 100644 index cd8f4b4b..00000000 --- a/docker-cuda/Dockerfile_finetune +++ /dev/null @@ -1,7 +0,0 @@ -# This dockerfile is meant to serve as a rocm base image. It registers the debian rocm package repository, and -# installs the rocm-dev package. - -FROM ebolam/koboldai_base:finetune -WORKDIR /content/ -COPY . /content/ -CMD python aiserver.py diff --git a/docker-cuda/build_docker_images.sh b/docker-cuda/build_docker_images.sh deleted file mode 100644 index 42a4d49a..00000000 --- a/docker-cuda/build_docker_images.sh +++ /dev/null @@ -1,5 +0,0 @@ -docker image build -f docker-cuda/Dockerfile_base . -t ebolam/koboldai_base:bare -docker image build -f docker-cuda/Dockerfile_base_huggingface . 
-t ebolam/koboldai_base -docker image build -f docker-cuda/Dockerfile_base_finetune . -t ebolam/koboldai_base:finetune -docker image build -f docker-cuda/Dockerfile . -t ebolam/koboldai -docker image build -f docker-cuda/Dockerfile_finetune . -t ebolam/koboldai:finetune \ No newline at end of file
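
Taken together, the undo/redo bookkeeping this series implements reduces to the following round trip (an illustrative sketch using plain lists in place of KoboldAI's vars object and UI plumbing):

    actions = ["The knight drew his sword."]
    actions_metadata = [{"Selected Text": actions[0], "Alternative Text": []}]

    def undo():
        # Back: pop the action but keep its metadata so redo can restore it.
        meta = actions_metadata[len(actions) - 1]
        meta["Alternative Text"].insert(0, {"Text": meta["Selected Text"],
                                            "Pinned": False,
                                            "Previous Selection": True,
                                            "Edited": False})
        meta["Selected Text"] = ""
        actions.pop()

    def redo_options():
        # Redo: offer previously selected (plus pinned) texts for the empty slot.
        meta = actions_metadata[len(actions)]
        return [a["Text"] for a in meta["Alternative Text"]
                if a["Previous Selection"] or a["Pinned"]]

    undo()
    print(redo_options())  # ['The knight drew his sword.']
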