From 9559347f82529f43a0a2f239b7830722c66b93f3 Mon Sep 17 00:00:00 2001
From: Javalar <39832337+Javalar@users.noreply.github.com>
Date: Tue, 15 Jun 2021 00:59:08 -0400
Subject: [PATCH 001/117] Update or remove targeted chunks in Game Screen (#2)
---
.gitignore | 8 +++++-
aiserver.py | 59 +++++++++++++++++++++++++++++++++----------
static/application.js | 33 ++++++++++++++++++++++++
3 files changed, 86 insertions(+), 14 deletions(-)
diff --git a/.gitignore b/.gitignore
index ff83d6e7..33d3c719 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,4 +3,10 @@ client.settings
# Ignore stories file except for test_story
stories/*
-!stories/sample_story.json
\ No newline at end of file
+!stories/sample_story.json
+
+# Ignore PyCharm project files.
+.idea
+
+# Ignore compiled Python files.
+*.pyc
diff --git a/aiserver.py b/aiserver.py
index 8dcec2af..c6b27672 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -9,6 +9,8 @@ from os import path, getcwd
import tkinter as tk
from tkinter import messagebox
import json
+from typing import Literal, Union
+
import requests
import html
@@ -666,12 +668,12 @@ def actionsubmit(data):
data = applyinputformatting(data)
# Store the result in the Action log
vars.actions.append(data)
-
+ update_story_chunk('last')
+
if(not vars.noai):
# Off to the tokenizer!
calcsubmit(data)
else:
- refresh_story()
set_aibusy(0)
#==================================================================#
@@ -687,7 +689,7 @@ def actionretry(data):
# Remove last action if possible and resubmit
if(len(vars.actions) > 0):
vars.actions.pop()
- refresh_story()
+ remove_story_chunk(len(vars.actions) + 1)
calcsubmit('')
#==================================================================#
@@ -698,8 +700,9 @@ def actionback():
return
# Remove last index of actions and refresh game screen
if(len(vars.actions) > 0):
+ action_index = len(vars.actions)
vars.actions.pop()
- refresh_story()
+ remove_story_chunk(len(vars.actions) + 1)
#==================================================================#
# Take submitted text and build the text to be given to generator
@@ -936,7 +939,7 @@ def genresult(genout):
# Add formatted text to Actions array and refresh the game screen
vars.actions.append(genout)
- refresh_story()
+ update_story_chunk('last')
emit('from_server', {'cmd': 'texteffect', 'data': len(vars.actions)})
#==================================================================#
@@ -955,9 +958,6 @@ def genselect(genout):
# Send sequences to UI for selection
emit('from_server', {'cmd': 'genseqs', 'data': genout})
-
- # Refresh story for any input text
- refresh_story()
#==================================================================#
# Send selected sequence to action log and refresh UI
@@ -966,7 +966,7 @@ def selectsequence(n):
if(len(vars.genseqs) == 0):
return
vars.actions.append(vars.genseqs[int(n)]["generated_text"])
- refresh_story()
+ update_story_chunk('last')
emit('from_server', {'cmd': 'texteffect', 'data': len(vars.actions)})
emit('from_server', {'cmd': 'hidegenseqs', 'data': ''})
vars.genseqs = []
@@ -1096,6 +1096,39 @@ def refresh_story():
text_parts.extend(('', html.escape(item), ''))
emit('from_server', {'cmd': 'updatescreen', 'data': formatforhtml(''.join(text_parts))})
+
+#==================================================================#
+# Signals the Game Screen to update one of the chunks
+#==================================================================#
+def update_story_chunk(idx: Union[int, Literal['last']]):
+ if idx == 'last':
+ if len(vars.actions) <= 1:
+ # In this case, we are better off just refreshing the whole thing as the
+ # prompt might not have been shown yet (with a "Generating story..."
+ # messsage instead).
+ refresh_story()
+ return
+
+ idx = len(vars.actions)
+
+ if idx == 0:
+ text = vars.prompt
+ else:
+ # Actions are 0 based, but in chunks 0 is the prompt.
+ # So the chunk index is one more than the corresponding action index.
+ text = vars.actions[idx - 1]
+
+ chunk_text = f'{formatforhtml(html.escape(text))}'
+ emit('from_server', {'cmd': 'updatechunk', 'data': {'index': idx, 'html': chunk_text, 'last': (idx == len(vars.actions))}})
+
+
+#==================================================================#
+# Signals the Game Screen to remove one of the chunks
+#==================================================================#
+def remove_story_chunk(idx: int):
+ emit('from_server', {'cmd': 'removechunk', 'data': idx})
+
+
#==================================================================#
# Sends the current generator settings to the Game Menu
#==================================================================#
@@ -1161,7 +1194,7 @@ def editsubmit(data):
vars.actions[vars.editln-1] = data
vars.mode = "play"
- refresh_story()
+ update_story_chunk(vars.editln)
emit('from_server', {'cmd': 'texteffect', 'data': vars.editln})
emit('from_server', {'cmd': 'editmode', 'data': 'false'})
@@ -1176,7 +1209,7 @@ def deleterequest():
else:
del vars.actions[vars.editln-1]
vars.mode = "play"
- refresh_story()
+ remove_story_chunk(vars.editln)
emit('from_server', {'cmd': 'editmode', 'data': 'false'})
#==================================================================#
@@ -1382,7 +1415,7 @@ def ikrequest(txt):
genout = req.json()["data"]["text"]
print("{0}{1}{2}".format(colors.CYAN, genout, colors.END))
vars.actions.append(genout)
- refresh_story()
+ update_story_chunk('last')
emit('from_server', {'cmd': 'texteffect', 'data': len(vars.actions)})
set_aibusy(0)
@@ -1432,7 +1465,7 @@ def oairequest(txt, min, max):
genout = req.json()["choices"][0]["text"]
print("{0}{1}{2}".format(colors.CYAN, genout, colors.END))
vars.actions.append(genout)
- refresh_story()
+ update_story_chunk('last')
emit('from_server', {'cmd': 'texteffect', 'data': len(vars.actions)})
set_aibusy(0)
diff --git a/static/application.js b/static/application.js
index e5ebc806..2cf66dfe 100644
--- a/static/application.js
+++ b/static/application.js
@@ -599,6 +599,39 @@ $(document).ready(function(){
setTimeout(function () {
$('#gamescreen').animate({scrollTop: $('#gamescreen').prop('scrollHeight')}, 1000);
}, 5);
+ } else if(msg.cmd == "updatechunk") {
+ const {index, html, last} = msg.data;
+ const existingChunk = game_text.children(`#n${index}`)
+ const newChunk = $(html);
+ if (existingChunk.length > 0) {
+ // Update existing chunk
+ existingChunk.before(newChunk);
+ existingChunk.remove();
+ } else {
+ // Append at the end
+ game_text.append(newChunk);
+ }
+ if(last) {
+ // Scroll to bottom of text if it's the last element
+ setTimeout(function () {
+ $('#gamescreen').animate({scrollTop: $('#gamescreen').prop('scrollHeight')}, 1000);
+ }, 5);
+ }
+ } else if(msg.cmd == "removechunk") {
+ let index = msg.data;
+ // Remove the chunk
+ game_text.children(`#n${index}`).remove()
+ // Shift all existing chunks by 1
+ index++;
+ while (true) {
+ const chunk = game_text.children(`#n${index}`)
+ if(chunk.length === 0) {
+ break;
+ }
+ const newIndex = index - 1;
+ chunk.attr('n', newIndex.toString()).attr('id', `n${newIndex}`);
+ index++;
+ }
} else if(msg.cmd == "setgamestate") {
// Enable or Disable buttons
if(msg.data == "ready") {
From 81aba7cba8070426ae09361763caa634a325030e Mon Sep 17 00:00:00 2001
From: Yves Dubois
Date: Tue, 15 Jun 2021 01:02:11 -0400
Subject: [PATCH 002/117] Fix typo
---
aiserver.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/aiserver.py b/aiserver.py
index c6b27672..6703fb53 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -1105,7 +1105,7 @@ def update_story_chunk(idx: Union[int, Literal['last']]):
if len(vars.actions) <= 1:
# In this case, we are better off just refreshing the whole thing as the
# prompt might not have been shown yet (with a "Generating story..."
- # messsage instead).
+ # message instead).
refresh_story()
return
From bbe3a92ce4804f9bef5de7a235b746f9ede7a922 Mon Sep 17 00:00:00 2001
From: Marcus Llewellyn
Date: Fri, 25 Jun 2021 17:02:19 -0500
Subject: [PATCH 003/117] Fixes for unusual story loading circumstances.
This PR does three things when loading a story from within the browser:
1. Prevents an error if a story file is not valid JSON.
2. Catches an error if a file is JSON, but lacks an actions property.
3. Replaces getcwd() and instead uses the path of the script file itself in case someone does not start the app from the current working directory.
---
fileops.py | 18 +++++++++++++-----
1 file changed, 13 insertions(+), 5 deletions(-)
diff --git a/fileops.py b/fileops.py
index dc5cb66e..640d3083 100644
--- a/fileops.py
+++ b/fileops.py
@@ -59,14 +59,22 @@ def getdirpath(dir, title):
#==================================================================#
def getstoryfiles():
list = []
- for file in listdir(getcwd()+"/stories"):
+ for file in listdir(path.dirname(path.realpath(__file__))+"/stories"):
if file.endswith(".json"):
ob = {}
ob["name"] = file.replace(".json", "")
- f = open(getcwd()+"/stories/"+file, "r")
- js = json.load(f)
+ f = open(path.dirname(path.realpath(__file__))+"/stories/"+file, "r")
+ try:
+ js = json.load(f)
+ except:
+ print("Browser loading error: Story file is malformed or not a JSON file.")
+ f.close()
+ continue
f.close()
- ob["actions"] = len(js["actions"])
+ try:
+ ob["actions"] = len(js["actions"])
+ except TypeError:
+ print("Browser loading error: Story file has incorrect format.")
list.append(ob)
return list
@@ -74,4 +82,4 @@ def getstoryfiles():
# Returns True if json file exists with requested save name
#==================================================================#
def saveexists(name):
- return path.exists(getcwd()+"/stories/"+name+".json")
\ No newline at end of file
+ return path.exists(path.dirname(os.path.realpath(__file__))+"/stories/"+name+".json")
\ No newline at end of file
From f9db17025db3e4901fbab2db9d2cc3102b46cfb2 Mon Sep 17 00:00:00 2001
From: Marcus Llewellyn
Date: Fri, 25 Jun 2021 17:17:07 -0500
Subject: [PATCH 004/117] Ack! Forgot a continue statement.
---
fileops.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/fileops.py b/fileops.py
index 640d3083..b4c02329 100644
--- a/fileops.py
+++ b/fileops.py
@@ -75,6 +75,7 @@ def getstoryfiles():
ob["actions"] = len(js["actions"])
except TypeError:
print("Browser loading error: Story file has incorrect format.")
+ continue
list.append(ob)
return list
@@ -82,4 +83,4 @@ def getstoryfiles():
# Returns True if json file exists with requested save name
#==================================================================#
def saveexists(name):
- return path.exists(path.dirname(os.path.realpath(__file__))+"/stories/"+name+".json")
\ No newline at end of file
+ return path.exists(path.dirname(os.path.realpath(__file__))+"/stories/"+name+".json")
From ad39a4c8b3a8c010d1b1a8a25d7fd13cf1193366 Mon Sep 17 00:00:00 2001
From: Marcus Llewellyn
Date: Fri, 25 Jun 2021 17:18:37 -0500
Subject: [PATCH 005/117] Ack! Forgot a continue statement.
---
fileops.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/fileops.py b/fileops.py
index 640d3083..9a574537 100644
--- a/fileops.py
+++ b/fileops.py
@@ -75,6 +75,7 @@ def getstoryfiles():
ob["actions"] = len(js["actions"])
except TypeError:
print("Browser loading error: Story file has incorrect format.")
+ continue
list.append(ob)
return list
From d5522f0d0ad6a6eb432bf2e7489f216d65c73ddd Mon Sep 17 00:00:00 2001
From: Marcus Llewellyn
Date: Fri, 25 Jun 2021 17:31:43 -0500
Subject: [PATCH 006/117] Yet another silly mistake. Sigh.
---
fileops.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/fileops.py b/fileops.py
index b4c02329..70fd79b0 100644
--- a/fileops.py
+++ b/fileops.py
@@ -83,4 +83,4 @@ def getstoryfiles():
# Returns True if json file exists with requested save name
#==================================================================#
def saveexists(name):
- return path.exists(path.dirname(os.path.realpath(__file__))+"/stories/"+name+".json")
+ return path.exists(path.dirname(path.realpath(__file__))+"/stories/"+name+".json")
From 2a191c2d7c2b934226638a5efaa407008a670006 Mon Sep 17 00:00:00 2001
From: henk717
Date: Thu, 19 Aug 2021 01:24:01 +0200
Subject: [PATCH 007/117] Update readme.txt
---
readme.txt | 82 ++----------------------------------------------------
1 file changed, 2 insertions(+), 80 deletions(-)
diff --git a/readme.txt b/readme.txt
index 5e67a05d..7490f34e 100644
--- a/readme.txt
+++ b/readme.txt
@@ -1,80 +1,2 @@
-Thanks for checking out the KoboldAI Client! Get support and updates on the subreddit:
-https://www.reddit.com/r/KoboldAI/
-
-[ABOUT]
-
-This is a browser-based front-end for AI-assisted writing with multiple local & remote AI models.
-It offers the standard array of tools, including Memory, Author's Note, World Info, Save & Load,
-adjustable AI settings, formatting options, and the ability to import exising AI Dungeon adventures.
-Current UI Snapshot: https://imgur.com/mjk5Yre
-
-For local generation, KoboldAI uses Transformers (https://huggingface.co/transformers/) to interact
-with the AI models. This can be done either on CPU, or GPU with sufficient hardware. If you have a
-high-end GPU with sufficient VRAM to run your model of choice, see
-(https://www.tensorflow.org/install/gpu) for instructions on enabling GPU support.
-
-Transformers/Tensorflow can still be used on CPU if you do not have high-end hardware, but generation
-times will be much longer. Alternatively, KoboldAI also supports utilizing remotely-hosted models.
-The currently supported remote APIs are InferKit and Google Colab, see the dedicated sections below
-for more info on these.
-
-[SETUP]
-
-1. Install a 64-bit version of Python.
- (Development was done on 3.7, I have not tested newer versions)
- Windows download link: https://www.python.org/ftp/python/3.7.9/python-3.7.9-amd64.exe
-2. When installing Python make sure "Add Python to PATH" is selected.
- (If pip isn't working, run the installer again and choose Modify to choose Optional features.)
-3. Run install_requirements.bat.
- (This will install the necessary python packages via pip)
-4. Run play.bat
-5. Select a model from the list. Flask will start and give you a message that it's ready to connect.
-6. Open a web browser and enter http://127.0.0.1:5000/
-
-[ENABLE COLORS IN WINDOWS 10 COMMAND LINE]
-
-If you see strange numeric tags in the console output, then your console of choice does not have
-color support enabled. On Windows 10, you can enable color support by lanching the registry editor
-and adding the REG_DWORD key VirtualTerminalLevel to Computer\HKEY_CURRENT_USER\Console and setting
-its value to 1.
-
-[ENABLE GPU FOR SUPPORTED VIDEO CARDS]
-
-1. Install NVidia CUDA toolkit from https://developer.nvidia.com/cuda-10.2-download-archive
-2. Visit PyTorch's website(https://pytorch.org/get-started/locally/) and select Pip under "Package"
-and your version of CUDA under "Compute Platform" (I linked 10.2) to get the pip3 command.
-3. Copy and paste pip3 command into command prompt to install torch with GPU support
-
-Be aware that when using GPU mode, inference will be MUCH faster but if your GPU doesn't have enough
-VRAM to load the model it will crash the application.
-
-[IMPORT AI DUNGEON GAMES]
-
-To import your games from AI Dungeon, first grab CuriousNekomimi's AI Dungeon Content Archive Toolkit:
-https://github.com/CuriousNekomimi/AIDCAT
-Follow the video instructions for getting your access_token, and run aidcat.py in command prompt.
-Choose option [1] Download your saved content.
-Choose option [2] Download your adventures.
-Save the JSON file to your computer using the prompt.
-Run KoboldAI, and after connecting to the web GUI, press the Import button at the top.
-Navigate to the JSON file exported from AIDCAT and select it. A prompt will appear in the GUI
-presenting you with all Adventures scraped from your AI Dungeon account.
-Select an Adventure and click the Accept button.
-
-[HOST GPT-NEO ON GOOGLE COLAB]
-
-If your computer does not have an 8GB GPU to run GPT-Neo locally, you can now run a Google Colab
-notebook hosting a GPT-Neo-2.7B model remotely and connect to it using the KoboldAI client.
-See the instructions on the Colab at the link below:
-https://colab.research.google.com/drive/1uGe9f4ruIQog3RLxfUsoThakvLpHjIkX?usp=sharing
-
-[FOR INFERKIT INTEGRATION]
-
-If you would like to use InferKit's Megatron-11b model, sign up for a free account on their website.
-https://inferkit.com/
-After verifying your email address, sign in and click on your profile picture in the top right.
-In the drop down menu, click "API Key".
-On the API Key page, click "Reveal API Key" and copy it. When starting KoboldAI and selecting the
-InferKit API model, you will be asked to paste your API key into the terminal. After entering,
-the API key will be stored in the client.settings file for future use.
-You can see your remaining budget for generated characters on their website under "Billing & Usage".
\ No newline at end of file
+This branch will eventually be used for a community edition of KoboldAI, uniting the different community made editions.
+For now it is just a placeholder.
From 17dce2e16cc7a921fcd0ffa9d7acb6653fa9bab0 Mon Sep 17 00:00:00 2001
From: henk717
Date: Thu, 19 Aug 2021 01:26:03 +0200
Subject: [PATCH 008/117] Update readme.txt
---
readme.txt | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/readme.txt b/readme.txt
index 7490f34e..5ea1e028 100644
--- a/readme.txt
+++ b/readme.txt
@@ -1,2 +1,6 @@
This branch will eventually be used for a community edition of KoboldAI, uniting the different community made editions.
For now it is just a placeholder.
+
+---
+
+Looking for ColabKobold? Check the different branches on my account.
From efbe40f1f6fd130c381530acdaa435ef957d1771 Mon Sep 17 00:00:00 2001
From: henk717
Date: Thu, 19 Aug 2021 12:54:44 +0200
Subject: [PATCH 009/117] Random Story Generator
Add the Random Story Generator and more user-friendly defaults
---
.gitignore | 3 +-
UPDATE YOUR COLAB NOTEBOOK.txt | 3 -
aiserver.py | 23 +-
gensettings.py | 18 +-
readme.txt | 2 +-
static/application - Copy.js | 981 +++++++++++++++++++++++++++++++++
static/application.js | 33 ++
static/application.js.bak | 981 +++++++++++++++++++++++++++++++++
static/custom.css | 6 +
templates/index.html | 33 +-
10 files changed, 2058 insertions(+), 25 deletions(-)
delete mode 100644 UPDATE YOUR COLAB NOTEBOOK.txt
create mode 100644 static/application - Copy.js
create mode 100644 static/application.js.bak
diff --git a/.gitignore b/.gitignore
index ff83d6e7..8a6b218c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,4 +3,5 @@ client.settings
# Ignore stories file except for test_story
stories/*
-!stories/sample_story.json
\ No newline at end of file
+!stories/sample_story.json
+/.project
diff --git a/UPDATE YOUR COLAB NOTEBOOK.txt b/UPDATE YOUR COLAB NOTEBOOK.txt
deleted file mode 100644
index 67a821f4..00000000
--- a/UPDATE YOUR COLAB NOTEBOOK.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-If you use Google Colab to run your models, and you made a local copy of the Colab notebook in Google Drive instead of using the community notebook, you MUST make a new copy of the community notebook to use the new multiple-sequence generation feature. The link is below:
-
-https://colab.research.google.com/drive/1uGe9f4ruIQog3RLxfUsoThakvLpHjIkX?usp=sharing
\ No newline at end of file
diff --git a/aiserver.py b/aiserver.py
index 8dcec2af..ba583480 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -56,13 +56,13 @@ class vars:
model = "" # Model ID string chosen at startup
noai = False # Runs the script without starting up the transformers pipeline
aibusy = False # Stops submissions while the AI is working
- max_length = 512 # Maximum number of tokens to submit per action
+ max_length = 1024 # Maximum number of tokens to submit per action
ikmax = 3000 # Maximum number of characters to submit to InferKit
- genamt = 60 # Amount of text for each action to generate
+ genamt = 80 # Amount of text for each action to generate
ikgen = 200 # Number of characters for InferKit to generate
- rep_pen = 1.0 # Default generator repetition_penalty
- temp = 1.0 # Default generator temperature
- top_p = 1.0 # Default generator top_p
+ rep_pen = 1.1 # Default generator repetition_penalty
+ temp = 0.5 # Default generator temperature
+ top_p = 0.9 # Default generator top_p
numseqs = 1 # Number of sequences to ask the generator to create
gamestarted = False # Whether the game has started (disables UI elements)
prompt = "" # Prompt
@@ -75,7 +75,7 @@ class vars:
badwordsids = [] # Tokenized array of badwords
deletewi = -1 # Temporary storage for index to delete
wirmvwhtsp = False # Whether to remove leading whitespace from WI entries
- widepth = 1 # How many historical actions to scan for WI hits
+ widepth = 3 # How many historical actions to scan for WI hits
mode = "play" # Whether the interface is in play, memory, or edit mode
editln = 0 # Which line was last selected in Edit Mode
url = "https://api.inferkit.com/v1/models/standard/generate" # InferKit API URL
@@ -434,6 +434,8 @@ def get_message(msg):
importRequest()
elif(msg['cmd'] == 'newgame'):
newGameRequest()
+ elif(msg['cmd'] == 'rndgame'):
+ randomGameRequest(msg['data'])
elif(msg['cmd'] == 'settemp'):
vars.temp = float(msg['data'])
emit('from_server', {'cmd': 'setlabeltemp', 'data': msg['data']})
@@ -1803,6 +1805,11 @@ def newGameRequest():
sendwi()
setStartState()
+def randomGameRequest(topic):
+ newGameRequest()
+ vars.memory = "You generate the following " + topic + " story concept :"
+ actionsubmit("")
+ vars.memory = ""
#==================================================================#
# Final startup commands to launch Flask app
@@ -1813,5 +1820,5 @@ if __name__ == "__main__":
# Start Flask/SocketIO (Blocking, so this must be last method!)
print("{0}Server started!\rYou may now connect with a browser at http://127.0.0.1:5000/{1}".format(colors.GREEN, colors.END))
- #socketio.run(app, host='0.0.0.0', port=5000)
- socketio.run(app)
\ No newline at end of file
+ socketio.run(app, host='0.0.0.0', port=5000)
+ #socketio.run(app)
diff --git a/gensettings.py b/gensettings.py
index 8f6a67a0..eb914139 100644
--- a/gensettings.py
+++ b/gensettings.py
@@ -6,7 +6,7 @@ gensettingstf = [{
"min": 0.1,
"max": 2.0,
"step": 0.05,
- "default": 1.0,
+ "default": 0.5,
"tooltip": "Randomness of sampling. High values can increase creativity but may make text less sensible. Lower values will make text more predictable but can become repetitious."
},
{
@@ -17,7 +17,7 @@ gensettingstf = [{
"min": 0.1,
"max": 1.0,
"step": 0.05,
- "default": 1.0,
+ "default": 0.9,
"tooltip": "Used to discard unlikely text in the sampling process. Lower values will make text more predictable but can become repetitious."
},
{
@@ -28,7 +28,7 @@ gensettingstf = [{
"min": 1.0,
"max": 2.0,
"step": 0.05,
- "default": 1.0,
+ "default": 1.1,
"tooltip": "Used to penalize words that were already generated or belong to the context."
},
{
@@ -39,7 +39,7 @@ gensettingstf = [{
"min": 16,
"max": 512,
"step": 2,
- "default": 60,
+ "default": 80,
"tooltip": "Number of tokens the AI should generate. Higher numbers will take longer to generate."
},
{
@@ -50,7 +50,7 @@ gensettingstf = [{
"min": 512,
"max": 2048,
"step": 8,
- "default": 512,
+ "default": 1024,
"tooltip": "Max number of tokens of context to submit to the AI for sampling. Make sure this is higher than Amount to Generate. Higher values increase VRAM/RAM usage."
},
{
@@ -72,7 +72,7 @@ gensettingstf = [{
"min": 1,
"max": 5,
"step": 1,
- "default": 1,
+ "default": 3,
"tooltip": "Number of historic actions to scan for W Info keys."
},
{
@@ -95,7 +95,7 @@ gensettingsik =[{
"min": 0.1,
"max": 2.0,
"step": 0.05,
- "default": 1.0,
+ "default": 0.5,
"tooltip": "Randomness of sampling. High values can increase creativity but may make text less sensible. Lower values will make text more predictable but can become repetitious."
},
{
@@ -106,7 +106,7 @@ gensettingsik =[{
"min": 0.1,
"max": 1.0,
"step": 0.05,
- "default": 1.0,
+ "default": 1.1,
"tooltip": "Used to discard unlikely text in the sampling process. Lower values will make text more predictable but can become repetitious."
},
{
@@ -128,7 +128,7 @@ gensettingsik =[{
"min": 1,
"max": 5,
"step": 1,
- "default": 1,
+ "default": 3,
"tooltip": "Number of historic actions to scan for W Info keys."
},
{
diff --git a/readme.txt b/readme.txt
index 5ea1e028..9ab782bb 100644
--- a/readme.txt
+++ b/readme.txt
@@ -1,5 +1,5 @@
This branch will eventually be used for a community edition of KoboldAI, uniting the different community made editions.
-For now it is just a placeholder.
+For now it is a WIP branch with Random Story Generation and better default settings.
---
diff --git a/static/application - Copy.js b/static/application - Copy.js
new file mode 100644
index 00000000..71c066d4
--- /dev/null
+++ b/static/application - Copy.js
@@ -0,0 +1,981 @@
+//=================================================================//
+// VARIABLES
+//=================================================================//
+
+// Socket IO Object
+var socket;
+
+// UI references for jQuery
+var connect_status;
+var button_newgame;
+var button_rndgame;
+var button_save;
+var button_saveas;
+var button_savetofile;
+var button_load;
+var button_import;
+var button_importwi;
+var button_impaidg;
+var button_settings;
+var button_format;
+var button_send;
+var button_actedit;
+var button_actmem;
+var button_actback;
+var button_actretry;
+var button_delete;
+var button_actwi;
+var game_text;
+var input_text;
+var message_text;
+var settings_menu;
+var format_menu;
+var wi_menu;
+var anote_menu;
+var anote_input;
+var anote_labelcur;
+var anote_slider;
+var popup;
+var popup_title;
+var popup_content;
+var popup_accept;
+var popup_close;
+var aidgpopup;
+var aidgpromptnum;
+var aidg_accept;
+var aidg_close;
+var saveaspopup;
+var saveasinput;
+var saveas_accept;
+var saveas_close;
+var saveasoverwrite;
+var loadpopup;
+var loadcontent;
+var load_accept;
+var load_close;
+var nspopup;
+var ns_accept;
+var ns_close;
+var rspopup;
+var rs_accept;
+var rs_close;
+var seqselmenu;
+var seqselcontents;
+
+// Key states
+var shift_down = false;
+var do_clear_ent = false;
+
+// Display vars
+var allowtoggle = false;
+var formatcount = 0;
+
+//=================================================================//
+// METHODS
+//=================================================================//
+
+function addSetting(ob) {
+ // Add setting block to Settings Menu
+ if(ob.uitype == "slider"){
+ settings_menu.append("
\
+
\
+
\
+ "+ob.label+" ?"+ob.tooltip+"\
+
\
+
\
+ "+ob.default+"\
+
\
+
\
+
\
+ \
+
\
+
\
+
\
+ "+ob.min+"\
+
\
+
\
+ "+ob.max+"\
+
\
+
\
+
");
+ // Set references to HTML objects
+ var refin = $("#"+ob.id);
+ var reflb = $("#"+ob.id+"cur");
+ window["setting_"+ob.id] = refin; // Is this still needed?
+ window["label_"+ob.id] = reflb; // Is this still needed?
+ // Add event function to input
+ refin.on("input", function () {
+ socket.send({'cmd': $(this).attr('id'), 'data': $(this).val()});
+ });
+ } else if(ob.uitype == "toggle"){
+ settings_menu.append("
\
+ \
+ "+ob.label+" \
+ ?"+ob.tooltip+"\
+
");
+ // Tell Bootstrap-Toggle to render the new checkbox
+ $("input[type=checkbox]").bootstrapToggle();
+ $("#"+ob.id).on("change", function () {
+ if(allowtoggle) {
+ socket.send({'cmd': $(this).attr('id'), 'data': $(this).prop('checked')});
+ }
+ });
+ }
+}
+
+function addFormat(ob) {
+ // Check if we need to make a new column for this button
+ if(formatcount == 0) {
+ format_menu.append("");
+ }
+ // Get reference to the last child column
+ var ref = $("#formatmenu > div").last();
+ // Add format block to Format Menu
+ ref.append("
");
+ // Send key value to text input
+ $("#wikey"+ob.num).val(ob.key);
+ // Assign delete event to button
+ $("#btn_wi"+ob.num).on("click", function () {
+ showWiDeleteConfirm(ob.num);
+ });
+ } else {
+ // Show WI line item with form fields hidden (uninitialized)
+ wi_menu.append("
\
+
\
+ \
+ \
+ \
+
\
+
\
+ \
+
\
+
\
+ \
+
\
+
");
+ // Assign function to expand WI item to button
+ $("#btn_wi"+ob.num).on("click", function () {
+ expandWiLine(ob.num);
+ });
+ }
+ // Assign actions to other elements
+ $("#btn_wican"+ob.num).on("click", function () {
+ hideWiDeleteConfirm(ob.num);
+ });
+ $("#btn_widel"+ob.num).on("click", function () {
+ socket.send({'cmd': 'widelete', 'data': ob.num});
+ });
+}
+
+function expandWiLine(num) {
+ show([$("#wikey"+num), $("#wientry"+num)]);
+ $("#btn_wi"+num).html("X");
+ $("#btn_wi"+num).off();
+ // Tell server the WI entry was initialized
+ socket.send({'cmd': 'wiinit', 'data': num});
+ $("#btn_wi"+num).on("click", function () {
+ showWiDeleteConfirm(num);
+ });
+}
+
+function showWiDeleteConfirm(num) {
+ hide([$("#btn_wi"+num)]);
+ show([$("#btn_widel"+num), $("#btn_wican"+num)]);
+}
+
+function hideWiDeleteConfirm(num) {
+ show([$("#btn_wi"+num)]);
+ hide([$("#btn_widel"+num), $("#btn_wican"+num)]);
+}
+
+function highlightImportLine(ref) {
+ $("#popupcontent > div").removeClass("popuplistselected");
+ ref.addClass("popuplistselected");
+ enableButtons([popup_accept]);
+}
+
+function enableButtons(refs) {
+ for(i=0; i");
+}
+
+function hideWaitAnimation() {
+ $('#waitanim').remove();
+}
+
+function hide(refs) {
+ for(i=0; i *', function() {
+ editModeSelect($(this).attr("n"));
+ });
+ disableSendBtn();
+ hide([button_actback, button_actmem, button_actretry, button_actwi]);
+ show([button_delete]);
+}
+
+function exitEditMode() {
+ // Remove class to each story chunk
+ hideMessage();
+ button_actedit.html("Edit");
+ game_text.children('chunk').removeClass("chunkhov");
+ game_text.off('click', '> *');
+ enableSendBtn();
+ show([button_actback, button_actmem, button_actretry, button_actwi]);
+ hide([button_delete]);
+ input_text.val("");
+}
+
+function editModeSelect(n) {
+ socket.send({'cmd': 'editline', 'data': n});
+}
+
+function enterMemoryMode() {
+ showMessage("Edit the memory to be sent with each request to the AI.");
+ button_actmem.html("Cancel");
+ hide([button_actback, button_actretry, button_actedit, button_delete, button_actwi]);
+ // Display Author's Note field
+ anote_menu.slideDown("fast");
+}
+
+function exitMemoryMode() {
+ hideMessage();
+ button_actmem.html("Memory");
+ show([button_actback, button_actretry, button_actedit, button_actwi]);
+ input_text.val("");
+ // Hide Author's Note field
+ anote_menu.slideUp("fast");
+}
+
+function enterWiMode() {
+ showMessage("World Info will be added to memory only when the key appears in submitted text or the last action.");
+ button_actwi.html("Accept");
+ hide([button_actedit, button_actback, button_actmem, button_actretry, game_text]);
+ show([wi_menu]);
+ disableSendBtn();
+}
+
+function exitWiMode() {
+ hideMessage();
+ button_actwi.html("W Info");
+ hide([wi_menu]);
+ show([button_actedit, button_actback, button_actmem, button_actretry, game_text]);
+ enableSendBtn();
+}
+
+function returnWiList(ar) {
+ var list = [];
+ var i;
+ for(i=0; i\
+
");
+ // Set references to HTML objects
+ var refin = $("#"+ob.id);
+ var reflb = $("#"+ob.id+"cur");
+ window["setting_"+ob.id] = refin; // Is this still needed?
+ window["label_"+ob.id] = reflb; // Is this still needed?
+ // Add event function to input
+ refin.on("input", function () {
+ socket.send({'cmd': $(this).attr('id'), 'data': $(this).val()});
+ });
+ } else if(ob.uitype == "toggle"){
+ settings_menu.append("
\
+ \
+ "+ob.label+" \
+ ?"+ob.tooltip+"\
+
");
+ // Tell Bootstrap-Toggle to render the new checkbox
+ $("input[type=checkbox]").bootstrapToggle();
+ $("#"+ob.id).on("change", function () {
+ if(allowtoggle) {
+ socket.send({'cmd': $(this).attr('id'), 'data': $(this).prop('checked')});
+ }
+ });
+ }
+}
+
+function addFormat(ob) {
+ // Check if we need to make a new column for this button
+ if(formatcount == 0) {
+ format_menu.append("");
+ }
+ // Get reference to the last child column
+ var ref = $("#formatmenu > div").last();
+ // Add format block to Format Menu
+ ref.append("
");
+ // Send key value to text input
+ $("#wikey"+ob.num).val(ob.key);
+ // Assign delete event to button
+ $("#btn_wi"+ob.num).on("click", function () {
+ showWiDeleteConfirm(ob.num);
+ });
+ } else {
+ // Show WI line item with form fields hidden (uninitialized)
+ wi_menu.append("
\
+
\
+ \
+ \
+ \
+
\
+
\
+ \
+
\
+
\
+ \
+
\
+
");
+ // Assign function to expand WI item to button
+ $("#btn_wi"+ob.num).on("click", function () {
+ expandWiLine(ob.num);
+ });
+ }
+ // Assign actions to other elements
+ $("#btn_wican"+ob.num).on("click", function () {
+ hideWiDeleteConfirm(ob.num);
+ });
+ $("#btn_widel"+ob.num).on("click", function () {
+ socket.send({'cmd': 'widelete', 'data': ob.num});
+ });
+}
+
+function expandWiLine(num) {
+ show([$("#wikey"+num), $("#wientry"+num)]);
+ $("#btn_wi"+num).html("X");
+ $("#btn_wi"+num).off();
+ // Tell server the WI entry was initialized
+ socket.send({'cmd': 'wiinit', 'data': num});
+ $("#btn_wi"+num).on("click", function () {
+ showWiDeleteConfirm(num);
+ });
+}
+
+function showWiDeleteConfirm(num) {
+ hide([$("#btn_wi"+num)]);
+ show([$("#btn_widel"+num), $("#btn_wican"+num)]);
+}
+
+function hideWiDeleteConfirm(num) {
+ show([$("#btn_wi"+num)]);
+ hide([$("#btn_widel"+num), $("#btn_wican"+num)]);
+}
+
+function highlightImportLine(ref) {
+ $("#popupcontent > div").removeClass("popuplistselected");
+ ref.addClass("popuplistselected");
+ enableButtons([popup_accept]);
+}
+
+function enableButtons(refs) {
+ for(i=0; i");
+}
+
+function hideWaitAnimation() {
+ $('#waitanim').remove();
+}
+
+function hide(refs) {
+ for(i=0; i *', function() {
+ editModeSelect($(this).attr("n"));
+ });
+ disableSendBtn();
+ hide([button_actback, button_actmem, button_actretry, button_actwi]);
+ show([button_delete]);
+}
+
+function exitEditMode() {
+ // Remove class to each story chunk
+ hideMessage();
+ button_actedit.html("Edit");
+ game_text.children('chunk').removeClass("chunkhov");
+ game_text.off('click', '> *');
+ enableSendBtn();
+ show([button_actback, button_actmem, button_actretry, button_actwi]);
+ hide([button_delete]);
+ input_text.val("");
+}
+
+function editModeSelect(n) {
+ socket.send({'cmd': 'editline', 'data': n});
+}
+
+function enterMemoryMode() {
+ showMessage("Edit the memory to be sent with each request to the AI.");
+ button_actmem.html("Cancel");
+ hide([button_actback, button_actretry, button_actedit, button_delete, button_actwi]);
+ // Display Author's Note field
+ anote_menu.slideDown("fast");
+}
+
+function exitMemoryMode() {
+ hideMessage();
+ button_actmem.html("Memory");
+ show([button_actback, button_actretry, button_actedit, button_actwi]);
+ input_text.val("");
+ // Hide Author's Note field
+ anote_menu.slideUp("fast");
+}
+
+function enterWiMode() {
+ showMessage("World Info will be added to memory only when the key appears in submitted text or the last action.");
+ button_actwi.html("Accept");
+ hide([button_actedit, button_actback, button_actmem, button_actretry, game_text]);
+ show([wi_menu]);
+ disableSendBtn();
+}
+
+function exitWiMode() {
+ hideMessage();
+ button_actwi.html("W Info");
+ hide([wi_menu]);
+ show([button_actedit, button_actback, button_actmem, button_actretry, game_text]);
+ enableSendBtn();
+}
+
+function returnWiList(ar) {
+ var list = [];
+ var i;
+ for(i=0; i\
+
+
+ Story quality and topic depends on the model and your settings/suggestion (Around 0.5 temp is recommended).
+ This feature works best with finetuned models like GPT-Neo-AID or GPT-Neo-Horni but is limited to what the AI knows.
+ If you get random spam then your model is not capable of using this feature and if you get unrelated stories it does not understand the topic.
+ Generated results are unfiltered and can be offensive or unsuitable for children.
+ Unsaved data will be lost.
+ Below you can input a genre suggestion for the AI to loosely base the story on (For example Horror or Cowboy).
+