diff --git a/aiserver.py b/aiserver.py
index 96cc4ef2..74cd5fd3 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -1,6 +1,6 @@
#==================================================================#
# KoboldAI Client
-# Version: 1.14.0
+# Version: 1.15.0
# By: KoboldAIDev
#==================================================================#
@@ -43,40 +43,44 @@ modellist = [
["InferKit API (requires API key)", "InferKit", ""],
["Custom Neo (eg Neo-horni)", "NeoCustom", ""],
["Custom GPT-2 (eg CloverEdition)", "GPT2Custom", ""],
- ["Google Colab", "Colab", ""]
+ ["Google Colab", "Colab", ""],
+ ["OpenAI API (requires API key)", "OAI", ""]
]
# Variables
class vars:
- lastact = "" # The last action received from the user
- lastctx = "" # The last context submitted to the generator
- model = ""
- noai = False # Runs the script without starting up the transformers pipeline
- aibusy = False # Stops submissions while the AI is working
- max_length = 512 # Maximum number of tokens to submit per action
- ikmax = 3000 # Maximum number of characters to submit to InferKit
- genamt = 60 # Amount of text for each action to generate
- ikgen = 200 # Number of characters for InferKit to generate
- rep_pen = 1.0 # Default generator repetition_penalty
- temp = 1.0 # Default generator temperature
- top_p = 1.0 # Default generator top_p
- gamestarted = False
- prompt = ""
- memory = ""
- authornote = ""
+ lastact = "" # The last action received from the user
+ lastctx = "" # The last context submitted to the generator
+ model = "" # Model ID string chosen at startup
+ noai = False # Runs the script without starting up the transformers pipeline
+ aibusy = False # Stops submissions while the AI is working
+ max_length = 512 # Maximum number of tokens to submit per action
+ ikmax = 3000 # Maximum number of characters to submit to InferKit
+ genamt = 60 # Amount of text for each action to generate
+ ikgen = 200 # Number of characters for InferKit to generate
+ rep_pen = 1.0 # Default generator repetition_penalty
+ temp = 1.0 # Default generator temperature
+ top_p = 1.0 # Default generator top_p
+ gamestarted = False # Whether the game has started (disables UI elements)
+ prompt = "" # Prompt
+ memory = "" # Text submitted to memory field
+ authornote = "" # Text submitted to Author's Note field
andepth = 3 # How far back in history to append author's note
- actions = []
- worldinfo = []
- badwords = []
- badwordsids = []
+ actions = [] # Array of actions submitted by user and AI
+ worldinfo = [] # Array of World Info key/value objects
+ badwords = [] # Array of str/chr values that should be removed from output
+ badwordsids = [] # Tokenized array of badwords
deletewi = -1 # Temporary storage for index to delete
wirmvwhtsp = False # Whether to remove leading whitespace from WI entries
widepth = 1 # How many historical actions to scan for WI hits
mode = "play" # Whether the interface is in play, memory, or edit mode
editln = 0 # Which line was last selected in Edit Mode
url = "https://api.inferkit.com/v1/models/standard/generate" # InferKit API URL
+ oaiurl = "" # OpenAI API URL
+ oaiengines = "https://api.openai.com/v1/engines"
colaburl = "" # Ngrok url for Google Colab mode
apikey = "" # API key to use for InferKit API calls
+ oaiapikey = "" # API key to use for OpenAI API calls
savedir = getcwd()+"\stories"
hascuda = False # Whether torch has detected CUDA on the system
usegpu = False # Whether to launch pipeline with GPU support
@@ -84,6 +88,9 @@ class vars:
formatoptns = {} # Container for state of formatting options
importnum = -1 # Selection on import popup list
importjs = {} # Temporary storage for import data
+ loadselect = "" # Temporary storage for filename to load
+ svowname = ""
+ saveow = False
#==================================================================#
# Function to get model selection at startup
@@ -138,7 +145,7 @@ print("{0}Welcome to the KoboldAI Client!\nSelect an AI model to continue:{1}\n"
getModelSelection()
# If transformers model was selected & GPU available, ask to use CPU or GPU
-if(not vars.model in ["InferKit", "Colab"]):
+if(not vars.model in ["InferKit", "Colab", "OAI"]):
# Test for GPU support
import torch
print("{0}Looking for GPU support...{1}".format(colors.PURPLE, colors.END), end="")
@@ -185,7 +192,7 @@ if(vars.model == "InferKit"):
file = open("client.settings", "r")
# Check if API key exists
js = json.load(file)
- if(js["apikey"] != ""):
+ if("apikey" in js and js["apikey"] != ""):
# API key exists, grab it and close the file
vars.apikey = js["apikey"]
file.close()
@@ -201,6 +208,73 @@ if(vars.model == "InferKit"):
finally:
file.close()
+# Ask for API key if OpenAI was selected
+if(vars.model == "OAI"):
+ if(not path.exists("client.settings")):
+ # If the client settings file doesn't exist, create it
+ print("{0}Please enter your OpenAI API key:{1}\n".format(colors.CYAN, colors.END))
+ vars.oaiapikey = input("Key> ")
+ # Write API key to file
+ file = open("client.settings", "w")
+ try:
+ js = {"oaiapikey": vars.oaiapikey}
+ file.write(json.dumps(js, indent=3))
+ finally:
+ file.close()
+ else:
+ # Otherwise open it up
+ file = open("client.settings", "r")
+ # Check if API key exists
+ js = json.load(file)
+ if("oaiapikey" in js and js["oaiapikey"] != ""):
+ # API key exists, grab it and close the file
+ vars.oaiapikey = js["oaiapikey"]
+ file.close()
+ else:
+ # Get API key, add it to settings object, and write it to disk
+ print("{0}Please enter your OpenAI API key:{1}\n".format(colors.CYAN, colors.END))
+ vars.oaiapikey = input("Key> ")
+ js["oaiapikey"] = vars.oaiapikey
+ # Write API key to file
+ file = open("client.settings", "w")
+ try:
+ file.write(json.dumps(js, indent=3))
+ finally:
+ file.close()
+
+ # Get list of models from OAI
+ print("{0}Retrieving engine list...{1}".format(colors.PURPLE, colors.END), end="")
+ req = requests.get(
+ vars.oaiengines,
+ headers = {
+ 'Authorization': 'Bearer '+vars.oaiapikey
+ }
+ )
+ if(req.status_code == 200):
+ print("{0}OK!{1}".format(colors.GREEN, colors.END))
+ print("{0}Please select an engine to use:{1}\n".format(colors.CYAN, colors.END))
+ engines = req.json()["data"]
+ # Print list of engines
+ i = 0
+ for en in engines:
+ print(" {0} - {1} ({2})".format(i, en["id"], "\033[92mready\033[0m" if en["ready"] == True else "\033[91mnot ready\033[0m"))
+ i += 1
+ # Get engine to use
+ print("")
+ engselected = False
+ while(engselected == False):
+ engine = input("Engine #> ")
+ if(engine.isnumeric() and int(engine) < len(engines)):
+ vars.oaiurl = "https://api.openai.com/v1/engines/{0}/completions".format(engines[int(engine)]["id"])
+ engselected = True
+ else:
+ print("{0}Please enter a valid selection.{1}".format(colors.RED, colors.END))
+ else:
+ # Something went wrong, print the message and quit since we can't initialize an engine
+ print("{0}ERROR!{1}".format(colors.RED, colors.END))
+ print(req.json())
+ quit()
+
# Ask for ngrok url if Google Colab was selected
if(vars.model == "Colab"):
print("{0}Please enter the ngrok.io URL displayed in Google Colab:{1}\n".format(colors.CYAN, colors.END))
@@ -221,7 +295,7 @@ socketio = SocketIO(app)
print("{0}OK!{1}".format(colors.GREEN, colors.END))
# Start transformers and create pipeline
-if(not vars.model in ["InferKit", "Colab"]):
+if(not vars.model in ["InferKit", "Colab", "OAI"]):
if(not vars.noai):
print("{0}Initializing transformers, please wait...{1}".format(colors.PURPLE, colors.END))
from transformers import pipeline, GPT2Tokenizer, GPT2LMHeadModel, GPTNeoForCausalLM
@@ -262,10 +336,13 @@ if(not vars.model in ["InferKit", "Colab"]):
print("{0}OK! {1} pipeline created!{2}".format(colors.GREEN, vars.model, colors.END))
else:
- # If we're running Colab, we still need a tokenizer.
+ # If we're running Colab or OAI, we still need a tokenizer.
if(vars.model == "Colab"):
from transformers import GPT2Tokenizer
tokenizer = GPT2Tokenizer.from_pretrained("EleutherAI/gpt-neo-2.7B")
+ elif(vars.model == "OAI"):
+ from transformers import GPT2Tokenizer
+ tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
# Set up Flask routes
@app.route('/')
@@ -354,10 +431,10 @@ def get_message(msg):
deleterequest()
elif(msg['cmd'] == 'memory'):
togglememorymode()
- elif(msg['cmd'] == 'save'):
- saveRequest()
- elif(msg['cmd'] == 'load'):
- loadRequest()
+ elif(msg['cmd'] == 'savetofile'):
+ savetofile()
+ elif(msg['cmd'] == 'loadfromfile'):
+ loadfromfile()
elif(msg['cmd'] == 'import'):
importRequest()
elif(msg['cmd'] == 'newgame'):
@@ -431,16 +508,29 @@ def get_message(msg):
commitwi(msg['data'])
elif(msg['cmd'] == 'aidgimport'):
importAidgRequest(msg['data'])
+ elif(msg['cmd'] == 'saveasrequest'):
+ saveas(msg['data'])
+ elif(msg['cmd'] == 'saverequest'):
+ save()
+ elif(msg['cmd'] == 'loadlistrequest'):
+ getloadlist()
+ elif(msg['cmd'] == 'loadselect'):
+ vars.loadselect = msg["data"]
+ elif(msg['cmd'] == 'loadrequest'):
+ loadRequest(getcwd()+"/stories/"+vars.loadselect+".json")
+ elif(msg['cmd'] == 'clearoverwrite'):
+ vars.svowname = ""
+ vars.saveow = False
#==================================================================#
-#
+# Send start message and tell Javascript to set UI state
#==================================================================#
def setStartState():
    emit('from_server', {'cmd': 'updatescreen', 'data': 'Welcome to KoboldAI Client! You are running '+vars.model+'.<br/>Please load a game or enter a prompt below to begin!'})
emit('from_server', {'cmd': 'setgamestate', 'data': 'start'})
#==================================================================#
-#
+# Transmit applicable settings to SocketIO to build UI sliders/toggles
#==================================================================#
def sendsettings():
# Send settings for selected AI type
@@ -459,7 +549,7 @@ def sendsettings():
vars.formatoptns[frm["id"]] = False;
#==================================================================#
-#
+# Take settings from vars and write them to client settings file
#==================================================================#
def savesettings():
# Build json to write
@@ -482,7 +572,7 @@ def savesettings():
file.close()
#==================================================================#
-#
+# Read settings from client file JSON and send to vars
#==================================================================#
def loadsettings():
if(path.exists("client.settings")):
@@ -513,7 +603,7 @@ def loadsettings():
file.close()
#==================================================================#
-#
+# Don't save settings unless 2 seconds have passed without modification
#==================================================================#
@debounce(2)
def settingschanged():
@@ -521,7 +611,7 @@ def settingschanged():
savesettings()
#==================================================================#
-#
+# Take input text from SocketIO and decide what to do with it
#==================================================================#
def actionsubmit(data):
# Ignore new submissions if the AI is currently busy
@@ -601,10 +691,12 @@ def calcsubmit(txt):
subtxt = vars.memory + winfo + anotetxt + vars.prompt
lnsub = lnmem + lnwi + lnprompt + lnanote
- if(vars.model != "Colab"):
+ if(not vars.model in ["Colab", "OAI"]):
generate(subtxt, lnsub+1, lnsub+vars.genamt)
- else:
+ elif(vars.model == "Colab"):
sendtocolab(subtxt, lnsub+1, lnsub+vars.genamt)
+ elif(vars.model == "OAI"):
+ oairequest(subtxt, lnsub+1, lnsub+vars.genamt)
else:
tokens = []
@@ -643,23 +735,28 @@ def calcsubmit(txt):
# Prepend Memory, WI, and Prompt before action tokens
tokens = memtokens + witokens + prompttkns + tokens
-
-
# Send completed bundle to generator
ln = len(tokens)
- if(vars.model != "Colab"):
+ if(not vars.model in ["Colab", "OAI"]):
generate (
tokenizer.decode(tokens),
ln+1,
ln+vars.genamt
)
- else:
+ elif(vars.model == "Colab"):
sendtocolab(
tokenizer.decode(tokens),
ln+1,
ln+vars.genamt
)
+ elif(vars.model == "OAI"):
+ oairequest(
+ tokenizer.decode(tokens),
+ ln+1,
+ ln+vars.genamt
+ )
+
# For InferKit web API
else:
@@ -1151,6 +1248,55 @@ def ikrequest(txt):
emit('from_server', {'cmd': 'errmsg', 'data': errmsg})
set_aibusy(0)
+#==================================================================#
+# Assembles game data into a request to OpenAI API
+#==================================================================#
+def oairequest(txt, min, max):
+ # Log request to console
+ print("{0}Len:{1}, Txt:{2}{3}".format(colors.YELLOW, len(txt), txt, colors.END))
+
+ # Store context in memory to use it for comparison with generated content
+ vars.lastctx = txt
+
+ # Build request JSON data
+ reqdata = {
+ 'prompt': txt,
+ 'max_tokens': max,
+ 'temperature': vars.temp,
+ 'top_p': vars.top_p,
+ 'n': 1,
+ 'stream': False
+ }
+
+ req = requests.post(
+ vars.oaiurl,
+ json = reqdata,
+ headers = {
+ 'Authorization': 'Bearer '+vars.oaiapikey,
+ 'Content-Type': 'application/json'
+ }
+ )
+
+ # Deal with the response
+ if(req.status_code == 200):
+ genout = req.json()["choices"][0]["text"]
+ print("{0}{1}{2}".format(colors.CYAN, genout, colors.END))
+ vars.actions.append(genout)
+ refresh_story()
+ emit('from_server', {'cmd': 'texteffect', 'data': len(vars.actions)})
+
+ set_aibusy(0)
+ else:
+ # Send error message to web client
+ er = req.json()
+ if("error" in er):
+ type = er["error"]["type"]
+ message = er["error"]["message"]
+
+ errmsg = "OpenAI API Error: {0} - {1}".format(type, message)
+ emit('from_server', {'cmd': 'errmsg', 'data': errmsg})
+ set_aibusy(0)
+
#==================================================================#
# Forces UI to Play mode
#==================================================================#
@@ -1164,11 +1310,44 @@ def exitModes():
vars.mode = "play"
#==================================================================#
-# Save the story to a file
+# Launch in-browser save prompt
#==================================================================#
-def saveRequest():
+def saveas(name):
+ # Check if filename exists already
+ name = utils.cleanfilename(name)
+ if(not fileops.saveexists(name) or (vars.saveow and vars.svowname == name)):
+ # All clear to save
+ saveRequest(getcwd()+"/stories/"+name+".json")
+ emit('from_server', {'cmd': 'hidesaveas', 'data': ''})
+ vars.saveow = False
+ vars.svowname = ""
+ else:
+ # File exists, prompt for overwrite
+ vars.saveow = True
+ vars.svowname = name
+ emit('from_server', {'cmd': 'askforoverwrite', 'data': ''})
+
+#==================================================================#
+# Save the currently running story
+#==================================================================#
+def save():
+ # Check if a file is currently open
+ if(".json" in vars.savedir):
+ saveRequest(vars.savedir)
+ else:
+ emit('from_server', {'cmd': 'saveas', 'data': ''})
+
+#==================================================================#
+# Save the story via file browser
+#==================================================================#
+def savetofile():
savpath = fileops.getsavepath(vars.savedir, "Save Story As", [("Json", "*.json")])
-
+ saveRequest(savpath)
+
+#==================================================================#
+# Save the story to specified path
+#==================================================================#
+def saveRequest(savpath):
if(savpath):
# Leave Edit/Memory mode before continuing
exitModes()
@@ -1200,12 +1379,23 @@ def saveRequest():
finally:
file.close()
+#==================================================================#
+# Load a saved story via file browser
+#==================================================================#
+def getloadlist():
+ emit('from_server', {'cmd': 'buildload', 'data': fileops.getstoryfiles()})
+
+#==================================================================#
+# Load a saved story via file browser
+#==================================================================#
+def loadfromfile():
+ loadpath = fileops.getloadpath(vars.savedir, "Select Story File", [("Json", "*.json")])
+ loadRequest(loadpath)
+
#==================================================================#
# Load a stored story from a file
#==================================================================#
-def loadRequest():
- loadpath = fileops.getloadpath(vars.savedir, "Select Story File", [("Json", "*.json")])
-
+def loadRequest(loadpath):
if(loadpath):
# Leave Edit/Memory mode before continuing
exitModes()
@@ -1242,6 +1432,12 @@ def loadRequest():
file.close()
+ # Save path for save button
+ vars.savedir = loadpath
+
+ # Clear loadselect var
+ vars.loadselect = ""
+
# Refresh game screen
sendwi()
refresh_story()
@@ -1351,6 +1547,9 @@ def importgame():
# Clear import data
vars.importjs = {}
+ # Reset current save
+ vars.savedir = getcwd()+"\stories"
+
# Refresh game screen
sendwi()
refresh_story()
@@ -1388,6 +1587,9 @@ def importAidgRequest(id):
})
num += 1
+ # Reset current save
+ vars.savedir = getcwd()+"\stories"
+
# Refresh game screen
sendwi()
refresh_story()
@@ -1397,30 +1599,26 @@ def importAidgRequest(id):
# Starts a new story
#==================================================================#
def newGameRequest():
- # Ask for confirmation
- root = tk.Tk()
- root.attributes("-topmost", True)
- confirm = tk.messagebox.askquestion("Confirm New Game", "Really start new Story?")
- root.destroy()
+ # Leave Edit/Memory mode before continuing
+ exitModes()
- if(confirm == "yes"):
- # Leave Edit/Memory mode before continuing
- exitModes()
-
- # Clear vars values
- vars.gamestarted = False
- vars.prompt = ""
- vars.memory = ""
- vars.actions = []
- vars.savedir = getcwd()+"\stories"
- vars.authornote = ""
- vars.worldinfo = []
- vars.lastact = ""
- vars.lastctx = ""
-
- # Refresh game screen
- sendwi()
- setStartState()
+ # Clear vars values
+ vars.gamestarted = False
+ vars.prompt = ""
+ vars.memory = ""
+ vars.actions = []
+
+ vars.authornote = ""
+ vars.worldinfo = []
+ vars.lastact = ""
+ vars.lastctx = ""
+
+ # Reset current save
+ vars.savedir = getcwd()+"\stories"
+
+ # Refresh game screen
+ sendwi()
+ setStartState()
#==================================================================#
diff --git a/fileops.py b/fileops.py
index e17d85c0..dc5cb66e 100644
--- a/fileops.py
+++ b/fileops.py
@@ -1,5 +1,7 @@
import tkinter as tk
from tkinter import filedialog
+from os import getcwd, listdir, path
+import json
#==================================================================#
# Generic Method for prompting for file path
@@ -50,4 +52,26 @@ def getdirpath(dir, title):
if(path != "" and path != None):
return path
else:
- return None
\ No newline at end of file
+ return None
+
+#==================================================================#
+# Returns an array of dicts containing story files in /stories
+#==================================================================#
+def getstoryfiles():
+ list = []
+ for file in listdir(getcwd()+"/stories"):
+ if file.endswith(".json"):
+ ob = {}
+ ob["name"] = file.replace(".json", "")
+ f = open(getcwd()+"/stories/"+file, "r")
+ js = json.load(f)
+ f.close()
+ ob["actions"] = len(js["actions"])
+ list.append(ob)
+ return list
+
+#==================================================================#
+# Returns True if json file exists with requested save name
+#==================================================================#
+def saveexists(name):
+ return path.exists(getcwd()+"/stories/"+name+".json")
\ No newline at end of file
diff --git a/install_requirements.bat b/install_requirements.bat
index 39a9971b..4f5e5b11 100644
--- a/install_requirements.bat
+++ b/install_requirements.bat
@@ -1 +1 @@
-pip install -r requirements.txt
\ No newline at end of file
+start cmd /k pip install -r requirements.txt
\ No newline at end of file
diff --git a/play.bat b/play.bat
index 75f00927..2d9db428 100644
--- a/play.bat
+++ b/play.bat
@@ -1 +1 @@
-py -3 aiserver.py
\ No newline at end of file
+start cmd /k py -3 aiserver.py
\ No newline at end of file
diff --git a/static/application.js b/static/application.js
index 8ac1ef61..7adbbc62 100644
--- a/static/application.js
+++ b/static/application.js
@@ -9,6 +9,8 @@ var socket;
var connect_status;
var button_newgame;
var button_save;
+var button_saveas;
+var button_savetofile;
var button_load;
var button_import;
var button_impaidg;
@@ -40,6 +42,18 @@ var aidgpopup;
var aidgpromptnum;
var aidg_accept;
var aidg_close;
+var saveaspopup;
+var saveasinput;
+var saveas_accept;
+var saveas_close;
+var saveasoverwrite;
+var loadpopup;
+var loadcontent;
+var load_accept;
+var load_close;
+var nspopup;
+var ns_accept;
+var ns_close;
// Key states
var shift_down = false;
@@ -201,6 +215,7 @@ function hideWiDeleteConfirm(num) {
function highlightImportLine(ref) {
$("#popupcontent > div").removeClass("popuplistselected");
ref.addClass("popuplistselected");
+ enableButtons([popup_accept]);
}
function enableButtons(refs) {
@@ -270,6 +285,7 @@ function popupShow(state) {
if(state) {
popup.removeClass("hidden");
popup.addClass("flex");
+ disableButtons([popup_accept]);
} else {
popup.removeClass("flex");
popup.addClass("hidden");
@@ -385,6 +401,68 @@ function sendAidgImportRequest() {
aidgpromptnum.val("");
}
+function showSaveAsPopup() {
+ disableButtons([saveas_accept]);
+ saveaspopup.removeClass("hidden");
+ saveaspopup.addClass("flex");
+ saveasinput.focus();
+}
+
+function hideSaveAsPopup() {
+ saveaspopup.removeClass("flex");
+ saveaspopup.addClass("hidden");
+ saveasinput.val("");
+ hide([saveasoverwrite]);
+}
+
+function sendSaveAsRequest() {
+ socket.send({'cmd': 'saveasrequest', 'data': saveasinput.val()});
+}
+
+function showLoadPopup() {
+ loadpopup.removeClass("hidden");
+ loadpopup.addClass("flex");
+}
+
+function hideLoadPopup() {
+ loadpopup.removeClass("flex");
+ loadpopup.addClass("hidden");
+ loadcontent.html("");
+}
+
+function buildLoadList(ar) {
+ disableButtons([load_accept]);
+ loadcontent.html("");
+ showLoadPopup();
+ var i;
+	for(i=0; i<ar.length; i++) {
+		loadcontent.append("<div class=\"loadlistitem\" id=\"load"+i+"\" name=\""+ar[i].name+"\">\
+		<div>"+ar[i].name+"</div>\
+		<div>"+ar[i].actions+"</div>\
+		</div>");
+ $("#load"+i).on("click", function () {
+ enableButtons([load_accept]);
+ socket.send({'cmd': 'loadselect', 'data': $(this).attr("name")});
+ highlightLoadLine($(this));
+ });
+ }
+}
+
+function highlightLoadLine(ref) {
+ $("#loadlistcontent > div").removeClass("popuplistselected");
+ ref.addClass("popuplistselected");
+}
+
+function showNewStoryPopup() {
+ nspopup.removeClass("hidden");
+ nspopup.addClass("flex");
+}
+
+function hideNewStoryPopup() {
+ nspopup.removeClass("flex");
+ nspopup.addClass("hidden");
+}
+
//=================================================================//
// READY/RUNTIME
//=================================================================//
@@ -392,40 +470,55 @@ function sendAidgImportRequest() {
$(document).ready(function(){
// Bind UI references
- connect_status = $('#connectstatus');
- button_newgame = $('#btn_newgame');
- button_save = $('#btn_save');
- button_load = $('#btn_load');
- button_import = $("#btn_import");
- button_impaidg = $("#btn_impaidg");
- button_settings = $('#btn_settings');
- button_format = $('#btn_format');
- button_send = $('#btnsend');
- button_actedit = $('#btn_actedit');
- button_actmem = $('#btn_actmem');
- button_actback = $('#btn_actundo');
- button_actretry = $('#btn_actretry');
- button_delete = $('#btn_delete');
- button_actwi = $('#btn_actwi');
- game_text = $('#gametext');
- input_text = $('#input_text');
- message_text = $('#messagefield');
- settings_menu = $("#settingsmenu");
- format_menu = $('#formatmenu');
- anote_menu = $('#anoterowcontainer');
- wi_menu = $('#wimenu');
- anote_input = $('#anoteinput');
- anote_labelcur = $('#anotecur');
- anote_slider = $('#anotedepth');
- popup = $("#popupcontainer");
- popup_title = $("#popuptitletext");
- popup_content = $("#popupcontent");
- popup_accept = $("#btn_popupaccept");
- popup_close = $("#btn_popupclose");
- aidgpopup = $("#aidgpopupcontainer");
- aidgpromptnum = $("#aidgpromptnum");
- aidg_accept = $("#btn_aidgpopupaccept");
- aidg_close = $("#btn_aidgpopupclose");
+ connect_status = $('#connectstatus');
+ button_newgame = $('#btn_newgame');
+ button_save = $('#btn_save');
+ button_saveas = $('#btn_saveas');
+ button_savetofile = $('#btn_savetofile');
+ button_load = $('#btn_load');
+ button_loadfrfile = $('#btn_loadfromfile');
+ button_import = $("#btn_import");
+ button_impaidg = $("#btn_impaidg");
+ button_settings = $('#btn_settings');
+ button_format = $('#btn_format');
+ button_send = $('#btnsend');
+ button_actedit = $('#btn_actedit');
+ button_actmem = $('#btn_actmem');
+ button_actback = $('#btn_actundo');
+ button_actretry = $('#btn_actretry');
+ button_delete = $('#btn_delete');
+ button_actwi = $('#btn_actwi');
+ game_text = $('#gametext');
+ input_text = $('#input_text');
+ message_text = $('#messagefield');
+ settings_menu = $("#settingsmenu");
+ format_menu = $('#formatmenu');
+ anote_menu = $('#anoterowcontainer');
+ wi_menu = $('#wimenu');
+ anote_input = $('#anoteinput');
+ anote_labelcur = $('#anotecur');
+ anote_slider = $('#anotedepth');
+ popup = $("#popupcontainer");
+ popup_title = $("#popuptitletext");
+ popup_content = $("#popupcontent");
+ popup_accept = $("#btn_popupaccept");
+ popup_close = $("#btn_popupclose");
+ aidgpopup = $("#aidgpopupcontainer");
+ aidgpromptnum = $("#aidgpromptnum");
+ aidg_accept = $("#btn_aidgpopupaccept");
+ aidg_close = $("#btn_aidgpopupclose");
+ saveaspopup = $("#saveascontainer");
+ saveasinput = $("#savename");
+ saveas_accept = $("#btn_saveasaccept");
+ saveas_close = $("#btn_saveasclose");
+ saveasoverwrite = $("#saveasoverwrite");
+ loadpopup = $("#loadcontainer");
+ loadcontent = $("#loadlistcontent");
+ load_accept = $("#btn_loadaccept");
+ load_close = $("#btn_loadclose");
+ nspopup = $("#newgamecontainer");
+ ns_accept = $("#btn_nsaccept");
+ ns_close = $("#btn_nsclose");
// Connect to SocketIO server
loc = window.document.location;
@@ -465,10 +558,14 @@ $(document).ready(function(){
hide([wi_menu, button_delete]);
show([game_text, button_actedit, button_actmem, button_actwi, button_actback, button_actretry]);
hideMessage();
+ hideWaitAnimation();
button_actedit.html("Edit");
button_actmem.html("Memory");
button_actwi.html("W Info");
hideAidgPopup();
+ hideSaveAsPopup();
+ hideLoadPopup();
+ hideNewStoryPopup();
}
} else if(msg.cmd == "editmode") {
// Enable or Disable edit mode
@@ -598,8 +695,20 @@ $(document).ready(function(){
} else if(msg.cmd == "requestwiitem") {
// Package WI contents and send back to server
returnWiList(msg.data);
+ } else if(msg.cmd == "saveas") {
+ // Show Save As prompt
+ showSaveAsPopup();
+ } else if(msg.cmd == "hidesaveas") {
+ // Hide Save As prompt
+ hideSaveAsPopup();
+ } else if(msg.cmd == "buildload") {
+ // Send array of save files to load UI
+ buildLoadList(msg.data);
+ } else if(msg.cmd == "askforoverwrite") {
+ // Show overwrite warning
+ show([saveasoverwrite]);
}
- });
+ });
socket.on('disconnect', function() {
connect_status.html("Lost connection...");
@@ -632,22 +741,18 @@ $(document).ready(function(){
socket.send({'cmd': 'memory', 'data': ''});
});
- button_save.on("click", function(ev) {
- socket.send({'cmd': 'save', 'data': ''});
+ button_savetofile.on("click", function(ev) {
+ socket.send({'cmd': 'savetofile', 'data': ''});
});
- button_load.on("click", function(ev) {
- socket.send({'cmd': 'load', 'data': ''});
+ button_loadfrfile.on("click", function(ev) {
+ socket.send({'cmd': 'loadfromfile', 'data': ''});
});
button_import.on("click", function(ev) {
socket.send({'cmd': 'import', 'data': ''});
});
- button_newgame.on("click", function(ev) {
- socket.send({'cmd': 'newgame', 'data': ''});
- });
-
button_settings.on("click", function(ev) {
$('#settingsmenu').slideToggle("slow");
});
@@ -680,6 +785,58 @@ $(document).ready(function(){
sendAidgImportRequest();
});
+ button_save.on("click", function(ev) {
+ socket.send({'cmd': 'saverequest', 'data': ''});
+ });
+
+ button_saveas.on("click", function(ev) {
+ showSaveAsPopup();
+ });
+
+ saveas_close.on("click", function(ev) {
+ hideSaveAsPopup();
+ socket.send({'cmd': 'clearoverwrite', 'data': ''});
+ });
+
+ saveas_accept.on("click", function(ev) {
+ sendSaveAsRequest();
+ });
+
+ button_load.on("click", function(ev) {
+ socket.send({'cmd': 'loadlistrequest', 'data': ''});
+ });
+
+ load_close.on("click", function(ev) {
+ hideLoadPopup();
+ });
+
+ load_accept.on("click", function(ev) {
+ socket.send({'cmd': 'loadrequest', 'data': ''});
+ hideLoadPopup();
+ });
+
+ button_newgame.on("click", function(ev) {
+ showNewStoryPopup();
+ });
+
+ ns_accept.on("click", function(ev) {
+ socket.send({'cmd': 'newgame', 'data': ''});
+ hideNewStoryPopup();
+ });
+
+ ns_close.on("click", function(ev) {
+ hideNewStoryPopup();
+ });
+
+ saveasinput.on("input", function () {
+ if(saveasinput.val() == "") {
+ disableButtons([saveas_accept]);
+ } else {
+ enableButtons([saveas_accept]);
+ }
+ hide([saveasoverwrite]);
+ });
+
// Bind Enter button to submit
input_text.keydown(function (ev) {
if (ev.which == 13 && !shift_down) {
@@ -705,5 +862,11 @@ $(document).ready(function(){
sendAidgImportRequest();
}
});
+
+ saveasinput.keydown(function (ev) {
+ if (ev.which == 13 && saveasinput.val() != "") {
+ sendSaveAsRequest();
+ }
+ });
});
diff --git a/static/custom.css b/static/custom.css
index 8ee0791b..56db8eae 100644
--- a/static/custom.css
+++ b/static/custom.css
@@ -226,6 +226,43 @@ chunk {
color: #ffffff;
}
+#saveaspopup {
+ width: 350px;
+ background-color: #262626;
+ margin-top: 200px;
+}
+
+#saveasoverwrite {
+ color: #ff9900;
+ font-weight: bold;
+ text-align: center;
+}
+
+#loadpopup {
+ width: 500px;
+ background-color: #262626;
+ margin-top: 100px;
+}
+
+@media (max-width: 768px) {
+ #loadpopup {
+ width: 100%;
+ background-color: #262626;
+ margin-top: 100px;
+ }
+}
+
+#loadlistcontent {
+ height: 325px;
+ overflow-y: scroll;
+}
+
+#nspopup {
+ width: 350px;
+ background-color: #262626;
+ margin-top: 200px;
+}
+
/*================= Classes =================*/
.aidgpopupcontent {
@@ -381,6 +418,29 @@ chunk {
text-align: right;
}
+.loadlistheader {
+ padding-left: 10px;
+ display: grid;
+ grid-template-columns: 80% 20%;
+ color: #737373;
+}
+
+.loadlistitem {
+ padding: 5px 10px 5px 10px;
+ display: grid;
+ grid-template-columns: 80% 20%;
+ color: #ffffff;
+
+ -moz-transition: background-color 0.25s ease-in;
+ -o-transition: background-color 0.25s ease-in;
+ -webkit-transition: background-color 0.25s ease-in;
+}
+
+.loadlistitem:hover {
+ cursor: pointer;
+ background-color: #688f1f;
+}
+
.navbar .navbar-nav .nav-link:hover {
border-radius: 5px;
background-color: #98bcdb;
diff --git a/templates/index.html b/templates/index.html
index 6ad40e70..569dd6ed 100644
--- a/templates/index.html
+++ b/templates/index.html
@@ -6,13 +6,13 @@
-
+
-
+
+
+
+