Bugfix for the save function not appending the .json extension by default

Bugfix for the New Story function not clearing World Info from the previous story
Torch will no longer be initialized unless you select a local model, as there is no reason to invoke it for InferKit/Colab
Changed JSON file writes to use indentation for readability
This commit is contained in:
KoboldAI Dev 2021-05-15 19:29:41 -04:00
parent 429c9b13f5
commit 2cef3bceaf
2 changed files with 40 additions and 39 deletions

View File

@ -9,7 +9,6 @@ from os import path, getcwd
import tkinter as tk import tkinter as tk
from tkinter import messagebox from tkinter import messagebox
import json import json
import torch
# KoboldAI # KoboldAI
import fileops import fileops
@ -84,15 +83,10 @@ class vars:
# Function to get model selection at startup # Function to get model selection at startup
#==================================================================# #==================================================================#
def getModelSelection(): def getModelSelection():
print(" # Model {0}\n ===================================" print(" # Model V/RAM\n =========================================")
.format("VRAM" if vars.hascuda else " "))
i = 1 i = 1
for m in modellist: for m in modellist:
if(vars.hascuda): print(" {0} - {1}\t\t{2}".format("{:<2}".format(i), m[0].ljust(15), m[2]))
print(" {0} - {1}\t\t{2}".format(i, m[0].ljust(15), m[2]))
else:
print(" {0} - {1}".format(i, m[0]))
i += 1 i += 1
print(" "); print(" ");
modelsel = 0 modelsel = 0
@ -123,7 +117,14 @@ def getModelSelection():
# Startup # Startup
#==================================================================# #==================================================================#
# Select a model to run
print("{0}Welcome to the KoboldAI Client!\nSelect an AI model to continue:{1}\n".format(colors.CYAN, colors.END))
getModelSelection()
# If transformers model was selected & GPU available, ask to use CPU or GPU
if(not vars.model in ["InferKit", "Colab"]):
# Test for GPU support # Test for GPU support
import torch
print("{0}Looking for GPU support...{1}".format(colors.PURPLE, colors.END), end="") print("{0}Looking for GPU support...{1}".format(colors.PURPLE, colors.END), end="")
vars.hascuda = torch.cuda.is_available() vars.hascuda = torch.cuda.is_available()
if(vars.hascuda): if(vars.hascuda):
@ -131,13 +132,9 @@ if(vars.hascuda):
else: else:
print("{0}NOT FOUND!{1}".format(colors.YELLOW, colors.END)) print("{0}NOT FOUND!{1}".format(colors.YELLOW, colors.END))
# Select a model to run
print("{0}Welcome to the KoboldAI Client!\nSelect an AI model to continue:{1}\n".format(colors.CYAN, colors.END))
getModelSelection()
# If transformers model was selected & GPU available, ask to use CPU or GPU
if((not vars.model in ["InferKit", "Colab"]) and vars.hascuda):
print("{0}Use GPU or CPU for generation?: (Default GPU){1}\n".format(colors.CYAN, colors.END)) print("{0}Use GPU or CPU for generation?: (Default GPU){1}\n".format(colors.CYAN, colors.END))
if(vars.hascuda):
print(" 1 - GPU\n 2 - CPU\n") print(" 1 - GPU\n 2 - CPU\n")
genselected = False genselected = False
while(genselected == False): while(genselected == False):
@ -163,7 +160,8 @@ if(vars.model == "InferKit"):
# Write API key to file # Write API key to file
file = open("client.settings", "w") file = open("client.settings", "w")
try: try:
file.write("{\"apikey\": \""+vars.apikey+"\"}") js = {"apikey": vars.apikey}
file.write(json.dumps(js, indent=3))
finally: finally:
file.close() file.close()
else: else:
@ -183,7 +181,7 @@ if(vars.model == "InferKit"):
# Write API key to file # Write API key to file
file = open("client.settings", "w") file = open("client.settings", "w")
try: try:
file.write(json.dumps(js)) file.write(json.dumps(js, indent=3))
finally: finally:
file.close() file.close()
@ -456,7 +454,7 @@ def savesettings():
# Write it # Write it
file = open("client.settings", "w") file = open("client.settings", "w")
try: try:
file.write(json.dumps(js)) file.write(json.dumps(js, indent=3))
finally: finally:
file.close() file.close()
@ -712,7 +710,7 @@ def generate(txt, min, max):
#==================================================================# #==================================================================#
def sendtocolab(txt, min, max): def sendtocolab(txt, min, max):
# Log request to console # Log request to console
print("{0}Len:{1}, Txt:{2}{3}".format(colors.YELLOW, len(txt), txt, colors.END)) print("{0}Tokens:{1}, Txt:{2}{3}".format(colors.YELLOW, min-1, txt, colors.END))
# Build request JSON data # Build request JSON data
reqdata = { reqdata = {
@ -752,7 +750,7 @@ def sendtocolab(txt, min, max):
elif("errors" in er): elif("errors" in er):
code = er["errors"][0]["extensions"]["code"] code = er["errors"][0]["extensions"]["code"]
errmsg = "InferKit API Error: {0} - {1}".format(req.status_code, code) errmsg = "Colab API Error: {0} - {1}".format(req.status_code, code)
emit('from_server', {'cmd': 'errmsg', 'data': errmsg}) emit('from_server', {'cmd': 'errmsg', 'data': errmsg})
set_aibusy(0) set_aibusy(0)
@ -1148,7 +1146,7 @@ def saveRequest():
# Write it # Write it
file = open(savpath, "w") file = open(savpath, "w")
try: try:
file.write(json.dumps(js)) file.write(json.dumps(js, indent=3))
finally: finally:
file.close() file.close()
@ -1303,8 +1301,10 @@ def newGameRequest():
vars.actions = [] vars.actions = []
vars.savedir = getcwd()+"\stories" vars.savedir = getcwd()+"\stories"
vars.authornote = "" vars.authornote = ""
vars.worldinfo = []
# Refresh game screen # Refresh game screen
sendwi()
setStartState() setStartState()

View File

@ -10,7 +10,8 @@ def getsavepath(dir, title, types):
path = tk.filedialog.asksaveasfile( path = tk.filedialog.asksaveasfile(
initialdir=dir, initialdir=dir,
title=title, title=title,
filetypes = types filetypes = types,
defaultextension="*.*"
) )
root.destroy() root.destroy()
if(path != "" and path != None): if(path != "" and path != None):