More polishing

Improved the default settings and made a clearer distinction between client and server: the Python parts are now called the server and the browser the client, so the naming matches what you'd expect from a client and a server. The model name is also shown now instead of NeoCustom.
This commit is contained in:
henk717 2021-09-18 21:50:23 +02:00
parent 1df051a420
commit b264823fed
2 changed files with 6 additions and 6 deletions


@@ -1,5 +1,5 @@
 #==================================================================#
-# KoboldAI Client
+# KoboldAI
 # Version: 1.16.0
 # By: KoboldAIDev and the KoboldAI Community
 #==================================================================#
@@ -99,7 +99,7 @@ class vars:
     hascuda     = False  # Whether torch has detected CUDA on the system
     usegpu      = False  # Whether to launch pipeline with GPU support
     custmodpth  = ""     # Filesystem location of custom model to run
-    formatoptns = {}     # Container for state of formatting options
+    formatoptns = {'frmttriminc': True, 'frmtrmblln': False, 'frmtrmspch': False, 'frmtadsnsp': False}  # Container for state of formatting options
     importnum   = -1     # Selection on import popup list
     importjs    = {}     # Temporary storage for import data
     loadselect  = ""     # Temporary storage for filename to load
@@ -210,7 +210,7 @@ if args.model:
         vars.colaburl = args.path + "/request"; # Lets just use the same parameter to keep it simple
 else:
-    print("{0}Welcome to the KoboldAI Client!\nSelect an AI model to continue:{1}\n".format(colors.CYAN, colors.END))
+    print("{0}Welcome to the KoboldAI Server!\nSelect an AI model to continue:{1}\n".format(colors.CYAN, colors.END))
     getModelSelection()
 # If transformers model was selected & GPU available, ask to use CPU or GPU
@@ -785,7 +785,7 @@ def get_message(msg):
 # Send start message and tell Javascript to set UI state
 #==================================================================#
 def setStartState():
-    txt = "<span>Welcome to <span class=\"color_cyan\">KoboldAI Client</span>! You are running <span class=\"color_green\">"+vars.model+"</span>.<br/>"
+    txt = "<span>Welcome to <span class=\"color_cyan\">KoboldAI</span>! You are running <span class=\"color_green\">"+getmodelname()+"</span>.<br/>"
     if(not vars.noai):
         txt = txt + "Please load a game or enter a prompt below to begin!</span>"
     else:
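The welcome text now calls getmodelname() rather than reading vars.model directly, which is what replaces the generic NeoCustom label with the real model name. Its definition is not part of this hunk; below is a minimal hypothetical sketch of such a resolver, assuming custom models are displayed by the folder name of the custom model path (the parameterized signature and the example folder are assumptions, not the committed code).

    import os

    def getmodelname(model, custmodpth):
        # Hypothetical sketch: the committed helper presumably reads these values
        # from the server's vars class instead of taking them as parameters.
        if model == "NeoCustom":
            # A locally loaded custom model: display its folder name.
            return os.path.basename(os.path.normpath(custmodpth))
        # Any other model id is already descriptive enough to show as-is.
        return model

    # Example with a made-up local model folder:
    print(getmodelname("NeoCustom", "models/gpt-neo-2.7B-horni"))  # -> gpt-neo-2.7B-horni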
@@ -833,7 +833,7 @@ def savesettings():
     js["widepth"]   = vars.widepth
     js["useprompt"] = vars.useprompt
     js["adventure"] = vars.adventure
     # Write it
     if not os.path.exists('settings'):
         os.mkdir('settings')
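The hunk above stops right before the actual write. As a rough illustration of where it leads, here is a hypothetical sketch of persisting the js dict under the settings directory the diff creates; the filename pattern and JSON layout are assumptions, not the committed code.

    import json
    import os

    def write_settings(js, modelname):
        # Hypothetical sketch of the "Write it" step: persist the collected
        # settings dict as JSON inside the settings/ folder created above.
        if not os.path.exists('settings'):
            os.mkdir('settings')
        with open('settings/' + modelname + '.settings', 'w') as f:
            json.dump(js, f, indent=3)

    # Example call with made-up values:
    write_settings({"widepth": 1, "useprompt": True, "adventure": False}, "ExampleModel")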


@@ -1,6 +1,6 @@
 @echo off
 cd %~dp0
-TITLE KoboldAI - Client
+TITLE KoboldAI - Server
 SET /P M=<loader.settings
 IF %M%==1 GOTO drivemap
 IF %M%==2 GOTO subfolder