Mirror of https://github.com/KoboldAI/KoboldAI-Client.git (synced 2025-06-05 21:59:24 +02:00)

Commit: Fix for united merge

Changed: aiserver.py (104 lines)
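The bulk of this commit renames leftover references to the legacy global state object `vars` to the merged `koboldai_vars` object. Beyond consistency with the united branch, the old name shadows Python's builtin vars() in any module that defines it. A standalone sketch of that hazard (illustration only, not code from the commit):

# A module-level global named `vars` shadows Python's builtin vars():
class State:
    pass

vars = State()           # pre-merge style: global state object called `vars`
vars.model = "ReadOnly"

try:
    vars(object())       # the builtin is gone; this calls the State instance
except TypeError as e:
    print(e)             # 'State' object is not callable

koboldai_vars = vars     # the united merge's rename avoids the collision
print(koboldai_vars.model)   # ReadOnly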
@@ -475,8 +475,8 @@ def get_config_filename(model_name = None):
         return(f"settings/{model_name.replace('/', '_')}.settings")
     elif args.configname:
         return(f"settings/{args.configname.replace('/', '_')}.settings")
-    elif vars.configname != '':
-        return(f"settings/{vars.configname.replace('/', '_')}.settings")
+    elif koboldai_vars.configname != '':
+        return(f"settings/{koboldai_vars.configname.replace('/', '_')}.settings")
     else:
         print(f"Empty configfile name sent back. Defaulting to ReadOnly")
         return(f"settings/ReadOnly.settings")
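For reference, the resolution chain this hunk patches can be exercised like so (the model name is hypothetical, not from the commit):

# Hypothetical calls against the function patched above:
get_config_filename("KoboldAI/fairseq-dense-13B")
#   -> "settings/KoboldAI_fairseq-dense-13B.settings"  (slashes become underscores)
get_config_filename()
#   -> falls through args.configname, then koboldai_vars.configname,
#      else "settings/ReadOnly.settings"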
@@ -591,8 +591,8 @@ def check_if_dir_is_model(path):
 # Return Model Name
 #==================================================================#
 def getmodelname():
-    if(vars.online_model != ''):
-        return(f"{vars.model}/{vars.online_model}")
+    if(koboldai_vars.online_model != ''):
+        return(f"{koboldai_vars.model}/{koboldai_vars.online_model}")
     if(koboldai_vars.model in ("NeoCustom", "GPT2Custom", "TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX")):
         modelname = os.path.basename(os.path.normpath(koboldai_vars.custmodpth))
         return modelname
@@ -1073,9 +1073,9 @@ def general_startup(override_args=None):
     koboldai_vars.revision = args.revision

     if args.apikey:
-        vars.apikey = args.apikey
+        koboldai_vars.apikey = args.apikey
     if args.req_model:
-        vars.cluster_requested_models = args.req_model
+        koboldai_vars.cluster_requested_models = args.req_model

     if args.colab:
         args.remote = True;
@@ -1336,8 +1336,8 @@ def get_oai_models(data):
         emit('from_server', {'cmd': 'errmsg', 'data': req.json()})

 def get_cluster_models(msg):
-    vars.oaiapikey = msg['key']
-    vars.apikey = vars.oaiapikey
+    koboldai_vars.oaiapikey = msg['key']
+    koboldai_vars.apikey = koboldai_vars.oaiapikey
     url = msg['url']


@@ -1362,20 +1362,20 @@ def get_cluster_models(msg):
     # If the client settings file doesn't exist, create it
     # Write API key to file
     os.makedirs('settings', exist_ok=True)
-    if path.exists(get_config_filename(vars.model_selected)):
-        with open(get_config_filename(vars.model_selected), "r") as file:
+    if path.exists(get_config_filename(koboldai_vars.model_selected)):
+        with open(get_config_filename(koboldai_vars.model_selected), "r") as file:
             js = json.load(file)
             if 'online_model' in js:
                 online_model = js['online_model']
             if "apikey" in js:
-                if js['apikey'] != vars.oaiapikey:
+                if js['apikey'] != koboldai_vars.oaiapikey:
                     changed=True
             else:
                 changed=True
     if changed:
         js={}
-        with open(get_config_filename(vars.model_selected), "w") as file:
-            js["apikey"] = vars.oaiapikey
+        with open(get_config_filename(koboldai_vars.model_selected), "w") as file:
+            js["apikey"] = koboldai_vars.oaiapikey
             file.write(json.dumps(js, indent=3))

     emit('from_server', {'cmd': 'oai_engines', 'data': engines, 'online_model': online_model}, broadcast=True)
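The read/compare/rewrite above persists the cluster API key next to the selected model's settings. A self-contained sketch of the same round-trip, with a placeholder filename and key (both hypothetical, not values from the commit):

import json
import os
from os import path

os.makedirs('settings', exist_ok=True)
fname = "settings/CLUSTER.settings"   # placeholder config filename
apikey = "0000000000"                 # placeholder key
changed = True
if path.exists(fname):
    with open(fname, "r") as file:
        js = json.load(file)
    changed = js.get("apikey") != apikey
if changed:
    with open(fname, "w") as file:
        file.write(json.dumps({"apikey": apikey}, indent=3))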
@@ -1896,7 +1896,7 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
     model = None
     generator = None
     model_config = None
-    vars.online_model = ''
+    koboldai_vars.online_model = ''
     with torch.no_grad():
         with warnings.catch_warnings():
             warnings.filterwarnings("ignore", message="torch.distributed.reduce_op is deprecated")
@@ -1917,7 +1917,7 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
         koboldai_vars.badwordsids = koboldai_settings.badwordsids_default

     if online_model == "":
-        vars.configname = vars.model.replace('/', '_')
+        koboldai_vars.configname = koboldai_vars.model.replace('/', '_')
     #Let's set the GooseAI or OpenAI server URLs if that's applicable
     else:
         koboldai_vars.online_model = online_model
@@ -1954,7 +1954,7 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
             koboldai_vars.oaiengines = "https://api.goose.ai/v1/engines"
             koboldai_vars.model = "OAI"
             args.configname = "GooseAI" + "/" + online_model
-        elif vars.model != "CLUSTER":
+        elif koboldai_vars.model != "CLUSTER":
             args.configname = koboldai_vars.model + "/" + online_model
         koboldai_vars.oaiurl = koboldai_vars.oaiengines + "/{0}/completions".format(online_model)

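The completions URL assembled above expands, for a hypothetical engine id, to:

# Illustration only; "gpt-neo-20b" is a placeholder engine id:
"https://api.goose.ai/v1/engines" + "/{0}/completions".format("gpt-neo-20b")
# -> "https://api.goose.ai/v1/engines/gpt-neo-20b/completions"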
@@ -2042,12 +2042,12 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
     if(koboldai_vars.model == "GooseAI"):
         koboldai_vars.oaiengines = "https://api.goose.ai/v1/engines"
         koboldai_vars.model = "OAI"
-        vars.configname = "GooseAI"
+        koboldai_vars.configname = "GooseAI"

     # Ask for API key if OpenAI was selected
     if(koboldai_vars.model == "OAI"):
-        if not vars.configname:
-            vars.configname = "OAI"
+        if not koboldai_vars.configname:
+            koboldai_vars.configname = "OAI"

     if(koboldai_vars.model == "ReadOnly"):
         koboldai_vars.noai = True
@@ -3670,7 +3670,7 @@ def get_message(msg):
         sendModelSelection(menu=msg['data'])
     elif(msg['cmd'] == 'load_model'):
         print(msg)
-        print(vars.model_selected)
+        print(koboldai_vars.model_selected)
         if not os.path.exists("settings/"):
             os.mkdir("settings")
         changed = True
@@ -3694,14 +3694,14 @@ def get_message(msg):
             f.close()
         koboldai_vars.colaburl = msg['url'] + "/request"
         koboldai_vars.model = koboldai_vars.model_selected
-        if vars.model == "CLUSTER":
+        if koboldai_vars.model == "CLUSTER":
             if type(msg['online_model']) is not list:
                 if msg['online_model'] == '':
-                    vars.cluster_requested_models = []
+                    koboldai_vars.cluster_requested_models = []
                 else:
-                    vars.cluster_requested_models = [msg['online_model']]
+                    koboldai_vars.cluster_requested_models = [msg['online_model']]
             else:
-                vars.cluster_requested_models = msg['online_model']
+                koboldai_vars.cluster_requested_models = msg['online_model']
         load_model(use_gpu=msg['use_gpu'], gpu_layers=msg['gpu_layers'], disk_layers=msg['disk_layers'], online_model=msg['online_model'])
     elif(msg['cmd'] == 'show_model'):
         print("Model Name: {}".format(getmodelname()))
@@ -4406,7 +4406,7 @@ def calcsubmitbudget(actionlen, winfo, mem, anotetxt, actions, submission=None,
     if(actionlen == 0):
         # First/Prompt action
         tokens = (tokenizer._koboldai_header if koboldai_vars.model not in ("Colab", "API", "CLUSTER", "OAI") else []) + memtokens + witokens + anotetkns + prompttkns
-        assert len(tokens) <= koboldai_vars.max_length - lnsp - vars.genamt - budget_deduction
+        assert len(tokens) <= koboldai_vars.max_length - lnsp - koboldai_vars.genamt - budget_deduction
         ln = len(tokens) + lnsp
         return tokens, ln+1, ln+koboldai_vars.genamt
     else:
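The corrected assertion checks that the assembled prompt leaves room for the requested generation inside the context window. With assumed values (not from the commit):

max_length = 2048        # assumed context window
lnsp = 1                 # assumed soft-prompt length
genamt = 80              # assumed generation length
budget_deduction = 0
budget = max_length - lnsp - genamt - budget_deduction   # 1967 prompt tokens at most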
@@ -4978,37 +4978,37 @@ def sendtoapi(txt, min, max):
 #==================================================================#
 def sendtocluster(txt, min, max):
     # Log request to console
-    if not vars.quiet:
+    if not koboldai_vars.quiet:
         print("{0}Tokens:{1}, Txt:{2}{3}".format(colors.YELLOW, min-1, txt, colors.END))

     # Store context in memory to use it for comparison with generated content
-    vars.lastctx = txt
+    koboldai_vars.lastctx = txt

     # Build request JSON data
     reqdata = {
         'max_length': max - min + 1,
-        'max_context_length': vars.max_length,
-        'rep_pen': vars.rep_pen,
-        'rep_pen_slope': vars.rep_pen_slope,
-        'rep_pen_range': vars.rep_pen_range,
-        'temperature': vars.temp,
-        'top_p': vars.top_p,
-        'top_k': vars.top_k,
-        'top_a': vars.top_a,
-        'tfs': vars.tfs,
-        'typical': vars.typical,
-        'n': vars.numseqs,
+        'max_context_length': koboldai_vars.max_length,
+        'rep_pen': koboldai_vars.rep_pen,
+        'rep_pen_slope': koboldai_vars.rep_pen_slope,
+        'rep_pen_range': koboldai_vars.rep_pen_range,
+        'temperature': koboldai_vars.temp,
+        'top_p': koboldai_vars.top_p,
+        'top_k': koboldai_vars.top_k,
+        'top_a': koboldai_vars.top_a,
+        'tfs': koboldai_vars.tfs,
+        'typical': koboldai_vars.typical,
+        'n': koboldai_vars.numseqs,
     }
     cluster_metadata = {
         'prompt': txt,
         'params': reqdata,
-        'username': vars.apikey,
-        'models': vars.cluster_requested_models,
+        'username': koboldai_vars.apikey,
+        'models': koboldai_vars.cluster_requested_models,
     }

     # Create request
     req = requests.post(
-        vars.colaburl[:-8] + "/generate/sync",
+        koboldai_vars.colaburl[:-8] + "/generate/sync",
         json=cluster_metadata,
     )
     js = req.json()
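Two details worth noting: `colaburl` was set earlier to `msg['url'] + "/request"`, so the `[:-8]` strips the 8-character "/request" suffix before "/generate/sync" is appended, and the API key is sent as the `username` field. The payload POSTed to the cluster looks roughly like this (all values are placeholders, not from the commit):

cluster_metadata = {
    "prompt": "You arrive at the village gates...",
    "params": {
        "max_length": 80, "max_context_length": 2048,
        "rep_pen": 1.1, "rep_pen_slope": 0.7, "rep_pen_range": 1024,
        "temperature": 0.5, "top_p": 0.9, "top_k": 0, "top_a": 0.0,
        "tfs": 1.0, "typical": 1.0, "n": 1,
    },
    "username": "0000000000",                  # koboldai_vars.apikey
    "models": ["KoboldAI/fairseq-dense-13B"],  # koboldai_vars.cluster_requested_models
}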
@@ -5026,15 +5026,15 @@ def sendtocluster(txt, min, max):
         return
     genout = js

-    for i in range(vars.numseqs):
-        vars.lua_koboldbridge.outputs[i+1] = genout[i]
+    for i in range(koboldai_vars.numseqs):
+        koboldai_vars.lua_koboldbridge.outputs[i+1] = genout[i]

     execute_outmod()
-    if(vars.lua_koboldbridge.regeneration_required):
-        vars.lua_koboldbridge.regeneration_required = False
+    if(koboldai_vars.lua_koboldbridge.regeneration_required):
+        koboldai_vars.lua_koboldbridge.regeneration_required = False
         genout = []
-        for i in range(vars.numseqs):
-            genout.append(vars.lua_koboldbridge.outputs[i+1])
+        for i in range(koboldai_vars.numseqs):
+            genout.append(koboldai_vars.lua_koboldbridge.outputs[i+1])
             assert type(genout[-1]) is str

     if(len(genout) == 1):
@@ -5047,8 +5047,8 @@ def sendtocluster(txt, min, max):
         seqs = []
         for seq in adjusted_genout:
             seqs.append({"generated_text": seq})
-        if(vars.lua_koboldbridge.restart_sequence is not None and vars.lua_koboldbridge.restart_sequence > 0):
-            genresult(adjusted_genout[vars.lua_koboldbridge.restart_sequence-1]["generated_text"])
+        if(koboldai_vars.lua_koboldbridge.restart_sequence is not None and koboldai_vars.lua_koboldbridge.restart_sequence > 0):
+            genresult(adjusted_genout[koboldai_vars.lua_koboldbridge.restart_sequence-1]["generated_text"])
         else:
             genselect(adjusted_genout)

@@ -5884,8 +5884,8 @@ def oairequest(txt, min, max):

     # Build request JSON data
     # GooseAI is a subntype of OAI. So to check if it's this type, we check the configname as a workaround
-    # as the vars.model will always be OAI
-    if 'GooseAI' in vars.configname:
+    # as the koboldai_vars.model will always be OAI
+    if 'GooseAI' in koboldai_vars.configname:
         reqdata = {
             'prompt': txt,
             'max_tokens': koboldai_vars.genamt,
@@ -10182,7 +10182,7 @@ def get_config_soft_prompts_list():
             values: []
     """
     splist = []
-    for sp in fileops.getspfiles(vars.modeldim):
+    for sp in fileops.getspfiles(koboldai_vars.modeldim):

         splist.append({"value":sp["filename"]})
     return {"values": splist}
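Per the `return {"values": splist}` above, the endpoint answers with a list of single-key objects, roughly (filenames are placeholders):

# Illustrative response shape:
{"values": [{"value": "my_softprompt.zip"}, {"value": "another.zip"}]}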
The remaining hunks target the module defining `class system_settings(settings)` (koboldai_settings.py in the united branch); the file header was not preserved in this mirror view. First, a dead `start_horde_bridge` stub is removed:

@@ -768,11 +768,6 @@ class system_settings(settings):
         self._horde_pid = None


-    def start_horde_bridge(port):
-        while True:
-            print("Running horde")
-            time.sleep(10)
-
     def __setattr__(self, name, value):
         new_variable = name not in self.__dict__
         old_value = getattr(self, name, None)
@@ -802,8 +797,10 @@ class system_settings(settings):
                                        '--username', 'new_ui_user', '--password', '3589yhusd*YT$^', '--kai_name', 'Test New UI',
                                        '--kai_url', 'http://127.0.0.1:{}'.format(self.port), '--cluster_url', "http://koboldai.net"])
             else:
-                print("kill bridge")
-                self._horde_pid.terminate()
+                if self._horde_pid is not None:
+                    print("kill bridge")
+                    self._horde_pid.terminate()
+                    self._horde_pid = None


 class KoboldStoryRegister(object):
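This last hunk is the behavioral fix: terminate() used to be called unconditionally, which raises AttributeError when no bridge process was ever started (`_horde_pid` is None). The guard, plus resetting the handle afterwards, makes shutdown safe to call repeatedly. The pattern in isolation (simplified sketch, not the class itself):

class BridgeOwner:
    def __init__(self):
        self._horde_pid = None            # becomes a process handle when started

    def stop(self):
        if self._horde_pid is not None:   # guard: nothing to kill if never started
            print("kill bridge")
            self._horde_pid.terminate()
            self._horde_pid = None        # idempotent: a second stop() is a no-op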