Universal Model Settings

No longer depends on a local config file, enabling the configuration to work in --colab mode.
henk717 2022-01-20 10:11:11 +01:00
parent c25e2a33f1
commit 9532b56cb8
1 changed file with 29 additions and 30 deletions

@@ -2259,9 +2259,8 @@ def loadsettings():
 #  Allow the models to override some settings
 #==================================================================#
 def loadmodelsettings():
-    if(path.exists(vars.custmodpth.replace('/', '_') + "/config.json")):
-        model_config = open(vars.custmodpth.replace('/', '_') + "/config.json", "r")
-        js = json.load(model_config)
+    model_js_config = str(model_config).partition(' ')[2]
+    js = json.loads(model_js_config)
     if("badwordsids" in js):
         vars.badwordsids = js["badwordsids"]
     if("temp" in js):
@@ -2286,7 +2285,6 @@ def loadmodelsettings():
         vars.setauthornotetemplate = js["antemplate"]
         if(not vars.gamestarted):
             vars.authornotetemplate = vars.setauthornotetemplate
-    model_config.close()
 
 #==================================================================#
 #  Don't save settings unless 2 seconds have passed without modification
@@ -4552,8 +4550,9 @@ def randomGameRequest(topic, memory=""):
     actionsubmit("", force_submit=True, force_prompt_gen=True)
     vars.memory = memory
 
-# Load settings from client.settings
-loadmodelsettings()
+# Load desired settings from both the model and the users config file
+if(not vars.model in ["InferKit", "Colab", "OAI", "ReadOnly", "TPUMeshTransformerGPTJ"]):
+    loadmodelsettings()
 loadsettings()
 
 # Prevent tokenizer from taking extra time the first time it's used
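
For context, the new lines in the first hunk read the model configuration that is already held in memory instead of re-opening a config.json on disk: the string form of a Hugging Face config object is "<ClassName> {<json>}", so stripping everything up to the first space leaves plain JSON. A minimal, self-contained sketch of that technique, assuming the transformers library; the model name and variable names are illustrative and not taken from the commit:

import json
from transformers import AutoConfig

# Fetch the model's configuration object directly; no local config.json is needed.
model_config = AutoConfig.from_pretrained("gpt2")  # illustrative model name

# str() on a transformers config renders "<ClassName> {<json>}"; dropping the
# class name before the first space leaves a JSON document to parse.
model_js_config = str(model_config).partition(' ')[2]
js = json.loads(model_js_config)

print(js.get("vocab_size"))

Note that model_config.to_dict() would give the same dictionary; the sketch mirrors the string-parsing form used in the diff so the downstream js[...] lookups behave identically.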