Working on Deterministic AI UI changes and dynamic world info bug

ebolam
2022-11-10 13:38:29 -05:00
parent 534cc1e859
commit 7c7b68e4dd
4 changed files with 66 additions and 30 deletions

aiserver.py

@@ -2375,18 +2375,18 @@ def patch_transformers():
             if not koboldai_vars.inference_config.do_dynamic_wi:
                 return False

+            if not koboldai_vars.dynamicscan:
+                return False
+
+            if len(self.excluded_world_info) != input_ids.shape[0]:
+                print(tokenizer.decode(self.excluded_world_info))
+                print(tokenizer.decode(input_ids.shape[0]))
             assert len(self.excluded_world_info) == input_ids.shape[0]

-            if not koboldai_vars.dynamicscan:
-                return False
-
             tail = input_ids[..., -koboldai_vars.generated_tkns:]
             for i, t in enumerate(tail):
                 decoded = utils.decodenewlines(tokenizer.decode(t))
-                _, _, _, found = koboldai_vars.calc_ai_text(submitted_text=decoded)
+                _, _, _, found = koboldai_vars.calc_ai_text(submitted_text=decoded, send_context=False)
                 found = list(set(found) - set(self.excluded_world_info[i]))
                 if len(found) != 0:
                     print("Found: {}".format(found))
@@ -5279,7 +5279,7 @@ def core_generate(text: list, min: int, max: int, found_entries: set, is_core: b
     assert genout.shape[0] == koboldai_vars.numseqs

     if(koboldai_vars.lua_koboldbridge.generated_cols and koboldai_vars.generated_tkns != koboldai_vars.lua_koboldbridge.generated_cols):
-        raise RuntimeError("Inconsistency detected between KoboldAI Python and Lua backends")
+        raise RuntimeError(f"Inconsistency detected between KoboldAI Python and Lua backends ({koboldai_vars.generated_tkns} != {koboldai_vars.lua_koboldbridge.generated_cols})")

     if(already_generated != koboldai_vars.generated_tkns):
         raise RuntimeError("WI scanning error")
@@ -7769,18 +7769,7 @@ def final_startup():
     )

     # Set the initial RNG seed
-    if(koboldai_vars.seed is not None):
-        if(koboldai_vars.use_colab_tpu):
-            if(koboldai_vars.seed_specified):
-                __import__("tpu_mtj_backend").set_rng_seed(koboldai_vars.seed)
-            else:
-                __import__("tpu_mtj_backend").randomize_rng_seed()
-        else:
-            if(koboldai_vars.seed_specified):
-                __import__("torch").manual_seed(koboldai_vars.seed)
-            else:
-                __import__("torch").seed()
-        koboldai_vars.seed = __import__("tpu_mtj_backend").get_rng_seed() if koboldai_vars.use_colab_tpu else __import__("torch").initial_seed()
+    set_seed()

 def send_debug():
     if koboldai_vars.debug:
@@ -8249,8 +8238,30 @@ def UI_2_var_change(data):
     with open(filename, "w") as settings_file:
         settings_file.write(getattr(koboldai_vars, "_{}".format(classname)).to_json())

+    if name in ['seed', 'seed_specified']:
+        set_seed()
+
     return {'id': data['ID'], 'status': "Saved"}

+#==================================================================#
+# Set the random seed (or constant seed) for generation
+#==================================================================#
+def set_seed():
+    print("Setting Seed")
+    if(koboldai_vars.seed is not None):
+        if(koboldai_vars.use_colab_tpu):
+            if(koboldai_vars.seed_specified):
+                __import__("tpu_mtj_backend").set_rng_seed(koboldai_vars.seed)
+            else:
+                __import__("tpu_mtj_backend").randomize_rng_seed()
+        else:
+            if(koboldai_vars.seed_specified):
+                __import__("torch").manual_seed(koboldai_vars.seed)
+            else:
+                __import__("torch").seed()
+        koboldai_vars.seed = __import__("tpu_mtj_backend").get_rng_seed() if koboldai_vars.use_colab_tpu else __import__("torch").initial_seed()
+
 #==================================================================#
 # Saving Story
 #==================================================================#
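Net effect of the two hunks above: the seeding block moves out of final_startup() into a reusable set_seed(), which UI_2_var_change() now re-runs whenever seed or seed_specified changes. Below is a minimal sketch of just the torch branch (the TPU path is omitted; set_seed_sketch is a stand-in, not the repo function), showing why a user-specified seed makes sampling repeatable:

import torch

def set_seed_sketch(seed, seed_specified):
    # Stand-in for the torch branch of set_seed() above.
    if seed is None:
        return None              # nothing to apply yet
    if seed_specified:
        torch.manual_seed(seed)  # user-chosen seed: fully repeatable
    else:
        torch.seed()             # draw a fresh random seed instead
    return torch.initial_seed()  # the seed actually in effect

# With seed_specified, the same seed reproduces the same draws:
set_seed_sketch(1234, True)
a = torch.rand(3)
set_seed_sketch(1234, True)
assert torch.equal(a, torch.rand(3))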

gensettings.py

@@ -316,17 +316,6 @@ gensettingstf = [
     "classname": "user",
     "name": "nogenmod"
     },
-    {
-    "uitype": "toggle",
-    "unit": "bool",
-    "label": "Full Determinism",
-    "id": "setfulldeterminism",
-    "min": 0,
-    "max": 1,
-    "step": 1,
-    "default": 0,
-    "tooltip": "Causes generation to be fully deterministic. The model will always generate the same thing as long as your story, settings and RNG seed are the same. If disabled, only the sequence of outputs the model generates is deterministic."
-    },
     {
     "uitype": "toggle",
     "unit": "bool",
@@ -611,6 +600,37 @@ gensettingstf = [
     "classname": "user",
     "name": "privacy_password"
     },
+    {
+    "uitype": "toggle",
+    "unit": "bool",
+    "label": "Full Determinism",
+    "id": "setfulldeterminism",
+    "min": 0,
+    "max": 1,
+    "step": 1,
+    "default": 0,
+    "tooltip": "Causes generation to be fully deterministic. The model will always generate the same thing as long as your story, settings and RNG seed are the same. If disabled, only the sequence of outputs the model generates is deterministic.",
+    "menu_path": "Settings",
+    "sub_path": "Other",
+    "classname": "system",
+    "name": "seed_specified"
+    },
+    {
+    "uitype": "text",
+    "unit": "text",
+    "label": "RNG seed",
+    "id": "seed",
+    "min": 0,
+    "max": 1,
+    "step": 1,
+    "default": 0,
+    "tooltip": "The seed number used to generate the AI text. Output will change if this number is changed.",
+    "menu_path": "Settings",
+    "sub_path": "Other",
+    "classname": "system",
+    "name": "seed",
+    "extra_classes": "var_sync_alt_system_seed_specified"
+    },
 ]

 gensettingsik =[{
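Taken together, the two gensettings hunks move the "Full Determinism" toggle further down the list, give it menu_path/sub_path/classname/name metadata, and add a new "RNG seed" text field that syncs the system seed variable. Purely as an illustration of how such metadata can drive rendering (this is not KoboldAI's actual UI code), entries can be grouped by their menu location:

from collections import defaultdict

# Abbreviated copies of the two entries added above.
gensettings = [
    {"label": "Full Determinism", "menu_path": "Settings",
     "sub_path": "Other", "classname": "system", "name": "seed_specified"},
    {"label": "RNG seed", "menu_path": "Settings",
     "sub_path": "Other", "classname": "system", "name": "seed"},
]

def group_by_menu(settings):
    # Bucket controls by (menu_path, sub_path) so each one renders
    # under the right card in the settings menu.
    groups = defaultdict(list)
    for item in settings:
        groups[(item["menu_path"], item["sub_path"])].append(item["label"])
    return dict(groups)

print(group_by_menu(gensettings))
# -> {('Settings', 'Other'): ['Full Determinism', 'RNG seed']}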

koboldai_settings.py

@@ -197,7 +197,7 @@ class koboldai_vars(object):
         # TODO: This might be ineffecient, should we cache some of this?
         return [[token, self.tokenizer.decode(token)] for token in encoded]

-    def calc_ai_text(self, submitted_text="", return_text=False):
+    def calc_ai_text(self, submitted_text="", return_text=False, send_context=True):
         #start_time = time.time()
         if self.tokenizer is None:
             if return_text:
@@ -443,7 +443,8 @@ class koboldai_vars(object):
         for item in context:
             tokens.extend([x[0] for x in item['tokens']])

-        self.context = context
+        if send_context:
+            self.context = context

         #logger.debug("Calc_AI_text: {}s".format(time.time()-start_time))
         logger.debug("Token Budget: {}. Used Tokens: {}".format(token_budget, used_tokens))

static/koboldai.css

@@ -2953,3 +2953,7 @@ select {
     filter: blur(0px) !important;
     -webkit-filter: blur(0px) !important;
 }
+
+#seed_card[system_seed_specified=false] {
+    display: none;
+}
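The new rule hides the #seed_card element whenever its system_seed_specified attribute is false, so the RNG seed field only appears once a seed has actually been specified; the var_sync_alt_system_seed_specified class on the seed entry above is presumably what keeps that attribute in sync with the seed_specified setting.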