From 8593bf339b9d387ac77daceeee4fdcc234cf5f8f Mon Sep 17 00:00:00 2001
From: Gnome Ann <>
Date: Tue, 21 Jun 2022 15:36:25 -0400
Subject: [PATCH] Another typo fix

---
 aiserver.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/aiserver.py b/aiserver.py
index a0d4d688..4bbd89b0 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -1995,7 +1995,7 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
             shutil.rmtree("cache/")
 
     if(vars.badwordsids is vars.badwordsids_default and vars.model_type not in ("gpt2", "gpt_neo", "gptj")):
-        vars.badwordsids = [[v] for k, v in tokenizer.get_vocab().items() if any(c in k for c in "<>[]")]
+        vars.badwordsids = [[v] for k, v in tokenizer.get_vocab().items() if any(c in str(k) for c in "<>[]")]
 
     patch_causallm(model)
 
@@ -2162,7 +2162,7 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
         vars.modeldim = int(tpu_mtj_backend.params.get("d_embed", tpu_mtj_backend.params["d_model"]))
         tokenizer = tpu_mtj_backend.tokenizer
         if(vars.badwordsids is vars.badwordsids_default and vars.model_type not in ("gpt2", "gpt_neo", "gptj")):
-            vars.badwordsids = [[str(v)] for k, v in tokenizer.get_vocab().items() if any(c in str(k) for c in "<>[]")]
+            vars.badwordsids = [[v] for k, v in tokenizer.get_vocab().items() if any(c in str(k) for c in "<>[]")]
     else:
         loadsettings()
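
Note (not part of the patch): a minimal standalone sketch of what the corrected comprehension computes, assuming a Hugging Face transformers tokenizer; the model name below is purely illustrative and does not appear in the patch.

# Sketch only: collect the IDs of vocab entries that contain <, >, [ or ],
# mirroring the fixed line above; str(k) guards against tokenizers whose
# vocab keys are not plain Python strings.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("facebook/opt-125m")  # illustrative model choice
badwordsids = [
    [v]
    for k, v in tokenizer.get_vocab().items()
    if any(c in str(k) for c in "<>[]")
]
print(len(badwordsids), badwordsids[:3])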