fix: tpu tokenizer errors

Bogdan Drema
2023-05-08 18:24:34 +01:00
parent cb4af7e56e
commit d53726bed6
2 changed files with 3 additions and 3 deletions


@@ -17,6 +17,7 @@ from modeling.inference_model import (
     ModelCapabilities,
 )
 from modeling.inference_models.hf import HFInferenceModel
+from modeling.tokenizer import GenericTokenizer
 
 # This file shouldn't be imported unless using the TPU
 assert utils.koboldai_vars.use_colab_tpu
@@ -193,8 +194,7 @@ class HFMTJInferenceModel(HFInferenceModel):
         utils.koboldai_vars.modeldim = int(
             tpu_mtj_backend.params.get("d_embed", tpu_mtj_backend.params["d_model"])
         )
-        self.tokenizer = tpu_mtj_backend.tokenizer
+        self.tokenizer = GenericTokenizer(tpu_mtj_backend.tokenizer)
         if (
             utils.koboldai_vars.badwordsids is koboldai_settings.badwordsids_default
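
The change wraps the raw MTJ backend tokenizer in GenericTokenizer so downstream code sees one consistent tokenizer interface regardless of backend. As a rough illustration only, here is a minimal sketch of what such a wrapper might look like; the class body, the encode/decode fallback logic, and the method signatures below are assumptions, not the actual modeling.tokenizer.GenericTokenizer implementation.

# Hypothetical sketch of a GenericTokenizer-style wrapper; an assumption for
# illustration, not the actual modeling.tokenizer code.
from typing import List, Union


class GenericTokenizer:
    """Uniform encode/decode facade over an arbitrary backend tokenizer."""

    def __init__(self, tokenizer) -> None:
        # Keep a reference to the wrapped backend tokenizer
        # (e.g. tpu_mtj_backend.tokenizer in the diff above).
        self.tokenizer = tokenizer

    def encode(self, text: str) -> List[int]:
        # Delegate to the backend's encode if present; otherwise assume the
        # backend is callable and returns a dict with "input_ids".
        if hasattr(self.tokenizer, "encode"):
            return self.tokenizer.encode(text)
        return self.tokenizer(text)["input_ids"]

    def decode(self, ids: Union[int, List[int]]) -> str:
        # Accept either a single token id or a list of ids.
        if isinstance(ids, int):
            ids = [ids]
        return self.tokenizer.decode(ids)

With a wrapper along these lines, call sites such as self.tokenizer.decode(...) keep working whether the backend is a Hugging Face tokenizer or the MTJ one, which is presumably what resolves the TPU tokenizer errors named in the commit title.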