fix: tpu tokenizer errors

Bogdan Drema
2023-05-08 18:24:34 +01:00
parent cb4af7e56e
commit d53726bed6
2 changed files with 3 additions and 3 deletions


@@ -59,7 +59,7 @@ class HFInferenceModel(InferenceModel):
                     token_ids = [first]
                 elif len(token_ids) > 0:
                     first = int(token_ids[0])
-            elif token_ids:
+            elif token_ids is not None and len(token_ids) > 0:
                 first = token_ids[0]
             result = original_decode(self, token_ids, *args, **kwargs)
             if first is not None and first in has_prefix_space:
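Why this guard matters: on the TPU (MTJ) path the decode wrapper can receive token ids as a numpy array rather than a plain Python list, and numpy raises a ValueError when a multi-element array is coerced to bool, so the bare `elif token_ids:` truthiness test blows up. A minimal sketch of the failure mode and the fix (the array values here are made up):

    import numpy as np

    token_ids = np.array([50256, 318])  # stand-in ids; values are made up

    # Truthiness of a multi-element numpy array is ambiguous:
    # ValueError: The truth value of an array with more than one element
    # is ambiguous. Use a.any() or a.all()
    try:
        if token_ids:
            pass
    except ValueError as err:
        print(err)

    # The explicit form works for lists, tuples, and arrays alike,
    # and rejects None without needing a separate branch:
    if token_ids is not None and len(token_ids) > 0:
        first = token_ids[0]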


@@ -17,6 +17,7 @@ from modeling.inference_model import (
     ModelCapabilities,
 )
 from modeling.inference_models.hf import HFInferenceModel
+from modeling.tokenizer import GenericTokenizer
 
 # This file shouldn't be imported unless using the TPU
 assert utils.koboldai_vars.use_colab_tpu
@@ -193,8 +194,7 @@ class HFMTJInferenceModel(HFInferenceModel):
         utils.koboldai_vars.modeldim = int(
             tpu_mtj_backend.params.get("d_embed", tpu_mtj_backend.params["d_model"])
         )
-        self.tokenizer = tpu_mtj_backend.tokenizer
-
+        self.tokenizer = GenericTokenizer(tpu_mtj_backend.tokenizer)
 
         if (
             utils.koboldai_vars.badwordsids is koboldai_settings.badwordsids_default
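The second half of the fix is the `GenericTokenizer` wrap above: the raw `tpu_mtj_backend.tokenizer` was being assigned directly, so TPU models exposed a different tokenizer surface than the HF models. The real class lives in `modeling/tokenizer.py` and is not shown in this diff; purely as an illustration of the idea, a wrapper of roughly this shape would normalize the backends (the method bodies below are assumptions, not the actual implementation):

    # Hypothetical sketch only -- the actual GenericTokenizer is defined
    # in modeling/tokenizer.py and may differ from this.
    class GenericTokenizer:
        """Uniform facade over tokenizer backends (HF, MTJ, ...)."""

        def __init__(self, tokenizer) -> None:
            self.tokenizer = tokenizer

        def encode(self, text: str) -> list:
            tokens = self.tokenizer.encode(text)
            # Some backends return an object carrying an `ids` field
            # instead of a plain list of ints; normalize it here.
            if hasattr(tokens, "ids"):
                tokens = tokens.ids
            return list(tokens)

        def decode(self, tokens) -> str:
            # Accept a single id, a list, or an array-like uniformly.
            if isinstance(tokens, int):
                tokens = [tokens]
            return self.tokenizer.decode(list(tokens))

With a wrapper like this in place, the decode path patched in the first hunk sees the same call surface regardless of backend.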