Mirror of https://github.com/KoboldAI/KoboldAI-Client.git (synced 2025-06-05 21:59:24 +02:00)
Merge pull request #353 from Zurnaz/llama_tpu_tokenizer_fix
fix: tpu tokenizers errors
@@ -59,7 +59,7 @@ class HFInferenceModel(InferenceModel):
                     token_ids = [first]
                 elif len(token_ids) > 0:
                     first = int(token_ids[0])
-            elif token_ids:
+            elif token_ids is not None and len(token_ids) > 0:
                 first = token_ids[0]
             result = original_decode(self, token_ids, *args, **kwargs)
             if first is not None and first in has_prefix_space:
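The one-line change above is the core of the fix on the HF side: in the decode wrapper, "elif token_ids:" relies on the truthiness of token_ids. That is fine for a plain Python list, but the TPU code path can hand the wrapper an array-like object (the PR title and the MTJ changes below point that way), and evaluating a multi-element NumPy array in a boolean context raises "ValueError: The truth value of an array with more than one element is ambiguous". The explicit "is not None and len(...) > 0" test behaves the same for lists, NumPy arrays, and tensors. A minimal sketch of the failure mode, assuming a NumPy array of token ids reaches the wrapper (the exact type is not shown in this diff):

import numpy as np

def first_token_old(token_ids):
    # Old check: truthiness of a multi-element NumPy array is ambiguous and raises ValueError.
    if token_ids:
        return token_ids[0]
    return None

def first_token_new(token_ids):
    # Check from the diff: explicit None/length test works for lists, NumPy arrays, and tensors alike.
    if token_ids is not None and len(token_ids) > 0:
        return token_ids[0]
    return None

ids = np.array([3290, 318, 257])
try:
    first_token_old(ids)
except ValueError as err:
    print("old check fails:", err)
print("new check:", first_token_new(ids))  # prints the first id, 3290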
@@ -17,6 +17,7 @@ from modeling.inference_model import (
     ModelCapabilities,
 )
 from modeling.inference_models.hf import HFInferenceModel
+from modeling.tokenizer import GenericTokenizer
 
 # This file shouldn't be imported unless using the TPU
 assert utils.koboldai_vars.use_colab_tpu
@@ -193,8 +194,7 @@ class HFMTJInferenceModel(HFInferenceModel):
         utils.koboldai_vars.modeldim = int(
             tpu_mtj_backend.params.get("d_embed", tpu_mtj_backend.params["d_model"])
         )
 
-        self.tokenizer = tpu_mtj_backend.tokenizer
-
+        self.tokenizer = GenericTokenizer(tpu_mtj_backend.tokenizer)
         if (
             utils.koboldai_vars.badwordsids is koboldai_settings.badwordsids_default
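The MTJ-side change makes the TPU backend expose its tokenizer through the same GenericTokenizer wrapper the other HF-based backends use (hence the new import above), so calling code sees one consistent interface instead of the raw tpu_mtj_backend tokenizer. The sketch below is only a hypothetical illustration of what such a wrapper typically provides; the real class lives in modeling/tokenizer.py and its exact interface is not shown in this diff:

class GenericTokenizerSketch:
    # Hypothetical stand-in for modeling.tokenizer.GenericTokenizer, shown only
    # to illustrate why wrapping tpu_mtj_backend.tokenizer helps.
    def __init__(self, tokenizer):
        self.tokenizer = tokenizer

    def __getattr__(self, name):
        # Fall through to the wrapped tokenizer so existing attribute lookups
        # keep working regardless of the underlying implementation.
        return getattr(self.tokenizer, name)

    def encode(self, text):
        return self.tokenizer.encode(text)

    def decode(self, token_ids):
        # Normalize a single int into a list, mirroring the defensive handling
        # in the decode wrapper patched in the first hunk.
        if isinstance(token_ids, int):
            token_ids = [token_ids]
        return self.tokenizer.decode(token_ids)

With both backends returning a wrapped tokenizer, code such as the prefix-space decode fix above can treat self.tokenizer uniformly.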