Fix Qwen and Command tokenizers not used for logit bias

Cohee
2024-09-17 13:01:19 +00:00
parent 0b0bd27321
commit 0207794a2b
2 changed files with 7 additions and 3 deletions


@@ -33,18 +33,22 @@ export const tokenizers = {
     BEST_MATCH: 99,
 };
 
-export const SENTENCEPIECE_TOKENIZERS = [
+// A list of local tokenizers that support encoding and decoding token ids.
+export const ENCODE_TOKENIZERS = [
     tokenizers.LLAMA,
     tokenizers.MISTRAL,
     tokenizers.YI,
     tokenizers.LLAMA3,
     tokenizers.GEMMA,
     tokenizers.JAMBA,
+    tokenizers.QWEN2,
+    tokenizers.COMMAND_R,
     // uncomment when NovelAI releases Kayra and Clio weights, lol
     //tokenizers.NERD,
     //tokenizers.NERD2,
 ];
 
+// A list of Text Completion sources that support remote tokenization.
 export const TEXTGEN_TOKENIZERS = [OOBA, TABBY, KOBOLDCPP, LLAMACPP, VLLM, APHRODITE];
 
 const TOKENIZER_URLS = {
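
For context on why list membership matters here: logit bias entries are written as text and must be encoded into token ids by a local tokenizer before they can be applied, so any tokenizer missing from ENCODE_TOKENIZERS effectively gets no bias. The sketch below is not the project's actual code; buildLogitBias, encodeText, biasEntries, and the import path are hypothetical stand-ins used only to illustrate the gating behavior.

// Minimal sketch, assuming a gate on ENCODE_TOKENIZERS (not the real SillyTavern code).
import { tokenizers, ENCODE_TOKENIZERS } from './tokenizers.js'; // assumed path

// biasEntries: [{ text: 'word to bias', value: -5 }, ...]  (hypothetical shape)
// encodeText(text, tokenizerId) -> number[]                 (hypothetical helper)
function buildLogitBias(biasEntries, tokenizerId, encodeText) {
    if (!ENCODE_TOKENIZERS.includes(tokenizerId)) {
        // Tokenizer can't encode locally: no token ids, so no bias is applied.
        // Before this commit, QWEN2 and COMMAND_R fell through to this branch.
        return {};
    }

    const bias = {};
    for (const { text, value } of biasEntries) {
        for (const tokenId of encodeText(text, tokenizerId)) {
            bias[tokenId] = value;
        }
    }
    return bias;
}

Under this assumed usage, passing tokenizers.QWEN2 or tokenizers.COMMAND_R now clears the guard, so their bias strings are encoded and included in the request instead of being silently dropped.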