SillyTavern commit 0207794a2b (parent 0b0bd27321)

Fix Qwen and Command tokenizers not used for logit bias

The logit bias path only keeps power_user.tokenizer when it appears in SENTENCEPIECE_TOKENIZERS, and that list was missing the Qwen2 and Command-R tokenizers, so a mismatched fallback tokenizer encoded logit bias text for those models. This commit renames the list to ENCODE_TOKENIZERS, documents what it contains, and adds both tokenizers.
@@ -15,7 +15,7 @@ import { BIAS_CACHE, createNewLogitBiasEntry, displayLogitBias, getLogitBiasList
 import { power_user, registerDebugFunction } from './power-user.js';
 import { getEventSourceStream } from './sse-stream.js';
 import { getCurrentDreamGenModelTokenizer, getCurrentOpenRouterModelTokenizer } from './textgen-models.js';
-import { SENTENCEPIECE_TOKENIZERS, TEXTGEN_TOKENIZERS, getTextTokens, tokenizers } from './tokenizers.js';
+import { ENCODE_TOKENIZERS, TEXTGEN_TOKENIZERS, getTextTokens, tokenizers } from './tokenizers.js';
 import { getSortableDelay, onlyUnique } from './utils.js';

 export {
@@ -353,7 +353,7 @@ function getTokenizerForTokenIds() {
         return tokenizers.API_CURRENT;
     }

-    if (SENTENCEPIECE_TOKENIZERS.includes(power_user.tokenizer)) {
+    if (ENCODE_TOKENIZERS.includes(power_user.tokenizer)) {
         return power_user.tokenizer;
     }

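For context, a minimal sketch of how the tokenizer chosen above can feed logit bias encoding. The helper name encodeLogitBiasEntries and the entry shape are assumptions for illustration, not code from this commit; getTextTokens and getTokenizerForTokenIds are the import and function shown in the diff.

// Hypothetical sketch: encode each logit bias entry's text into token ids
// using the tokenizer selected by getTokenizerForTokenIds().
// Before this commit, selecting the Qwen2 or Command-R tokenizer failed the
// SENTENCEPIECE_TOKENIZERS check above, so a mismatched fallback tokenizer
// produced the ids instead.
function encodeLogitBiasEntries(entries) {
    const tokenizer = getTokenizerForTokenIds();
    return entries.map((entry) => ({
        ...entry,
        ids: getTextTokens(tokenizer, entry.text),
    }));
}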
@@ -33,18 +33,22 @@ export const tokenizers = {
     BEST_MATCH: 99,
 };

-export const SENTENCEPIECE_TOKENIZERS = [
+// A list of local tokenizers that support encoding and decoding token ids.
+export const ENCODE_TOKENIZERS = [
     tokenizers.LLAMA,
     tokenizers.MISTRAL,
     tokenizers.YI,
     tokenizers.LLAMA3,
     tokenizers.GEMMA,
     tokenizers.JAMBA,
+    tokenizers.QWEN2,
+    tokenizers.COMMAND_R,
     // uncomment when NovelAI releases Kayra and Clio weights, lol
     //tokenizers.NERD,
     //tokenizers.NERD2,
 ];

+// A list of Text Completion sources that support remote tokenization.
 export const TEXTGEN_TOKENIZERS = [OOBA, TABBY, KOBOLDCPP, LLAMACPP, VLLM, APHRODITE];

 const TOKENIZER_URLS = {
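A before/after illustration of the membership check from getTokenizerForTokenIds(), assuming the user has selected the Qwen2 tokenizer in the UI (a sketch, not part of the commit):

power_user.tokenizer = tokenizers.QWEN2;
// After this commit the local tokenizer passes the encode check:
ENCODE_TOKENIZERS.includes(power_user.tokenizer); // true
// The old SENTENCEPIECE_TOKENIZERS list had no QWEN2 entry, so the same
// check used to return false and the tokenizer was skipped for logit bias.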