Mirror of https://github.com/SillyTavern/SillyTavern.git (synced 2025-06-05 21:59:27 +02:00)
Add lazy loading of sentencepiece tokenizers
@@ -2795,13 +2795,13 @@ app.post("/openai_bias", jsonParser, async function (request, response) {
     if (sentencepieceTokenizers.includes(model)) {
         const tokenizer = getSentencepiceTokenizer(model);
-        encodeFunction = (text) => new Uint32Array(tokenizer.encodeIds(text));
+        const instance = await tokenizer?.get();
+        encodeFunction = (text) => new Uint32Array(instance?.encodeIds(text));
     } else {
         const tokenizer = getTiktokenTokenizer(model);
         encodeFunction = (tokenizer.encode.bind(tokenizer));
     }

     for (const entry of request.body) {
         if (!entry || !entry.text) {
             continue;
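The change defers tokenizer construction: instead of returning a ready-made tokenizer, getSentencepiceTokenizer(model) now hands back a lazy wrapper, and the handler awaits tokenizer?.get() to obtain (and cache) the actual instance on first use. Below is a minimal sketch of what such a wrapper can look like; it assumes a sentencepiece binding with an async load(path) and a synchronous encodeIds(text) (as in sentencepiece-js), and the class name, model list, and file paths are illustrative, not the repository's actual implementation.

    // Sketch of a lazily-initialized tokenizer wrapper. Assumptions:
    // the binding exposes async load(path) and sync encodeIds(text);
    // names and paths are hypothetical.
    const { SentencePieceProcessor } = require('sentencepiece-js');

    class LazyTokenizer {
        constructor(modelPath) {
            this.modelPath = modelPath;
            this.instance = null;
        }

        // Loads and parses the model file on the first call, then
        // reuses the cached processor for every later call.
        async get() {
            if (this.instance === null) {
                const processor = new SentencePieceProcessor();
                await processor.load(this.modelPath);
                this.instance = processor;
            }
            return this.instance;
        }
    }

    // One wrapper per supported model; no model file is read at startup.
    const spTokenizers = {
        llama: new LazyTokenizer('src/sentencepiece/llama.model'),
    };

    function getSentencepiceTokenizer(model) {
        return spTokenizers[model];
    }

With this shape, server startup no longer pays for parsing every tokenizer model: a model file is read only when a request actually needs it, and the optional chaining in tokenizer?.get() keeps the call site safe when no sentencepiece tokenizer is registered for the requested model.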