Mirror of https://github.com/SillyTavern/SillyTavern.git (synced 2025-06-05 21:59:27 +02:00)
Add error handling to OpenAI bias calculation
@@ -2963,11 +2963,15 @@ app.post("/openai_bias", jsonParser, async function (request, response) {
             continue;
         }
 
+        try {
             const tokens = tokenizer.encode(entry.text);
 
             for (const token of tokens) {
                 result[token] = entry.value;
             }
+        } catch {
+            console.warn('Tokenizer failed to encode:', entry.text);
+        }
     }
 
     // not needed for cached tokenizers
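For reference, a minimal standalone sketch of the pattern this hunk introduces: each bias entry is encoded inside a try/catch so that one string the tokenizer cannot handle skips that entry instead of failing the whole request. The sketch assumes the @dqbd/tiktoken package; buildBiasMap and the entries shape are illustrative names, not taken from the SillyTavern source.

const { encoding_for_model } = require('@dqbd/tiktoken');

// entries: [{ text: 'some phrase', value: -100 }, ...] (illustrative shape)
function buildBiasMap(entries, model = 'gpt-3.5-turbo') {
    const tokenizer = encoding_for_model(model);
    const result = {};

    for (const entry of entries) {
        if (!entry || !entry.text) {
            continue;
        }

        try {
            // encode() returns a Uint32Array of token ids
            const tokens = tokenizer.encode(entry.text);

            for (const token of tokens) {
                result[token] = entry.value;
            }
        } catch {
            // Skip entries the tokenizer cannot encode instead of throwing
            console.warn('Tokenizer failed to encode:', entry.text);
        }
    }

    tokenizer.free();
    return result;
}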
@@ -3348,6 +3352,7 @@ app.post("/tokenize_openai", jsonParser, function (request, response_tokenize_op
     const tokenizer = getTiktokenTokenizer(model);
 
     for (const msg of request.body) {
+        try {
             num_tokens += tokensPerMessage;
             for (const [key, value] of Object.entries(msg)) {
                 num_tokens += tokenizer.encode(value).length;
@@ -3355,6 +3360,9 @@ app.post("/tokenize_openai", jsonParser, function (request, response_tokenize_op
                     num_tokens += tokensPerName;
                 }
             }
+        } catch {
+            console.warn("Error tokenizing message:", msg);
+        }
     }
     num_tokens += tokensPadding;
 
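For reference, a hedged sketch of the counting scheme these two hunks wrap in error handling, following OpenAI's published chat token-counting recipe. countChatTokens is an illustrative name, and the constant values are assumptions based on that recipe; the diff itself does not show how tokensPerMessage, tokensPerName, and tokensPadding are set.

const { encoding_for_model } = require('@dqbd/tiktoken');

function countChatTokens(messages, model = 'gpt-4') {
    const tokensPerMessage = 3; // 4 for gpt-3.5-turbo-0301 per the OpenAI cookbook
    const tokensPerName = 1;    // -1 for gpt-3.5-turbo-0301
    const tokensPadding = 3;    // every reply is primed with <|start|>assistant<|message|>
    const tokenizer = encoding_for_model(model);
    let num_tokens = 0;

    for (const msg of messages) {
        try {
            num_tokens += tokensPerMessage;
            for (const [key, value] of Object.entries(msg)) {
                num_tokens += tokenizer.encode(value).length;
                if (key === 'name') {
                    num_tokens += tokensPerName;
                }
            }
        } catch {
            // A message that cannot be tokenized is logged and skipped
            console.warn('Error tokenizing message:', msg);
        }
    }

    tokenizer.free();
    return num_tokens + tokensPadding;
}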