Add error handling to OpenAI bias calculation

Cohee
2023-07-11 17:06:40 +03:00
parent c53e496687
commit 0fc10e8f7d


@@ -2963,11 +2963,15 @@ app.post("/openai_bias", jsonParser, async function (request, response) {
             continue;
         }
+        try {
             const tokens = tokenizer.encode(entry.text);
             for (const token of tokens) {
                 result[token] = entry.value;
             }
+        } catch {
+            console.warn('Tokenizer failed to encode:', entry.text);
+        }
     }
     // not needed for cached tokenizers
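For context, a minimal standalone sketch of the pattern this hunk introduces: each logit-bias entry is encoded inside its own try/catch, so an entry the tokenizer cannot encode is logged and skipped instead of failing the whole /openai_bias request. The entries array and the encode stub below are hypothetical stand-ins for illustration, not the actual tiktoken tokenizer the server uses.

// Hypothetical stand-in for the tiktoken tokenizer used by the endpoint.
const tokenizer = {
    encode(text) {
        if (typeof text !== 'string') throw new TypeError('expected a string');
        return text.split(/\s+/).map((_, i) => i + 1); // toy token IDs
    },
};

// Mirrors the guarded loop from the hunk above.
function buildBias(entries) {
    const result = {};
    for (const entry of entries) {
        if (!entry.text) {
            continue;
        }
        try {
            const tokens = tokenizer.encode(entry.text);
            for (const token of tokens) {
                result[token] = entry.value;
            }
        } catch {
            console.warn('Tokenizer failed to encode:', entry.text);
        }
    }
    return result;
}

// The non-string entry is warned about and skipped rather than crashing:
console.log(buildBias([
    { text: 'hello world', value: -100 },
    { text: 42, value: 10 },
]));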
@@ -3348,6 +3352,7 @@ app.post("/tokenize_openai", jsonParser, function (request, response_tokenize_op
     const tokenizer = getTiktokenTokenizer(model);
     for (const msg of request.body) {
+        try {
         num_tokens += tokensPerMessage;
         for (const [key, value] of Object.entries(msg)) {
             num_tokens += tokenizer.encode(value).length;
@@ -3355,6 +3360,9 @@ app.post("/tokenize_openai", jsonParser, function (request, response_tokenize_op
             num_tokens += tokensPerName;
         }
     }
+        } catch {
+            console.warn("Error tokenizing message:", msg);
+        }
     }
     num_tokens += tokensPadding;
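Likewise, a hedged sketch of the second change: the body of the per-message loop in /tokenize_openai is wrapped in try/catch, so a message with an unencodable value is logged and skipped instead of aborting the count (note that the tokensPerMessage overhead added before the failing encode still counts, matching the diff). The constants, the encode stub, and the name-field check are illustrative assumptions following the standard OpenAI token-counting convention; the real endpoint uses getTiktokenTokenizer(model) and model-specific values.

// Illustrative constants; the real values are model-specific.
const tokensPerMessage = 3;
const tokensPerName = 1;
const tokensPadding = 3;

// Hypothetical stub standing in for getTiktokenTokenizer(model).
const tokenizer = {
    encode(text) {
        if (typeof text !== 'string') throw new TypeError('expected a string');
        return Array.from(text, (_, i) => i); // toy: one token per character
    },
};

function countTokens(messages) {
    let num_tokens = 0;
    for (const msg of messages) {
        try {
            num_tokens += tokensPerMessage;
            for (const [key, value] of Object.entries(msg)) {
                num_tokens += tokenizer.encode(value).length;
                if (key === 'name') {
                    num_tokens += tokensPerName;
                }
            }
        } catch {
            console.warn("Error tokenizing message:", msg);
        }
    }
    return num_tokens + tokensPadding;
}

// The null content triggers the warning path without failing the request:
console.log(countTokens([
    { role: 'user', content: 'Hello!' },
    { role: 'user', content: null },
]));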