From 0fc10e8f7de6c5adc426f7c0448b53a0a5ae5e5f Mon Sep 17 00:00:00 2001
From: Cohee
Date: Tue, 11 Jul 2023 17:06:40 +0300
Subject: [PATCH] Add error handling to OpenAI bias calculation

---
 server.js | 24 ++++++++++++++++--------
 1 file changed, 16 insertions(+), 8 deletions(-)

diff --git a/server.js b/server.js
index 9b543ecd9..1774aeba1 100644
--- a/server.js
+++ b/server.js
@@ -2963,10 +2963,14 @@ app.post("/openai_bias", jsonParser, async function (request, response) {
             continue;
         }
 
-        const tokens = tokenizer.encode(entry.text);
+        try {
+            const tokens = tokenizer.encode(entry.text);
 
-        for (const token of tokens) {
-            result[token] = entry.value;
+            for (const token of tokens) {
+                result[token] = entry.value;
+            }
+        } catch {
+            console.warn('Tokenizer failed to encode:', entry.text);
         }
     }
 
@@ -3348,12 +3352,16 @@ app.post("/tokenize_openai", jsonParser, function (request, response_tokenize_op
     const tokenizer = getTiktokenTokenizer(model);
 
     for (const msg of request.body) {
-        num_tokens += tokensPerMessage;
-        for (const [key, value] of Object.entries(msg)) {
-            num_tokens += tokenizer.encode(value).length;
-            if (key == "name") {
-                num_tokens += tokensPerName;
+        try {
+            num_tokens += tokensPerMessage;
+            for (const [key, value] of Object.entries(msg)) {
+                num_tokens += tokenizer.encode(value).length;
+                if (key == "name") {
+                    num_tokens += tokensPerName;
+                }
             }
+        } catch {
+            console.warn("Error tokenizing message:", msg);
         }
     }
     num_tokens += tokensPadding;
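
Below is a minimal, self-contained sketch of the two patterns this patch applies. The helper names buildLogitBias and countChatTokens are illustrative only and do not exist in server.js; their bodies mirror the patched handlers. The tokenizer argument is assumed to expose encode(text) returning an array-like of token ids, as the tokenizers used by these handlers do.

// Illustrative sketch of the error handling added by this patch.
// The helper names are hypothetical; the bodies mirror the patched code.

// Build an OpenAI logit_bias map from { text, value } entries.
// An entry the tokenizer cannot encode is logged and skipped instead of
// aborting the whole request.
function buildLogitBias(entries, tokenizer) {
    const result = {};

    for (const entry of entries) {
        if (!entry || !entry.text) {
            continue;
        }

        try {
            const tokens = tokenizer.encode(entry.text);

            for (const token of tokens) {
                result[token] = entry.value;
            }
        } catch {
            console.warn('Tokenizer failed to encode:', entry.text);
        }
    }

    return result;
}

// Count prompt tokens for a chat completion the same way the patched
// /tokenize_openai handler does. If a message fails partway through
// tokenization, the error is logged and counting continues with the
// next message instead of throwing.
function countChatTokens(messages, tokenizer, tokensPerMessage, tokensPerName, tokensPadding) {
    let num_tokens = 0;

    for (const msg of messages) {
        try {
            num_tokens += tokensPerMessage;
            for (const [key, value] of Object.entries(msg)) {
                num_tokens += tokenizer.encode(value).length;
                if (key == "name") {
                    num_tokens += tokensPerName;
                }
            }
        } catch {
            console.warn("Error tokenizing message:", msg);
        }
    }

    return num_tokens + tokensPadding;
}

The bare catch { } (optional catch binding) requires Node.js 10 or newer. Logging the offending input and continuing keeps a single bad bias entry or chat message from failing the entire request.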