Fix memory leak in OAI tokenizer
@@ -1854,6 +1854,8 @@ app.post("/tokenize_openai", jsonParser, function (request, response_tokenize_op
     }
     num_tokens += 2;
 
+    tokenizer.free();
+
     response_tokenize_openai.send({ "token_count": num_tokens });
 });
 
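The added call releases the tokenizer once the token count has been computed. A minimal sketch of the same pattern, assuming the @dqbd/tiktoken WASM bindings (the countTokens helper and its arguments are illustrative, not part of this commit): encoders created by the bindings hold WASM-side memory that the JavaScript garbage collector does not reclaim, so each one must be freed explicitly.

const { encoding_for_model } = require('@dqbd/tiktoken');

// Hypothetical helper illustrating the fix: create the encoder, count tokens,
// and always release the WASM allocation, even if encode() throws.
function countTokens(model, text) {
    const tokenizer = encoding_for_model(model);
    try {
        return tokenizer.encode(text).length;
    } finally {
        // Without free(), every call leaks the encoder's WASM-side memory.
        tokenizer.free();
    }
}

Wrapping the free() in a finally block is one way to guarantee the release on every request path; the commit itself simply adds the free() call before the response is sent.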