Mirror of https://github.com/SillyTavern/SillyTavern.git
Add raw token ids support to OAI logit bias. Fix token counting for turbo models
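For context, the OpenAI chat completions API accepts a `logit_bias` object that maps raw token ids (keyed as strings) to bias values from -100 to 100, so supporting raw ids lets a user bias specific tokens directly instead of having text tokenized first. A minimal illustrative payload — the token ids and bias values below are placeholders, not taken from this commit:

```js
// Illustrative request body only; token ids 1734 and 9906 are arbitrary examples.
const body = {
    model: 'gpt-3.5-turbo',
    messages: [{ role: 'user', content: 'Hello' }],
    logit_bias: {
        '1734': -100, // effectively bans this token id
        '9906': 5,    // makes this token id slightly more likely
    },
};
```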
```diff
@@ -182,6 +182,7 @@ export function getTokenizerModel() {
         return oai_settings.openai_model;
     }

+    const turbo0301Tokenizer = 'gpt-3.5-turbo-0301';
     const turboTokenizer = 'gpt-3.5-turbo';
     const gpt4Tokenizer = 'gpt-4';
     const gpt2Tokenizer = 'gpt2';
@@ -197,6 +198,9 @@ export function getTokenizerModel() {
     if (oai_settings.windowai_model.includes('gpt-4')) {
         return gpt4Tokenizer;
     }
+    else if (oai_settings.windowai_model.includes('gpt-3.5-turbo-0301')) {
+        return turbo0301Tokenizer;
+    }
     else if (oai_settings.windowai_model.includes('gpt-3.5-turbo')) {
         return turboTokenizer;
     }
@@ -213,6 +217,9 @@ export function getTokenizerModel() {
     if (oai_settings.openrouter_model.includes('gpt-4')) {
         return gpt4Tokenizer;
     }
+    else if (oai_settings.openrouter_model.includes('gpt-3.5-turbo-0301')) {
+        return turbo0301Tokenizer;
+    }
     else if (oai_settings.openrouter_model.includes('gpt-3.5-turbo')) {
         return turboTokenizer;
     }
```
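The ordering of the added branches matters because `String.includes()` does substring matching: 'gpt-3.5-turbo-0301' also contains 'gpt-3.5-turbo', so the more specific check has to run first, which is exactly what the diff does. A standalone sketch of that selection pattern — the function name `pickTokenizerModel` is illustrative, not from the repository:

```js
// Illustrative sketch of the branch ordering added above: test the more specific
// model id before the generic one, since substring matching would otherwise
// route 'gpt-3.5-turbo-0301' to the plain 'gpt-3.5-turbo' tokenizer.
function pickTokenizerModel(modelId) {
    if (modelId.includes('gpt-4')) {
        return 'gpt-4';
    }
    if (modelId.includes('gpt-3.5-turbo-0301')) {
        return 'gpt-3.5-turbo-0301';
    }
    if (modelId.includes('gpt-3.5-turbo')) {
        return 'gpt-3.5-turbo';
    }
    return 'gpt2'; // fallback tokenizer
}

console.log(pickTokenizerModel('openai/gpt-3.5-turbo-0301')); // 'gpt-3.5-turbo-0301'
console.log(pickTokenizerModel('gpt-3.5-turbo-0613'));        // 'gpt-3.5-turbo'
```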
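On the token-counting side, OpenAI's published counting recipe treats the -0301 snapshot differently from later turbo models (4 tokens of overhead per message and -1 per name, versus 3 and 1), which is presumably why the snapshot gets its own tokenizer id here. A hedged sketch of that published recipe, not the code from this commit:

```js
// Sketch of the message-overhead rules from OpenAI's num_tokens_from_messages
// cookbook example; illustrative only, not taken from this commit's diff.
function countMessageOverhead(model, messages) {
    const tokensPerMessage = model === 'gpt-3.5-turbo-0301' ? 4 : 3;
    const tokensPerName = model === 'gpt-3.5-turbo-0301' ? -1 : 1;

    let overhead = 3; // every reply is primed with <|start|>assistant<|message|>
    for (const message of messages) {
        overhead += tokensPerMessage;
        if (message.name) {
            overhead += tokensPerName;
        }
    }
    return overhead; // the tokenized length of each field's content is added on top
}
```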