Support Qwen tokenizer for Groq

This commit is contained in:
Cohee 2025-02-22 14:29:04 +02:00
parent 159852233f
commit 9b631ed048

View File

@ -694,6 +694,9 @@ export function getTokenizerModel() {
}
if (oai_settings.chat_completion_source === chat_completion_sources.GROQ) {
if (oai_settings.groq_model.includes('qwen')) {
return qwen2Tokenizer;
}
if (oai_settings.groq_model.includes('llama-3') || oai_settings.groq_model.includes('llama3')) {
return llama3Tokenizer;
}