Perplexity: Add reasoning model, remove deprecated models

Cohee
2025-01-29 20:57:38 +02:00
parent ee57675c12
commit 15a3cfcb8a
3 changed files with 19 additions and 38 deletions


@@ -258,7 +258,7 @@ const default_settings = {
     ai21_model: 'jamba-1.5-large',
     mistralai_model: 'mistral-large-latest',
     cohere_model: 'command-r-plus',
-    perplexity_model: 'llama-3.1-70b-instruct',
+    perplexity_model: 'sonar-pro',
     groq_model: 'llama-3.1-70b-versatile',
     nanogpt_model: 'gpt-4o-mini',
     zerooneai_model: 'yi-large',
@@ -337,7 +337,7 @@ const oai_settings = {
     ai21_model: 'jamba-1.5-large',
     mistralai_model: 'mistral-large-latest',
     cohere_model: 'command-r-plus',
-    perplexity_model: 'llama-3.1-70b-instruct',
+    perplexity_model: 'sonar-pro',
     groq_model: 'llama-3.1-70b-versatile',
     nanogpt_model: 'gpt-4o-mini',
     zerooneai_model: 'yi-large',
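
Both hunks above change the same value: the factory defaults in default_settings and the runtime oai_settings object, which the file keeps in parallel, move the Perplexity default from the deprecated llama-3.1-70b-instruct to sonar-pro. A minimal sketch of the affected slice, assuming nothing beyond the property shown in the diff (all other fields omitted):

// Sketch only: the single property this commit touches in both settings objects.
const default_settings_slice = { perplexity_model: 'sonar-pro' }; // was 'llama-3.1-70b-instruct'
const oai_settings_slice = { perplexity_model: 'sonar-pro' };     // mirrors the new default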
@@ -4380,28 +4380,19 @@ async function onModelChange() {
         if (oai_settings.max_context_unlocked) {
             $('#openai_max_context').attr('max', unlocked_max);
         }
+        else if (['sonar', 'sonar-reasoning'].includes(oai_settings.perplexity_model)) {
+            $('#openai_max_context').attr('max', 127000);
+        }
+        else if (['sonar-pro'].includes(oai_settings.perplexity_model)) {
+            $('#openai_max_context').attr('max', 200000);
+        }
-        else if (oai_settings.perplexity_model.includes('llama-3.1')) {
-            const isOnline = oai_settings.perplexity_model.includes('online');
-            const contextSize = isOnline ? 128 * 1024 - 4000 : 128 * 1024;
-            $('#openai_max_context').attr('max', contextSize);
-        }
-        else if (['llama-3-sonar-small-32k-chat', 'llama-3-sonar-large-32k-chat'].includes(oai_settings.perplexity_model)) {
-            $('#openai_max_context').attr('max', max_32k);
-        }
-        else if (['llama-3-sonar-small-32k-online', 'llama-3-sonar-large-32k-online'].includes(oai_settings.perplexity_model)) {
-            $('#openai_max_context').attr('max', 28000);
-        }
-        else if (['sonar-small-chat', 'sonar-medium-chat', 'codellama-70b-instruct', 'mistral-7b-instruct', 'mixtral-8x7b-instruct', 'mixtral-8x22b-instruct'].includes(oai_settings.perplexity_model)) {
-            $('#openai_max_context').attr('max', max_16k);
-        }
-        else if (['llama-3-8b-instruct', 'llama-3-70b-instruct'].includes(oai_settings.perplexity_model)) {
-            $('#openai_max_context').attr('max', max_8k);
-        }
-        else if (['sonar-small-online', 'sonar-medium-online'].includes(oai_settings.perplexity_model)) {
-            $('#openai_max_context').attr('max', 12000);
-        }
         else {
-            $('#openai_max_context').attr('max', max_4k);
+            $('#openai_max_context').attr('max', max_128k);
         }
         oai_settings.openai_max_context = Math.min(Number($('#openai_max_context').attr('max')), oai_settings.openai_max_context);
         $('#openai_max_context').val(oai_settings.openai_max_context).trigger('input');
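
With the deprecated branches gone, the Perplexity section of onModelChange() reduces to a three-way choice: 127000 tokens for sonar and the new sonar-reasoning model, 200000 for sonar-pro, and the existing max_128k constant for anything else (the old fallback was max_4k). A standalone sketch of that selection, assuming only the model IDs visible in the diff; getPerplexityMaxContext and the max128k parameter are illustrative names, not code from this commit:

// Minimal sketch mirroring the new context-size logic above.
// getPerplexityMaxContext is a hypothetical helper; max128k stands in for the
// max_128k constant defined elsewhere in this file.
function getPerplexityMaxContext(model, maxContextUnlocked, unlockedMax, max128k) {
    if (maxContextUnlocked) {
        return unlockedMax; // context limit unlocked by the user: use the unlocked maximum
    }
    if (['sonar', 'sonar-reasoning'].includes(model)) {
        return 127000; // sonar and the new reasoning model
    }
    if (model === 'sonar-pro') {
        return 200000; // larger window for sonar-pro
    }
    return max128k; // fallback for any other (e.g. future) model
}

For example, getPerplexityMaxContext('sonar-reasoning', false, 0, 128000) returns 127000, while a model ID not listed above now falls through to the 128k fallback instead of the old 4k default.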