Mirror of https://github.com/SillyTavern/SillyTavern.git
Add Perplexity L3-sonar models

parent ef5499c8dc
commit 31f1b34911
@@ -2682,6 +2682,10 @@
 <h4 data-i18n="Perplexity Model">Perplexity Model</h4>
 <select id="model_perplexity_select">
     <optgroup label="Perplexity Models">
+        <option value="llama-3-sonar-small-32k-chat">llama-3-sonar-small-32k-chat</option>
+        <option value="llama-3-sonar-small-32k-online">llama-3-sonar-small-32k-online</option>
+        <option value="llama-3-sonar-large-32k-chat">llama-3-sonar-large-32k-chat</option>
+        <option value="llama-3-sonar-large-32k-online">llama-3-sonar-large-32k-online</option>
         <option value="sonar-small-chat">sonar-small-chat</option>
         <option value="sonar-small-online">sonar-small-online</option>
         <option value="sonar-medium-chat">sonar-medium-chat</option>
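The markup hunk above only adds the four Llama-3 Sonar entries to the Perplexity model dropdown. The diff does not show how the selection reaches oai_settings.perplexity_model, which the script hunk below reads; a minimal sketch of the usual wiring, assuming a jQuery change handler and a debounced save helper (both are assumptions, not part of this commit):

// Sketch only, not part of the commit. The handler shape and saveSettingsDebounced()
// are assumed; onModelChange() is the function patched in the hunk below.
$('#model_perplexity_select').on('change', function () {
    oai_settings.perplexity_model = String($(this).val());
    saveSettingsDebounced();   // hypothetical persistence helper
    onModelChange();           // re-applies the max-context caps shown below
});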
@@ -3772,6 +3772,12 @@ async function onModelChange() {
     if (oai_settings.max_context_unlocked) {
         $('#openai_max_context').attr('max', unlocked_max);
     }
+    else if (['llama-3-sonar-small-32k-chat', 'llama-3-sonar-large-32k-chat'].includes(oai_settings.perplexity_model)) {
+        $('#openai_max_context').attr('max', max_32k);
+    }
+    else if (['llama-3-sonar-small-32k-online', 'llama-3-sonar-large-32k-online'].includes(oai_settings.perplexity_model)) {
+        $('#openai_max_context').attr('max', 28000);
+    }
     else if (['sonar-small-chat', 'sonar-medium-chat', 'codellama-70b-instruct', 'mistral-7b-instruct', 'mixtral-8x7b-instruct', 'mixtral-8x22b-instruct'].includes(oai_settings.perplexity_model)) {
         $('#openai_max_context').attr('max', max_16k);
     }
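The script hunk caps the context-size slider per model: the new 32k chat variants get max_32k, the new online variants get a hard-coded 28000 (presumably because part of the window is reserved for the online models' search results), and the older Sonar and open-weight models keep max_16k. A standalone sketch of the resulting mapping follows; only the model lists and the 28000 literal come from the diff, the constant values are assumptions:

// Sketch only: restates the cascade above as a pure function.
function perplexityMaxContext(model, maxContextUnlocked) {
    const unlocked_max = 128 * 1024; // assumed value of the existing constant
    const max_32k = 32767;           // assumed value of the existing constant
    const max_16k = 16383;           // assumed value of the existing constant
    if (maxContextUnlocked) return unlocked_max;
    if (['llama-3-sonar-small-32k-chat', 'llama-3-sonar-large-32k-chat'].includes(model)) return max_32k;
    if (['llama-3-sonar-small-32k-online', 'llama-3-sonar-large-32k-online'].includes(model)) return 28000;
    if (['sonar-small-chat', 'sonar-medium-chat', 'codellama-70b-instruct', 'mistral-7b-instruct',
        'mixtral-8x7b-instruct', 'mixtral-8x22b-instruct'].includes(model)) return max_16k;
    return max_16k; // fallback branch is not shown in the hunk; this default is an assumption
}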