Add fallback option for OpenRouter
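Adds an "Allow fallback routes" checkbox to the OpenRouter connection panel, persists it as openrouter_use_fallback in settings and presets, forwards it to the backend as use_fallback, and has the server set OpenRouter's route: 'fallback' body parameter so an alternative model is used when the selected one can't serve the request.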
@@ -1972,6 +1972,17 @@
                         <option data-i18n="Connect to the API">-- Connect to the API --</option>
                     </select>
                 </div>
+                <div class="marginTopBot5">
+                    <label for="openrouter_use_fallback" class="checkbox_label">
+                        <input id="openrouter_use_fallback" type="checkbox" />
+                        <span data-i18n="Allow fallback routes">Allow fallback routes</span>
+                    </label>
+                    <div class="toggle-description justifyLeft">
+                        <span data-i18n="Allow fallback routes Description">
+                            Automatically chooses an alternative model if the chosen model can't serve your request.
+                        </span>
+                    </div>
+                </div>
                 <h4 data-i18n="OpenRouter API Key">OpenRouter API Key</h4>
                 <div>
                     <small>
@@ -174,6 +174,7 @@ const default_settings = {
     ai21_model: 'j2-ultra',
     windowai_model: '',
     openrouter_model: openrouter_website_model,
+    openrouter_use_fallback: true,
     jailbreak_system: false,
     reverse_proxy: '',
     legacy_streaming: false,
@@ -217,6 +218,7 @@ const oai_settings = {
     ai21_model: 'j2-ultra',
     windowai_model: '',
     openrouter_model: openrouter_website_model,
+    openrouter_use_fallback: true,
     jailbreak_system: false,
     reverse_proxy: '',
     legacy_streaming: false,
@@ -1158,6 +1160,7 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
     if (isOpenRouter) {
         generate_data['use_openrouter'] = true;
         generate_data['top_k'] = Number(oai_settings.top_k_openai);
+        generate_data['use_fallback'] = oai_settings.openrouter_use_fallback;
     }
 
     if (isScale) {
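For orientation, a hedged sketch of the generation payload the client POSTs to the local /generate_openai route when OpenRouter is selected. Only use_openrouter, top_k, and use_fallback come from this hunk; the remaining fields and all values are illustrative placeholders, not taken from the diff.

    // Illustrative shape only; the real generate_data carries the full set of sampler settings.
    const exampleGenerateData = {
        model: 'example-vendor/example-model',          // placeholder model id
        messages: [{ role: 'user', content: 'Hello' }], // placeholder prompt
        use_openrouter: true,                           // tells the server to apply OpenRouter headers/body params
        top_k: 0,                                       // Number(oai_settings.top_k_openai)
        use_fallback: true,                             // mirrors oai_settings.openrouter_use_fallback
    };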
@@ -1842,6 +1845,7 @@ function loadOpenAISettings(data, settings) {
     oai_settings.claude_model = settings.claude_model ?? default_settings.claude_model;
     oai_settings.windowai_model = settings.windowai_model ?? default_settings.windowai_model;
     oai_settings.openrouter_model = settings.openrouter_model ?? default_settings.openrouter_model;
+    oai_settings.openrouter_use_fallback = settings.openrouter_use_fallback ?? default_settings.openrouter_use_fallback;
     oai_settings.ai21_model = settings.ai21_model ?? default_settings.ai21_model;
     oai_settings.chat_completion_source = settings.chat_completion_source ?? default_settings.chat_completion_source;
     oai_settings.api_url_scale = settings.api_url_scale ?? default_settings.api_url_scale;
@@ -1891,6 +1895,7 @@ function loadOpenAISettings(data, settings) {
     $('#use_ai21_tokenizer').prop('checked', oai_settings.use_ai21_tokenizer);
     $('#exclude_assistant').prop('checked', oai_settings.exclude_assistant);
     $('#scale-alt').prop('checked', oai_settings.use_alt_scale);
+    $('#openrouter_use_fallback').prop('checked', oai_settings.openrouter_use_fallback);
     if (settings.impersonation_prompt !== undefined) oai_settings.impersonation_prompt = settings.impersonation_prompt;
 
     $('#impersonation_prompt_textarea').val(oai_settings.impersonation_prompt);
@@ -2055,6 +2060,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
         claude_model: settings.claude_model,
         windowai_model: settings.windowai_model,
         openrouter_model: settings.openrouter_model,
+        openrouter_use_fallback: settings.openrouter_use_fallback,
         ai21_model: settings.ai21_model,
         temperature: settings.temp_openai,
         frequency_penalty: settings.freq_pen_openai,
@@ -2413,6 +2419,7 @@ function onSettingsPresetChange() {
         claude_model: ['#model_claude_select', 'claude_model', false],
         windowai_model: ['#model_windowai_select', 'windowai_model', false],
         openrouter_model: ['#model_openrouter_select', 'openrouter_model', false],
+        openrouter_use_fallback: ['#openrouter_use_fallback', 'openrouter_use_fallback', true],
         ai21_model: ['#model_ai21_select', 'ai21_model', false],
         openai_max_context: ['#openai_max_context', 'openai_max_context', false],
         openai_max_tokens: ['#openai_max_tokens', 'openai_max_tokens', false],
@@ -3125,6 +3132,11 @@ $(document).ready(async function () {
         saveSettingsDebounced();
     });
 
+    $('#openrouter_use_fallback').on('input', function () {
+        oai_settings.openrouter_use_fallback = !!$(this).prop('checked');
+        saveSettingsDebounced();
+    });
+
     $("#api_button_openai").on("click", onConnectButtonClick);
     $("#openai_reverse_proxy").on("input", onReverseProxyInput);
     $("#model_openai_select").on("change", onModelChange);
@@ -3478,6 +3478,10 @@ app.post("/generate_openai", jsonParser, function (request, response_generate_op
         // OpenRouter needs to pass the referer: https://openrouter.ai/docs
         headers = { 'HTTP-Referer': request.headers.referer };
         bodyParams = { 'transforms': ["middle-out"] };
+
+        if (request.body.use_fallback) {
+            bodyParams['route'] = 'fallback';
+        }
     }
 
     if (!api_key_openai && !request.body.reverse_proxy) {
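With the checkbox enabled, the body the server forwards to OpenRouter's chat completions endpoint would roughly gain the two extra parameters shown below. This is a sketch based on this hunk and the OpenRouter docs linked in the comment above; the model and messages fields are placeholders standing in for whatever the client request carries.

    // Rough shape of the outgoing OpenRouter request body when fallback routing is on.
    const exampleOpenRouterBody = {
        model: 'example-vendor/example-model',            // placeholder, taken from the client request
        messages: [{ role: 'user', content: '...' }],     // placeholder prompt
        transforms: ['middle-out'],                       // already sent before this change
        route: 'fallback',                                // added when request.body.use_fallback is truthy
    };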