Add direct OpenRouter connection and PaLM models to Window selection

Author: Cohee
Date: 2023-06-30 00:32:52 +03:00
Parent: 757e9b672a
Commit: f532192726
5 changed files with 137 additions and 31 deletions

View File

@@ -1318,7 +1318,7 @@
 <option value="koboldhorde">KoboldAI Horde</option>
 <option value="textgenerationwebui">Text Gen WebUI (ooba)</option>
 <option value="novel">NovelAI</option>
-<option value="openai">Chat Completion (OpenAI, Claude, Window.ai)</option>
+<option value="openai">Chat Completion (OpenAI, Claude, Window/OpenRouter)</option>
 <option value="poe">Poe</option>
 </select>
 </div>
@@ -1366,8 +1366,8 @@
 <input id="horde_api_key" name="horde_api_key" class="text_pole flex1" maxlength="500" type="text" placeholder="0000000000" autocomplete="off">
 <div title="Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_horde"></div>
 </div>
-<div class="neutral_warning">For privacy reasons, your API key will be hidden after you
-reload the page.</div>
+<div data-for="horde_api_key" class="neutral_warning">
+For privacy reasons, your API key will be hidden after you reload the page.</div>
 <h4 class="horde_model_title">
 Models
 <div id="horde_refresh" title="Refresh models" class="right_menu_button">
@@ -1417,8 +1417,9 @@
 <div title="Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_novel">
 </div>
 </div>
-<div class="neutral_warning">For privacy reasons, your API key will be hidden after you
-reload the page.</div>
+<div data-for="api_key_novel" class="neutral_warning">
+For privacy reasons, your API key will be hidden after you reload the page.
+</div>
 <input id="api_button_novel" class="menu_button" type="submit" value="Connect">
 <div id="api_loading_novel" class="api-load-icon fa-solid fa-hourglass fa-spin"></div>
 <h4><span data-i18n="Novel AI Model">Novel AI Model</span>
@@ -1473,7 +1474,7 @@
 </h3>
 <select id="chat_completion_source">
 <option value="openai">OpenAI</option>
-<option value="windowai">Window.ai</option>
+<option value="windowai">Window AI / OpenRouter</option>
 <option value="claude">Claude</option>
 </select>
 <form id="openai_form" data-source="openai">
@@ -1491,8 +1492,9 @@
 <input id="api_key_openai" name="api_key_openai" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
 <div title="Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_openai"></div>
 </div>
-<div class="neutral_warning">For privacy reasons, your API key will be hidden after you
-reload the page.</div>
+<div data-for="api_key_openai" class="neutral_warning">
+For privacy reasons, your API key will be hidden after you reload the page.
+</div>
 <div>
 <h4 data-i18n="OpenAI Model">OpenAI Model</h4>
 <select id="model_openai_select">
@@ -1527,8 +1529,9 @@
 <input id="api_key_claude" name="api_key_claude" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
 <div title="Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_claude"></div>
 </div>
-<div class="neutral_warning">For privacy reasons, your API key will be hidden after you
-reload the page.</div>
+<div data-for="api_key_claude" class="neutral_warning">
+For privacy reasons, your API key will be hidden after you reload the page.
+</div>
 <div>
 <h4 data-i18n="Claude Model">Claude Model</h4>
 <select id="model_claude_select">
@@ -1563,10 +1566,29 @@
 <option value="anthropic/claude-instant-v1-100k">anthropic/claude-instant-v1-100k</option>
 <option value="anthropic/claude-v1">anthropic/claude-v1</option>
 <option value="anthropic/claude-v1-100k">anthropic/claude-v1-100k</option>
+<option value="google/palm-2-chat-bison">google/palm-2-chat-bison</option>
+<option value="google/palm-2-codechat-bison">google/palm-2-codechat-bison</option>
 <option value="togethercomputer/GPT-NeoXT-Chat-Base-20B">togethercomputer/GPT-NeoXT-Chat-Base-20B</option>
 <option value="cohere/command-nightly">cohere/command-nightly</option>
 </select>
 </div>
+<h4 data-i18n="OpenRouter API Key">OpenRouter API Key</h4>
+<div>
+<small>
+Get your key from <a target="_blank" href="https://openrouter.ai/keys/">OpenRouter</a>.
+</small>
+</div>
+<div class="flex-container">
+<input id="api_key_openrouter" name="api_key_openrouter" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
+<div title="Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_openrouter"></div>
+</div>
+<div data-for="api_key_openrouter" class="neutral_warning">
+For privacy reasons, your API key will be hidden after you reload the page.
+</div>
+<label class="checkbox_label" for="use_openrouter">
+<input id="use_openrouter" type="checkbox" />
+<span data-i18n="Use OpenRouter">Use OpenRouter directly</span>
+</label>
 </form>
 <div class="flex-container flex">
@@ -1598,8 +1620,9 @@
 <div title="Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_poe">
 </div>
 </div>
-<div class="neutral_warning">For privacy reasons, your API key will be hidden after you
-reload the page.</div>
+<div data-for="poe_token" class="neutral_warning">
+For privacy reasons, your API key will be hidden after you reload the page.
+</div>
 </div>
 <input id="poe_connect" class="menu_button" type="button" value="Connect" />

View File

@@ -86,7 +86,8 @@ const gpt3_16k_max = 16383;
 const gpt4_max = 8191;
 const gpt_neox_max = 2048;
 const gpt4_32k_max = 32767;
-const claude_max = 7500;
+const claude_max = 8000; // We have a proper tokenizer, so theoretically could be larger (up to 9k)
+const palm2_max = 8000; // The real context window is 8192, spare some for padding due to using turbo tokenizer
 const claude_100k_max = 99000;
 const unlocked_max = 100 * 1024;
 const oai_max_temp = 2.0;
@@ -132,6 +133,7 @@ const default_settings = {
 legacy_streaming: false,
 chat_completion_source: chat_completion_sources.OPENAI,
 max_context_unlocked: false,
+use_openrouter: false,
 };
 const oai_settings = {
@@ -165,6 +167,7 @@
 legacy_streaming: false,
 chat_completion_source: chat_completion_sources.OPENAI,
 max_context_unlocked: false,
+use_openrouter: false,
 };
 let openai_setting_names;
@@ -568,8 +571,8 @@ async function sendWindowAIRequest(openai_msgs_tosend, signal, stream) {
 const currentModel = await window.ai.getCurrentModel();
 let temperature = parseFloat(oai_settings.temp_openai);
-if (currentModel.includes('claude') && temperature > claude_max_temp) {
-console.warn(`Claude model only supports temperature up to ${claude_max_temp}. Clamping ${temperature} to ${claude_max_temp}.`);
+if ((currentModel.includes('claude') || currentModel.includes('palm-2')) && temperature > claude_max_temp) {
+console.warn(`Claude and PaLM models only supports temperature up to ${claude_max_temp}. Clamping ${temperature} to ${claude_max_temp}.`);
 temperature = claude_max_temp;
 }
@@ -649,6 +652,19 @@ async function sendWindowAIRequest(openai_msgs_tosend, signal, stream) {
 }
 }
+function getChatCompletionModel() {
+switch (oai_settings.chat_completion_source) {
+case chat_completion_sources.CLAUDE:
+return oai_settings.claude_model;
+case chat_completion_sources.OPENAI:
+return oai_settings.openai_model;
+case chat_completion_sources.WINDOWAI:
+return oai_settings.windowai_model;
+default:
+throw new Error(`Unknown chat completion source: ${oai_settings.chat_completion_source}`);
+}
+}
 async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
 // Provide default abort signal
 if (!signal) {
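Note: enabling "Use OpenRouter directly" keeps WINDOWAI as the chat completion source, so getChatCompletionModel() resolves to oai_settings.windowai_model — the same model picker that now lists the google/palm-2-* entries. A minimal sketch of that resolution (the values below are illustrative, not defaults set by this commit):

// Illustrative values only.
oai_settings.chat_completion_source = chat_completion_sources.WINDOWAI;
oai_settings.use_openrouter = true;
oai_settings.windowai_model = 'google/palm-2-chat-bison';
getChatCompletionModel(); // => 'google/palm-2-chat-bison', sent as "model" to /generate_openai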
@@ -661,23 +677,24 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
 let logit_bias = {};
 const isClaude = oai_settings.chat_completion_source == chat_completion_sources.CLAUDE;
+const isOpenRouter = oai_settings.use_openrouter && oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI;
 const stream = type !== 'quiet' && oai_settings.stream_openai;
 // If we're using the window.ai extension, use that instead
 // Doesn't support logit bias yet
-if (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI) {
+if (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI && !oai_settings.use_openrouter) {
 return sendWindowAIRequest(openai_msgs_tosend, signal, stream);
 }
 if (oai_settings.bias_preset_selected
-&& !isClaude // Claude doesn't support logit bias
+&& oai_settings.chat_completion_source == chat_completion_sources.OPENAI
 && Array.isArray(oai_settings.bias_presets[oai_settings.bias_preset_selected])
 && oai_settings.bias_presets[oai_settings.bias_preset_selected].length) {
 logit_bias = biasCache || await calculateLogitBias();
 biasCache = logit_bias;
 }
-const model = isClaude ? oai_settings.claude_model : oai_settings.openai_model;
+const model = getChatCompletionModel();
 const generate_data = {
 "messages": openai_msgs_tosend,
 "model": model,
@@ -691,6 +708,7 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
 "reverse_proxy": oai_settings.reverse_proxy,
 "logit_bias": logit_bias,
 "use_claude": isClaude,
+"use_openrouter": isOpenRouter,
 };
 const generate_url = '/generate_openai';
@@ -767,7 +785,7 @@ function getStreamingReply(getMessage, data) {
 if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) {
 getMessage = data.completion || "";
 } else {
-getMessage += data.choices[0]["delta"]["content"] || "";
+getMessage += data.choices[0]?.delta?.content || data.choices[0]?.message?.content || "";
 }
 return getMessage;
 }
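The widened fallback above tolerates two chunk layouts; roughly (illustrative objects, not captured output):

// The old code only handled the first shape; the optional chaining now accepts either:
// { choices: [{ delta:   { content: "Hel" } }] }    // incremental OpenAI-style streaming delta
// { choices: [{ message: { content: "Hello" } }] }  // a full message object delivered per chunk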
@@ -979,6 +997,7 @@ function loadOpenAISettings(data, settings) {
 oai_settings.claude_model = settings.claude_model ?? default_settings.claude_model;
 oai_settings.windowai_model = settings.windowai_model ?? default_settings.windowai_model;
 oai_settings.chat_completion_source = settings.chat_completion_source ?? default_settings.chat_completion_source;
+oai_settings.use_openrouter = settings.use_openrouter ?? default_settings.use_openrouter;
 if (settings.nsfw_toggle !== undefined) oai_settings.nsfw_toggle = !!settings.nsfw_toggle;
 if (settings.keep_example_dialogue !== undefined) oai_settings.keep_example_dialogue = !!settings.keep_example_dialogue;
@@ -1055,11 +1074,12 @@ function loadOpenAISettings(data, settings) {
 $('#chat_completion_source').val(oai_settings.chat_completion_source).trigger('change');
 $('#oai_max_context_unlocked').prop('checked', oai_settings.max_context_unlocked);
+$('#use_openrouter').prop('checked', oai_settings.use_openrouter);
 }
 async function getStatusOpen() {
 if (is_get_status_openai) {
-if (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI) {
+if (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI && !oai_settings.use_openrouter) {
 let status;
 if ('ai' in window) {
@@ -1082,6 +1102,7 @@ async function getStatusOpen() {
 let data = {
 reverse_proxy: oai_settings.reverse_proxy,
+use_openrouter: oai_settings.use_openrouter && oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI,
 };
 return jQuery.ajax({
@@ -1089,7 +1110,7 @@
 url: '/getstatus_openai', //
 data: JSON.stringify(data),
 beforeSend: function () {
-if (oai_settings.reverse_proxy) {
+if (oai_settings.reverse_proxy && !data.use_openrouter) {
 validateReverseProxy();
 }
 },
@@ -1157,6 +1178,7 @@ async function saveOpenAIPreset(name, settings) {
 openai_model: settings.openai_model,
 claude_model: settings.claude_model,
 windowai_model: settings.windowai_model,
+use_openrouter: settings.use_openrouter,
 temperature: settings.temp_openai,
 frequency_penalty: settings.freq_pen_openai,
 presence_penalty: settings.pres_pen_openai,
@@ -1530,6 +1552,7 @@ function onSettingsPresetChange() {
 nsfw_avoidance_prompt: ['#nsfw_avoidance_prompt_textarea', 'nsfw_avoidance_prompt', false],
 wi_format: ['#wi_format_textarea', 'wi_format', false],
 stream_openai: ['#stream_toggle', 'stream_openai', true],
+use_openrouter: ['#use_openrouter', 'use_openrouter', true],
 };
 for (const [key, [selector, setting, isCheckbox]] of Object.entries(settingsToUpdate)) {
@@ -1605,6 +1628,9 @@ function onModelChange() {
 else if (value.includes('gpt-4')) {
 $('#openai_max_context').attr('max', gpt4_max);
 }
+else if (value.includes('palm-2')) {
+$('#openai_max_context').attr('max', palm2_max);
+}
 else if (value.includes('GPT-NeoXT')) {
 $('#openai_max_context').attr('max', gpt_neox_max);
 }
@@ -1616,7 +1642,7 @@ function onModelChange() {
 oai_settings.openai_max_context = Math.min(Number($('#openai_max_context').attr('max')), oai_settings.openai_max_context);
 $('#openai_max_context').val(oai_settings.openai_max_context).trigger('input');
-if (value.includes('claude')) {
+if (value.includes('claude') || value.includes('palm-2')) {
 oai_settings.temp_openai = Math.min(claude_max_temp, oai_settings.temp_openai);
 $('#temp_openai').attr('max', claude_max_temp).val(oai_settings.temp_openai).trigger('input');
 }
@@ -1682,6 +1708,17 @@ async function onConnectButtonClick(e) {
 if (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI) {
 is_get_status_openai = true;
 is_api_button_press_openai = true;
+const api_key_openrouter = $('#api_key_openrouter').val().trim();
+if (api_key_openrouter.length) {
+await writeSecret(SECRET_KEYS.OPENROUTER, api_key_openrouter);
+}
+if (oai_settings.use_openrouter && !secret_state[SECRET_KEYS.OPENROUTER]) {
+console.log('No secret key saved for OpenRouter');
+return;
+}
 return await getStatusOpen();
 }
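Condensed, the connect flow this adds for the Window AI / OpenRouter source (mirrors the code above):

// 1. Persist any key typed into #api_key_openrouter via writeSecret(SECRET_KEYS.OPENROUTER, ...).
// 2. If "Use OpenRouter directly" is checked but no OpenRouter secret is stored, log and bail out.
// 3. Otherwise fall through to getStatusOpen(), which now sends use_openrouter to /getstatus_openai.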
@@ -1955,6 +1992,12 @@ $(document).ready(function () {
 saveSettingsDebounced();
 });
+$('#use_openrouter').on('input', function () {
+oai_settings.use_openrouter = !!$(this).prop('checked');
+reconnectOpenAi();
+saveSettingsDebounced();
+});
 $("#api_button_openai").on("click", onConnectButtonClick);
 $("#openai_reverse_proxy").on("input", onReverseProxyInput);
 $("#model_openai_select").on("change", onModelChange);

View File

@@ -6,6 +6,7 @@ export const SECRET_KEYS = {
 POE: 'api_key_poe',
 NOVEL: 'api_key_novel',
 CLAUDE: 'api_key_claude',
+OPENROUTER: 'api_key_openrouter',
 }
 const INPUT_MAP = {
@@ -14,6 +15,7 @@
 [SECRET_KEYS.POE]: '#poe_token',
 [SECRET_KEYS.NOVEL]: '#api_key_novel',
 [SECRET_KEYS.CLAUDE]: '#api_key_claude',
+[SECRET_KEYS.OPENROUTER]: '#api_key_openrouter',
 }
 async function clearSecret() {
@@ -103,4 +105,10 @@ export async function readSecretState() {
 jQuery(() => {
 $('#viewSecrets').on('click', viewSecrets);
 $(document).on('click', '.clear-api-key', clearSecret);
+$(document).on('input', Object.values(INPUT_MAP).join(','), function() {
+const id = $(this).attr('id');
+const value = $(this).val();
+const warningElement = $(`[data-for="${id}"]`);
+warningElement.toggle(value.length > 0);
+});
 });

View File

@@ -4223,6 +4223,10 @@ toolcool-color-picker {
 font-weight: 800;
 }
+.neutral_warning[data-for] {
+display: none;
+}
 .max_context_unlocked_block .checkbox_label {
 flex-wrap: wrap;
 }
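Together with the data-for attributes added in the markup and the input handler added in the secrets script above, this rule hides the privacy warnings by default; the handler reveals a warning only while its matching key field contains text. For a single field the effect is equivalent to (illustrative):

$('[data-for="api_key_openrouter"]').toggle($('#api_key_openrouter').val().length > 0);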

View File

@@ -2889,15 +2889,30 @@ app.get('/thumbnail', jsonParser, async function (request, response) {
 app.post("/getstatus_openai", jsonParser, function (request, response_getstatus_openai = response) {
 if (!request.body) return response_getstatus_openai.sendStatus(400);
-const api_key_openai = readSecret(SECRET_KEYS.OPENAI);
-if (!api_key_openai) {
-return response_getstatus_openai.sendStatus(401);
+let api_url;
+let api_key_openai;
+let headers;
+if (request.body.use_openrouter == false) {
+api_url = new URL(request.body.reverse_proxy || api_openai).toString();
+api_key_openai = readSecret(SECRET_KEYS.OPENAI);
+headers = {};
+} else {
+api_url = 'https://openrouter.ai/api/v1';
+api_key_openai = readSecret(SECRET_KEYS.OPENROUTER);
+// OpenRouter needs to pass the referer: https://openrouter.ai/docs
+headers = { 'HTTP-Referer': request.headers.referer };
+}
+if (!api_key_openai) {
+return response_getstatus_openai.status(401).send({ error: true });
 }
-const api_url = new URL(request.body.reverse_proxy || api_openai).toString();
 const args = {
-headers: { "Authorization": "Bearer " + api_key_openai }
+headers: {
+"Authorization": "Bearer " + api_key_openai,
+...headers,
+},
 };
 client.get(api_url + "/models", args, function (data, response) {
 if (response.statusCode == 200) {
@@ -3132,9 +3147,20 @@ app.post("/generate_openai", jsonParser, function (request, response_generate_op
 return sendClaudeRequest(request, response_generate_openai);
 }
-const api_url = new URL(request.body.reverse_proxy || api_openai).toString();
-const api_key_openai = readSecret(SECRET_KEYS.OPENAI);
+let api_url;
+let api_key_openai;
+let headers;
+if (request.body.use_openrouter == false) {
+api_url = new URL(request.body.reverse_proxy || api_openai).toString();
+api_key_openai = readSecret(SECRET_KEYS.OPENAI);
+headers = {};
+} else {
+api_url = 'https://openrouter.ai/api/v1';
+api_key_openai = readSecret(SECRET_KEYS.OPENROUTER);
+// OpenRouter needs to pass the referer: https://openrouter.ai/docs
+headers = { 'HTTP-Referer': request.headers.referer };
+}
 if (!api_key_openai) {
 return response_generate_openai.status(401).send({ error: true });
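Combined with the Authorization and spread headers added in the next hunk, the OpenRouter branch ends up issuing roughly the request below. This is a sketch using Node's built-in fetch, not the HTTP client the server actually uses; the key and referer values are placeholders:

// Sketch only: mirrors the URL, headers, and body assembled by /generate_openai for OpenRouter.
const response = await fetch('https://openrouter.ai/api/v1/chat/completions', {
    method: 'POST',
    headers: {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer ' + process.env.OPENROUTER_KEY, // stands in for readSecret(SECRET_KEYS.OPENROUTER)
        'HTTP-Referer': 'http://127.0.0.1:8000/', // OpenRouter expects a referer, per https://openrouter.ai/docs
    },
    body: JSON.stringify({
        model: 'google/palm-2-chat-bison', // example id from the model list above
        messages: [{ role: 'user', content: 'Hello' }],
    }),
});
const completion = await response.json();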
@@ -3152,7 +3178,8 @@ app.post("/generate_openai", jsonParser, function (request, response_generate_op
 url: api_url + '/chat/completions',
 headers: {
 'Content-Type': 'application/json',
-'Authorization': 'Bearer ' + api_key_openai
+'Authorization': 'Bearer ' + api_key_openai,
+...headers,
 },
 data: {
 "messages": request.body.messages,
@@ -3498,6 +3525,7 @@ const SECRET_KEYS = {
 NOVEL: 'api_key_novel',
 CLAUDE: 'api_key_claude',
 DEEPL: 'deepl',
+OPENROUTER: 'api_key_openrouter',
 }
 function migrateSecrets() {