Add OpenRouter as a text completion source
Commit f962ad5c02 (parent 9e5505a7d4)
@@ -1915,6 +1915,7 @@
 <option value="tabby">TabbyAPI</option>
 <option value="koboldcpp">KoboldCpp</option>
 <option value="llamacpp">llama.cpp</option>
+<option value="openrouter">OpenRouter</option>
 <option value="ollama">Ollama</option>
 <option value="togetherai">TogetherAI</option>
 <option value="infermaticai">InfermaticAI</option>
@@ -1938,6 +1939,30 @@
         </select>
     </div>
 </div>
+<div data-tg-type="openrouter" class="flex-container flexFlowColumn">
+    <h4 data-i18n="OpenRouter API Key">OpenRouter API Key</h4>
+    <div>
+        <small data-i18n="Click Authorize below or get the key from">
+            Click "Authorize" below or get the key from </small> <a target="_blank" href="https://openrouter.ai/keys/">OpenRouter</a>.
+        <br>
+        <a href="https://openrouter.ai/account" target="_blank" data-i18n="View Remaining Credits">View Remaining Credits</a>
+    </div>
+    <div class="flex-container">
+        <input id="api_key_openrouter-tg" name="api_key_openrouter" class="text_pole flex1 api_key_openrouter" maxlength="500" value="" type="text" autocomplete="off">
+        <div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_openrouter"></div>
+    </div>
+    <div data-for="api_key_openrouter" class="neutral_warning">
+        For privacy reasons, your API key will be hidden after you reload the page.
+    </div>
+    <div>
+        <h4 data-i18n="OpenRouter Model">OpenRouter Model</h4>
+        <select id="openrouter_model">
+            <option data-i18n="-- Connect to the API --">
+                -- Connect to the API --
+            </option>
+        </select>
+    </div>
+</div>
 <div data-tg-type="infermaticai" class="flex-container flexFlowColumn">
     <h4 data-i18n="InfermaticAI API Key">InfermaticAI API Key</h4>
     <div class="flex-container">
@@ -2105,6 +2130,7 @@
 </div>
 <div class="flex-container">
     <div id="api_button_textgenerationwebui" class="api_button menu_button" type="submit" data-i18n="Connect" data-server-connect="ooba_blocking,aphrodite,tabby,koboldcpp">Connect</div>
+    <div data-tg-type="openrouter" class="menu_button menu_button_icon openrouter_authorize" title="Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai" data-i18n="[title]Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai">Authorize</div>
     <div class="api_loading menu_button" data-i18n="Cancel">Cancel</div>
 </div>
 <label data-tg-type="ooba,aphrodite" class="checkbox_label margin-bot-10px" for="legacy_api_textgenerationwebui">
@@ -2416,7 +2442,7 @@
     <a href="https://openrouter.ai/account" target="_blank" data-i18n="View Remaining Credits">View Remaining Credits</a>
 </div>
 <div class="flex-container">
-    <input id="api_key_openrouter" name="api_key_openrouter" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
+    <input id="api_key_openrouter" name="api_key_openrouter" class="text_pole flex1 api_key_openrouter" maxlength="500" value="" type="text" autocomplete="off">
     <div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_openrouter"></div>
 </div>
 <div data-for="api_key_openrouter" class="neutral_warning">
@@ -2546,7 +2572,7 @@
     <div id="api_button_openai" class="api_button menu_button menu_button_icon" type="submit" data-i18n="Connect">Connect</div>
     <div class="api_loading menu_button" data-i18n="Cancel">Cancel</div>
     <div data-source="custom" id="customize_additional_parameters" class="menu_button menu_button_icon">Additional Parameters</div>
-    <div data-source="openrouter" id="openrouter_authorize" class="menu_button menu_button_icon" title="Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai" data-i18n="[title]Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai">Authorize</div>
+    <div data-source="openrouter" class="menu_button menu_button_icon openrouter_authorize" title="Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai" data-i18n="[title]Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai">Authorize</div>
     <div id="test_api_button" class="menu_button menu_button_icon" title="Verifies your API connection by sending a short test message. Be aware that you'll be credited for it!" data-i18n="[title]Verifies your API connection by sending a short test message. Be aware that you'll be credited for it!"><span data-i18n="Test Message">Test Message</span></div>
 </div>
 <div class="online_status">
@@ -22,7 +22,7 @@ import {
     parseTabbyLogprobs,
 } from './scripts/textgen-settings.js';
 
-const { MANCER, TOGETHERAI, OOBA, APHRODITE, OLLAMA, INFERMATICAI } = textgen_types;
+const { MANCER, TOGETHERAI, OOBA, APHRODITE, OLLAMA, INFERMATICAI, OPENROUTER } = textgen_types;
 
 import {
     world_info,
@@ -196,7 +196,7 @@ import { createPersona, initPersonas, selectCurrentPersona, setPersonaDescriptio
 import { getBackgrounds, initBackgrounds, loadBackgroundSettings, background_settings } from './scripts/backgrounds.js';
 import { hideLoader, showLoader } from './scripts/loader.js';
 import { BulkEditOverlay, CharacterContextMenu } from './scripts/BulkEditOverlay.js';
-import { loadMancerModels, loadOllamaModels, loadTogetherAIModels, loadInfermaticAIModels } from './scripts/textgen-models.js';
+import { loadMancerModels, loadOllamaModels, loadTogetherAIModels, loadInfermaticAIModels, loadOpenRouterModels } from './scripts/textgen-models.js';
 import { appendFileContent, hasPendingFileAttachment, populateFileAttachment, decodeStyleTags, encodeStyleTags } from './scripts/chats.js';
 import { initPresetManager } from './scripts/preset-manager.js';
 import { evaluateMacros } from './scripts/macros.js';
@@ -1060,6 +1060,9 @@ async function getStatusTextgen() {
     } else if (textgen_settings.type === INFERMATICAI) {
         loadInfermaticAIModels(data?.data);
         online_status = textgen_settings.infermaticai_model;
+    } else if (textgen_settings.type === OPENROUTER) {
+        loadOpenRouterModels(data?.data);
+        online_status = textgen_settings.openrouter_model;
     } else {
         online_status = data?.result;
     }
@@ -7706,6 +7709,11 @@ const CONNECT_API_MAP = {
         button: '#api_button_textgenerationwebui',
         type: textgen_types.INFERMATICAI,
     },
+    'openrouter-text': {
+        selected: 'textgenerationwebui',
+        button: '#api_button_textgenerationwebui',
+        type: textgen_types.OPENROUTER,
+    },
 };
 
 async function selectContextCallback(_, name) {
@@ -8643,6 +8651,11 @@ jQuery(async function () {
         await writeSecret(SECRET_KEYS.INFERMATICAI, infermaticAIKey);
     }
 
+    const openRouterKey = String($('#api_key_openrouter-tg').val()).trim();
+    if (openRouterKey.length) {
+        await writeSecret(SECRET_KEYS.OPENROUTER, openRouterKey);
+    }
+
     validateTextGenUrl();
     startStatusLoading();
     main_api = 'textgenerationwebui';
@@ -391,7 +391,8 @@ function RA_autoconnect(PrevApi) {
         case 'textgenerationwebui':
             if ((textgen_settings.type === textgen_types.MANCER && secret_state[SECRET_KEYS.MANCER])
                 || (textgen_settings.type === textgen_types.TOGETHERAI && secret_state[SECRET_KEYS.TOGETHERAI])
-                || (textgen_settings.type === textgen_types.INFERMATICAI && secret_state[SECRET_KEYS.INFERMATICAI])
+                || (textgen_settings.type === textgen_types.INFERMATICAI && secret_state[SECRET_KEYS.INFERMATICAI]
+                || (textgen_settings.type === textgen_types.OPENROUTER && secret_state[SECRET_KEYS.OPENROUTER]))
             ) {
                 $('#api_button_textgenerationwebui').trigger('click');
             }
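The regrouped parentheses in the added condition look uneven at first glance, but because && binds tighter than ||, the check still reduces to an OR over per-backend pairs. A sketch of the equivalent, explicitly grouped form (not part of the commit; the canAutoConnect name is only for illustration):

// Equivalent autoconnect check with each backend/key pair grouped explicitly.
const canAutoConnect =
    (textgen_settings.type === textgen_types.MANCER && secret_state[SECRET_KEYS.MANCER]) ||
    (textgen_settings.type === textgen_types.TOGETHERAI && secret_state[SECRET_KEYS.TOGETHERAI]) ||
    (textgen_settings.type === textgen_types.INFERMATICAI && secret_state[SECRET_KEYS.INFERMATICAI]) ||
    (textgen_settings.type === textgen_types.OPENROUTER && secret_state[SECRET_KEYS.OPENROUTER]);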
@@ -313,6 +313,8 @@ class PresetManager {
             'type',
             'custom_model',
             'bypass_status_check',
+            'infermaticai_model',
+            'openrouter_model',
         ];
         const settings = Object.assign({}, getSettingsByApiId(this.apiId));
 
@@ -27,7 +27,7 @@ const INPUT_MAP = {
     [SECRET_KEYS.OPENAI]: '#api_key_openai',
     [SECRET_KEYS.NOVEL]: '#api_key_novel',
     [SECRET_KEYS.CLAUDE]: '#api_key_claude',
-    [SECRET_KEYS.OPENROUTER]: '#api_key_openrouter',
+    [SECRET_KEYS.OPENROUTER]: '.api_key_openrouter',
     [SECRET_KEYS.SCALE]: '#api_key_scale',
     [SECRET_KEYS.AI21]: '#api_key_ai21',
     [SECRET_KEYS.SCALE_COOKIE]: '#scale_cookie',
@@ -199,5 +199,5 @@ jQuery(async () => {
         const warningElement = $(`[data-for="${id}"]`);
         warningElement.toggle(value.length > 0);
     });
-    $('#openrouter_authorize').on('click', authorizeOpenRouter);
+    $('.openrouter_authorize').on('click', authorizeOpenRouter);
 });
@@ -1,10 +1,12 @@
 import { callPopup, getRequestHeaders, setGenerationParamsFromPreset } from '../script.js';
 import { isMobile } from './RossAscends-mods.js';
 import { textgenerationwebui_settings as textgen_settings, textgen_types } from './textgen-settings.js';
+import { tokenizers } from './tokenizers.js';
 
 let mancerModels = [];
 let togetherModels = [];
 let infermaticAIModels = [];
+export let openRouterModels = [];
 
 export async function loadOllamaModels(data) {
     if (!Array.isArray(data)) {
@@ -101,6 +103,28 @@ export async function loadMancerModels(data) {
     }
 }
 
+export async function loadOpenRouterModels(data) {
+    if (!Array.isArray(data)) {
+        console.error('Invalid OpenRouter models data', data);
+        return;
+    }
+
+    openRouterModels = data;
+
+    if (!data.find(x => x.id === textgen_settings.openrouter_model)) {
+        textgen_settings.openrouter_model = data[0]?.id || '';
+    }
+
+    $('#openrouter_model').empty();
+    for (const model of data) {
+        const option = document.createElement('option');
+        option.value = model.id;
+        option.text = model.id;
+        option.selected = model.id === textgen_settings.openrouter_model;
+        $('#openrouter_model').append(option);
+    }
+}
+
 function onMancerModelSelect() {
     const modelId = String($('#mancer_model').val());
     textgen_settings.mancer_model = modelId;
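For orientation, the new code only reads a handful of fields off each entry in the model list returned by OpenRouter; the sketch below shows just those fields with made-up values, not the full /v1/models payload:

// Shape assumed by loadOpenRouterModels(), getOpenRouterModelTemplate(),
// and getCurrentOpenRouterModelTokenizer(); any other fields are ignored.
const exampleOpenRouterModel = {
    id: 'mistralai/mistral-7b-instruct',    // used as the <option> value and for lookups
    name: 'Mistral 7B Instruct',            // shown in the select2 result template
    context_length: 32768,                  // fed to setGenerationParamsFromPreset()
    pricing: { prompt: '0.00000013' },      // prompt price per token; '0' renders as 'Free'
    architecture: { tokenizer: 'Mistral' }, // 'Llama2' | 'Mistral' | anything else -> OpenAI tokenizer
};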
@@ -132,6 +156,14 @@ function onOllamaModelSelect() {
     $('#api_button_textgenerationwebui').trigger('click');
 }
 
+function onOpenRouterModelSelect() {
+    const modelId = String($('#openrouter_model').val());
+    textgen_settings.openrouter_model = modelId;
+    $('#api_button_textgenerationwebui').trigger('click');
+    const model = openRouterModels.find(x => x.id === modelId);
+    setGenerationParamsFromPreset({ max_length: model.context_length });
+}
+
 function getMancerModelTemplate(option) {
     const model = mancerModels.find(x => x.id === option?.element?.value);
 
@@ -179,6 +211,25 @@ function getInfermaticAIModelTemplate(option) {
     `));
 }
 
+function getOpenRouterModelTemplate(option) {
+    const model = openRouterModels.find(x => x.id === option?.element?.value);
+
+    if (!option.id || !model) {
+        return option.text;
+    }
+
+    let tokens_dollar = Number(1 / (1000 * model.pricing?.prompt));
+    let tokens_rounded = (Math.round(tokens_dollar * 1000) / 1000).toFixed(0);
+
+    const price = 0 === Number(model.pricing?.prompt) ? 'Free' : `${tokens_rounded}k t/$ `;
+
+    return $((`
+        <div class="flex-container flexFlowColumn" title="${DOMPurify.sanitize(model.id)}">
+            <div><strong>${DOMPurify.sanitize(model.name)}</strong> | ${model.context_length} ctx | <small>${price}</small></div>
+        </div>
+    `));
+}
+
 async function downloadOllamaModel() {
     try {
         const serverUrl = textgen_settings.server_urls[textgen_types.OLLAMA];
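A quick worked example of the price label computed above, using an illustrative prompt price: 1 / (1000 * price_per_token) is the number of thousands of tokens one dollar buys, which the template rounds and renders as "Nk t/$".

// Illustrative numbers only; real values come from model.pricing.prompt.
const pricePerPromptToken = 0.0000015;                   // i.e. $1.50 per million prompt tokens
const tokens_dollar = 1 / (1000 * pricePerPromptToken);  // ≈ 666.67 thousand tokens per dollar
const tokens_rounded = (Math.round(tokens_dollar * 1000) / 1000).toFixed(0); // '667'
// Rendered label: '667k t/$' (or 'Free' when the prompt price is 0).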
@@ -220,11 +271,25 @@ async function downloadOllamaModel() {
     }
 }
 
+export function getCurrentOpenRouterModelTokenizer() {
+    const modelId = textgen_settings.openrouter_model;
+    const model = openRouterModels.find(x => x.id === modelId);
+    switch (model?.architecture?.tokenizer) {
+        case 'Llama2':
+            return tokenizers.LLAMA;
+        case 'Mistral':
+            return tokenizers.MISTRAL;
+        default:
+            return tokenizers.OPENAI;
+    }
+}
+
 jQuery(function () {
     $('#mancer_model').on('change', onMancerModelSelect);
     $('#model_togetherai_select').on('change', onTogetherModelSelect);
     $('#model_infermaticai_select').on('change', onInfermaticAIModelSelect);
     $('#ollama_model').on('change', onOllamaModelSelect);
+    $('#openrouter_model').on('change', onOpenRouterModelSelect);
     $('#ollama_download_model').on('click', downloadOllamaModel);
 
     if (!isMobile()) {
@@ -255,5 +320,12 @@ jQuery(function () {
             width: '100%',
             templateResult: getInfermaticAIModelTemplate,
         });
+        $('#openrouter_model').select2({
+            placeholder: 'Select a model',
+            searchInputPlaceholder: 'Search models...',
+            searchInputCssClass: 'text_pole',
+            width: '100%',
+            templateResult: getOpenRouterModelTemplate,
+        });
     }
 });
@@ -14,6 +14,7 @@ import { BIAS_CACHE, createNewLogitBiasEntry, displayLogitBias, getLogitBiasList
 
 import { power_user, registerDebugFunction } from './power-user.js';
 import EventSourceStream from './sse-stream.js';
+import { getCurrentOpenRouterModelTokenizer } from './textgen-models.js';
 import { SENTENCEPIECE_TOKENIZERS, TEXTGEN_TOKENIZERS, getTextTokens, tokenizers } from './tokenizers.js';
 import { getSortableDelay, onlyUnique } from './utils.js';
 
@@ -34,9 +35,10 @@ export const textgen_types = {
     LLAMACPP: 'llamacpp',
     OLLAMA: 'ollama',
     INFERMATICAI: 'infermaticai',
+    OPENROUTER: 'openrouter',
 };
 
-const { MANCER, APHRODITE, TABBY, TOGETHERAI, OOBA, OLLAMA, LLAMACPP, INFERMATICAI } = textgen_types;
+const { MANCER, APHRODITE, TABBY, TOGETHERAI, OOBA, OLLAMA, LLAMACPP, INFERMATICAI, OPENROUTER } = textgen_types;
 
 const LLAMACPP_DEFAULT_ORDER = [
     'top_k',
@@ -69,6 +71,7 @@ const MANCER_SERVER_DEFAULT = 'https://neuro.mancer.tech';
 let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEFAULT;
 let TOGETHERAI_SERVER = 'https://api.together.xyz';
 let INFERMATICAI_SERVER = 'https://api.totalgpt.ai';
+let OPENROUTER_SERVER = 'https://openrouter.ai/api';
 
 const SERVER_INPUTS = {
     [textgen_types.OOBA]: '#textgenerationwebui_api_url_text',
@@ -137,6 +140,7 @@ const settings = {
     togetherai_model: 'Gryphe/MythoMax-L2-13b',
     infermaticai_model: '',
     ollama_model: '',
+    openrouter_model: 'openrouter/auto',
     legacy_api: false,
     sampler_order: KOBOLDCPP_ORDER,
     logit_bias: [],
@@ -240,6 +244,10 @@ export function getTextGenServer() {
         return INFERMATICAI_SERVER;
     }
 
+    if (settings.type === OPENROUTER) {
+        return OPENROUTER_SERVER;
+    }
+
     return settings.server_urls[settings.type] ?? '';
 }
 
@@ -264,7 +272,7 @@ async function selectPreset(name) {
 function formatTextGenURL(value) {
     try {
         // Mancer/Together/InfermaticAI doesn't need any formatting (it's hardcoded)
-        if (settings.type === MANCER || settings.type === TOGETHERAI || settings.type === INFERMATICAI) {
+        if (settings.type === MANCER || settings.type === TOGETHERAI || settings.type === INFERMATICAI || settings.type === OPENROUTER) {
             return value;
         }
 
@@ -297,6 +305,10 @@ function getTokenizerForTokenIds() {
         return power_user.tokenizer;
     }
 
+    if (settings.type === OPENROUTER) {
+        return getCurrentOpenRouterModelTokenizer();
+    }
+
     return tokenizers.LLAMA;
 }
 
@@ -922,6 +934,10 @@ function getModel() {
         return settings.infermaticai_model;
     }
 
+    if (settings.type === OPENROUTER) {
+        return settings.openrouter_model;
+    }
+
     if (settings.type === APHRODITE) {
         return online_status;
     }
@@ -5,8 +5,9 @@ import { groups, selected_group } from './group-chats.js';
 import { getStringHash } from './utils.js';
 import { kai_flags } from './kai-settings.js';
 import { textgen_types, textgenerationwebui_settings as textgen_settings, getTextGenServer } from './textgen-settings.js';
+import { getCurrentOpenRouterModelTokenizer, openRouterModels } from './textgen-models.js';
 
-const { OOBA, TABBY, KOBOLDCPP, APHRODITE, LLAMACPP } = textgen_types;
+const { OOBA, TABBY, KOBOLDCPP, APHRODITE, LLAMACPP, OPENROUTER } = textgen_types;
 
 export const CHARACTERS_PER_TOKEN_RATIO = 3.35;
 const TOKENIZER_WARNING_KEY = 'tokenizationWarningShown';
@@ -202,6 +203,9 @@ export function getTokenizerBestMatch(forApi) {
         if (forApi === 'textgenerationwebui' && isTokenizerSupported) {
             return tokenizers.API_TEXTGENERATIONWEBUI;
         }
+        if (forApi === 'textgenerationwebui' && textgen_settings.type === OPENROUTER) {
+            return getCurrentOpenRouterModelTokenizer();
+        }
     }
 
     return tokenizers.LLAMA;
@@ -349,8 +353,11 @@ export function getTokenizerModel() {
     }
 
     // And for OpenRouter (if not a site model, then it's impossible to determine the tokenizer)
-    if (oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER && oai_settings.openrouter_model) {
-        const model = model_list.find(x => x.id === oai_settings.openrouter_model);
+    if (main_api == 'openai' && oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER && oai_settings.openrouter_model ||
+        main_api == 'textgenerationwebui' && textgen_settings.type === OPENROUTER && textgen_settings.openrouter_model) {
+        const model = main_api == 'openai'
+            ? model_list.find(x => x.id === oai_settings.openrouter_model)
+            : openRouterModels.find(x => x.id === textgen_settings.openrouter_model);
 
         if (model?.architecture?.tokenizer === 'Llama2') {
             return llamaTokenizer;
@@ -1,4 +1,4 @@
-const { TEXTGEN_TYPES } = require('./constants');
+const { TEXTGEN_TYPES, OPENROUTER_HEADERS } = require('./constants');
 const { SECRET_KEYS, readSecret } = require('./endpoints/secrets');
 const { getConfigValue } = require('./util');
 
@@ -27,6 +27,13 @@ function getInfermaticAIHeaders() {
     }) : {};
 }
 
+function getOpenRouterHeaders() {
+    const apiKey = readSecret(SECRET_KEYS.OPENROUTER);
+    const baseHeaders = { ...OPENROUTER_HEADERS };
+
+    return apiKey ? Object.assign(baseHeaders, { 'Authorization': `Bearer ${apiKey}` }) : baseHeaders;
+}
+
 function getAphroditeHeaders() {
     const apiKey = readSecret(SECRET_KEYS.APHRODITE);
 
@@ -91,6 +98,9 @@ function setAdditionalHeaders(request, args, server) {
         case TEXTGEN_TYPES.INFERMATICAI:
             headers = getInfermaticAIHeaders();
             break;
+        case TEXTGEN_TYPES.OPENROUTER:
+            headers = getOpenRouterHeaders();
+            break;
         default:
             headers = server ? getOverrideHeaders((new URL(server))?.host) : {};
             break;
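Given the OPENROUTER_HEADERS constant introduced further down in this commit, getOpenRouterHeaders() resolves to roughly the sketch below when a key is stored; the key value here is a placeholder, the real one is read from the secrets store:

// With a stored secret, the headers forwarded to OpenRouter look like this:
const exampleHeaders = {
    'HTTP-Referer': 'https://sillytavern.app',
    'X-Title': 'SillyTavern',
    'Authorization': 'Bearer <key from readSecret(SECRET_KEYS.OPENROUTER)>', // placeholder
};
// Without a key, only the Referer and X-Title attribution headers are sent.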
@@ -177,6 +177,7 @@ const TEXTGEN_TYPES = {
     LLAMACPP: 'llamacpp',
     OLLAMA: 'ollama',
     INFERMATICAI: 'infermaticai',
+    OPENROUTER: 'openrouter',
 };
 
 const INFERMATICAI_KEYS = [
@@ -226,6 +227,29 @@ const OLLAMA_KEYS = [
 const AVATAR_WIDTH = 400;
 const AVATAR_HEIGHT = 600;
 
+const OPENROUTER_HEADERS = {
+    'HTTP-Referer': 'https://sillytavern.app',
+    'X-Title': 'SillyTavern',
+};
+
+const OPENROUTER_KEYS = [
+    'max_tokens',
+    'temperature',
+    'top_k',
+    'top_p',
+    'presence_penalty',
+    'frequency_penalty',
+    'repetition_penalty',
+    'min_p',
+    'top_a',
+    'seed',
+    'logit_bias',
+    'model',
+    'stream',
+    'prompt',
+    'stop',
+];
+
 module.exports = {
     DIRECTORIES,
     UNSAFE_EXTENSIONS,
@@ -239,4 +263,6 @@ module.exports = {
     TOGETHERAI_KEYS,
     OLLAMA_KEYS,
     INFERMATICAI_KEYS,
+    OPENROUTER_HEADERS,
+    OPENROUTER_KEYS,
 };
@@ -3,7 +3,7 @@ const fetch = require('node-fetch').default;
 const { Readable } = require('stream');
 
 const { jsonParser } = require('../../express-common');
-const { CHAT_COMPLETION_SOURCES, GEMINI_SAFETY, BISON_SAFETY } = require('../../constants');
+const { CHAT_COMPLETION_SOURCES, GEMINI_SAFETY, BISON_SAFETY, OPENROUTER_HEADERS } = require('../../constants');
 const { forwardFetchResponse, getConfigValue, tryParse, uuidv4, mergeObjectWithYaml, excludeKeysByYaml, color } = require('../../util');
 const { convertClaudePrompt, convertGooglePrompt, convertTextCompletionPrompt } = require('../prompt-converters');
 
@@ -514,10 +514,7 @@ router.post('/status', jsonParser, async function (request, response_getstatus_o
         api_url = 'https://openrouter.ai/api/v1';
         api_key_openai = readSecret(SECRET_KEYS.OPENROUTER);
         // OpenRouter needs to pass the Referer and X-Title: https://openrouter.ai/docs#requests
-        headers = {
-            'HTTP-Referer': 'https://sillytavern.app',
-            'X-Title': 'SillyTavern',
-        };
+        headers = { ...OPENROUTER_HEADERS };
     } else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.MISTRALAI) {
         api_url = new URL(request.body.reverse_proxy || API_MISTRAL).toString();
         api_key_openai = request.body.reverse_proxy ? request.body.proxy_password : readSecret(SECRET_KEYS.MISTRALAI);
@@ -704,10 +701,7 @@ router.post('/generate', jsonParser, function (request, response) {
         apiUrl = 'https://openrouter.ai/api/v1';
         apiKey = readSecret(SECRET_KEYS.OPENROUTER);
         // OpenRouter needs to pass the Referer and X-Title: https://openrouter.ai/docs#requests
-        headers = {
-            'HTTP-Referer': 'https://sillytavern.app',
-            'X-Title': 'SillyTavern',
-        };
+        headers = { ...OPENROUTER_HEADERS };
         bodyParams = { 'transforms': ['middle-out'] };
 
         if (request.body.min_p !== undefined) {
@@ -4,7 +4,7 @@ const _ = require('lodash');
 const Readable = require('stream').Readable;
 
 const { jsonParser } = require('../../express-common');
-const { TEXTGEN_TYPES, TOGETHERAI_KEYS, OLLAMA_KEYS, INFERMATICAI_KEYS } = require('../../constants');
+const { TEXTGEN_TYPES, TOGETHERAI_KEYS, OLLAMA_KEYS, INFERMATICAI_KEYS, OPENROUTER_KEYS } = require('../../constants');
 const { forwardFetchResponse, trimV1 } = require('../../util');
 const { setAdditionalHeaders } = require('../../additional-headers');
 
@@ -107,6 +107,7 @@ router.post('/status', jsonParser, async function (request, response) {
             case TEXTGEN_TYPES.KOBOLDCPP:
             case TEXTGEN_TYPES.LLAMACPP:
             case TEXTGEN_TYPES.INFERMATICAI:
+            case TEXTGEN_TYPES.OPENROUTER:
                 url += '/v1/models';
                 break;
             case TEXTGEN_TYPES.MANCER:
@@ -209,6 +210,7 @@ router.post('/generate', jsonParser, async function (request, response) {
             request.body.api_server = request.body.api_server.replace('localhost', '127.0.0.1');
         }
 
+        const apiType = request.body.api_type;
         const baseUrl = request.body.api_server;
         console.log(request.body);
 
@@ -245,6 +247,9 @@ router.post('/generate', jsonParser, async function (request, response) {
             case TEXTGEN_TYPES.OLLAMA:
                 url += '/api/generate';
                 break;
+            case TEXTGEN_TYPES.OPENROUTER:
+                url += '/v1/chat/completions';
+                break;
         }
     }
 
@@ -268,6 +273,11 @@ router.post('/generate', jsonParser, async function (request, response) {
             args.body = JSON.stringify(request.body);
         }
 
+        if (request.body.api_type === TEXTGEN_TYPES.OPENROUTER) {
+            request.body = _.pickBy(request.body, (_, key) => OPENROUTER_KEYS.includes(key));
+            args.body = JSON.stringify(request.body);
+        }
+
         if (request.body.api_type === TEXTGEN_TYPES.OLLAMA) {
             args.body = JSON.stringify({
                 model: request.body.model,
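To illustrate the whitelisting above: lodash's _.pickBy keeps only the properties whose key passes the predicate, so anything not listed in OPENROUTER_KEYS is dropped before the body is forwarded to OpenRouter. A minimal sketch with a made-up request body and a trimmed key list:

const _ = require('lodash');

// Hypothetical incoming body; only the whitelisted fields survive.
const incoming = {
    prompt: 'Once upon a time',
    max_tokens: 200,
    temperature: 0.8,
    api_type: 'openrouter',                  // stripped: not in OPENROUTER_KEYS
    api_server: 'https://openrouter.ai/api', // stripped as well
};

const OPENROUTER_KEYS = ['max_tokens', 'temperature', 'prompt', 'stream', 'model', 'stop']; // subset for the example
const outgoing = _.pickBy(incoming, (_value, key) => OPENROUTER_KEYS.includes(key));
// outgoing -> { prompt: 'Once upon a time', max_tokens: 200, temperature: 0.8 }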
@@ -300,7 +310,7 @@ router.post('/generate', jsonParser, async function (request, response) {
         }
 
         // Map InfermaticAI response to OAI completions format
-        if (completionsReply.url.includes('https://api.totalgpt.ai')) {
+        if (apiType === TEXTGEN_TYPES.INFERMATICAI) {
             data['choices'] = (data?.choices || []).map(choice => ({ text: choice.message.content }));
         }
 