OpenRouter website model use option

Cohee
2023-07-12 20:54:11 +03:00
parent f9630acaf4
commit 26b6d48a3d
2 changed files with 8 additions and 8 deletions

View File

@@ -94,6 +94,7 @@ const claude_100k_max = 99000;
 const unlocked_max = 100 * 1024;
 const oai_max_temp = 2.0;
 const claude_max_temp = 1.0;
+const openrouter_website_model = 'OR_Website';
 let biasCache = undefined;
 let model_list = [];
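
The new constant is a sentinel rather than a real OpenRouter model id: the rest of the code compares against it to mean "defer to the model configured on the OpenRouter website". A minimal sketch of that comparison, where isWebsiteModel is an illustrative helper and not part of this commit:

// Sentinel check sketch; isWebsiteModel() is hypothetical, only the constant exists in the commit.
const openrouter_website_model = 'OR_Website';

function isWebsiteModel(modelId) {
    return modelId === openrouter_website_model;
}

console.log(isWebsiteModel('OR_Website'));           // true  -> omit the model, let OpenRouter decide
console.log(isWebsiteModel('openai/gpt-3.5-turbo')); // false -> send this model id explicitly
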
@@ -133,7 +134,7 @@ const default_settings = {
     openai_model: 'gpt-3.5-turbo',
     claude_model: 'claude-instant-v1',
     windowai_model: '',
-    openrouter_model: 'openai/gpt-3.5-turbo',
+    openrouter_model: openrouter_website_model,
     jailbreak_system: false,
     reverse_proxy: '',
     legacy_streaming: false,
@@ -168,7 +169,7 @@ const oai_settings = {
     openai_model: 'gpt-3.5-turbo',
     claude_model: 'claude-instant-v1',
     windowai_model: '',
-    openrouter_model: 'openai/gpt-3.5-turbo',
+    openrouter_model: openrouter_website_model,
     jailbreak_system: false,
     reverse_proxy: '',
     legacy_streaming: false,
@@ -678,7 +679,7 @@ function getChatCompletionModel() {
         case chat_completion_sources.SCALE:
             return '';
         case chat_completion_sources.OPENROUTER:
-            return oai_settings.openrouter_model;
+            return oai_settings.openrouter_model !== openrouter_website_model ? oai_settings.openrouter_model : null;
         default:
             throw new Error(`Unknown chat completion source: ${oai_settings.chat_completion_source}`);
     }
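
Returning null instead of the sentinel string keeps 'OR_Website' from ever being sent to the API as if it were a model id; callers that build the request payload can then leave the model field out, so OpenRouter falls back to the account's website setting. A hedged sketch of that downstream effect (the payload shape is illustrative, not this project's actual request builder):

// Illustrative only: a null model translates into "no model field in the request".
const model = getChatCompletionModel(); // null when 'OR_Website' is selected

const payload = {
    messages: [{ role: 'user', content: 'Hello' }],
    // Include a model only when the user picked one explicitly;
    // otherwise OpenRouter applies the default configured on its website.
    ...(model ? { model } : {}),
};
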
@@ -689,6 +690,7 @@ function saveModelList(data) {
     if (oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER) {
         $('#model_openrouter_select').empty();
+        $('#model_openrouter_select').append($('<option>', { value: openrouter_website_model, text: 'Use OpenRouter website setting' }));
         model_list.forEach((model) => {
             const selected = model.id == oai_settings.openrouter_model;
             $('#model_openrouter_select').append(
@@ -697,10 +699,8 @@ function saveModelList(data) {
                     text: model.id,
                     selected: selected,
                 }));
-            if (selected) {
-                $('#model_openrouter_select').val(model.id).trigger('change');
-            }
         });
+        $('#model_openrouter_select').val(oai_settings.openrouter_model).trigger('change');
     }
     // TODO Add ability to select OpenAI model from endpoint-provided list
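
Moving the .val(...).trigger('change') out of the forEach matters for the new sentinel: 'OR_Website' never equals any model.id returned by the API, so the old per-item if (selected) branch could never select it. Assigning the select's value once, after every option (including the website one) exists, covers both a real model id and the sentinel. A rough sketch of the resulting order of operations, with made-up model ids:

// Sketch of the new population order; the two model ids are placeholders.
$('#model_openrouter_select').empty();
$('#model_openrouter_select').append($('<option>', { value: openrouter_website_model, text: 'Use OpenRouter website setting' }));
['openai/gpt-3.5-turbo', 'anthropic/claude-instant-v1'].forEach((id) => {
    $('#model_openrouter_select').append($('<option>', { value: id, text: id }));
});
// A single assignment at the end works whether the saved value is a model id or 'OR_Website'.
$('#model_openrouter_select').val(oai_settings.openrouter_model).trigger('change');
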
@@ -1752,7 +1752,7 @@ async function onModelChange() {
         if (model?.context_length) {
             $('#openai_max_context').attr('max', model.context_length);
         } else {
-            $('#openai_max_context').attr('max', max_4k); // placeholder
+            $('#openai_max_context').attr('max', max_8k);
         }
     }
     oai_settings.openai_max_context = Math.min(Number($('#openai_max_context').attr('max')), oai_settings.openai_max_context);
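
For OpenRouter models that do not report a context_length, the fallback cap moves from max_4k to max_8k; both are presumably token-limit constants defined near the top of the same file. The Math.min on the last context line then clamps any previously saved value into the new cap, for example:

// Worked example of the clamp, with an assumed cap of 8191 tokens and a saved value of 16000.
const cap = Number($('#openai_max_context').attr('max'));                            // 8191 once the max_8k fallback applies
oai_settings.openai_max_context = Math.min(cap, oai_settings.openai_max_context);    // 16000 -> 8191
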

View File

@@ -3295,7 +3295,7 @@ app.post("/generate_openai", jsonParser, function (request, response_generate_op
         return response_generate_openai.status(401).send({ error: true });
     }
-    const isTextCompletion = request.body.model.startsWith('text-') || request.body.model.startsWith('code-');
+    const isTextCompletion = Boolean(request.body.model && (request.body.model.startsWith('text-') || request.body.model.startsWith('code-')));
     const textPrompt = isTextCompletion ? convertChatMLPrompt(request.body.messages) : '';
     const endpointUrl = isTextCompletion ? `${api_url}/completions` : `${api_url}/chat/completions`;
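
The Boolean(...) wrapper plus the request.body.model && guard makes the text-completion check null-safe: with the website option selected, the client may now send a request without an explicit model, and the old code would have thrown on .startsWith of a missing value. A small sketch of the guarded check in isolation (isTextCompletionModel is an illustrative wrapper, not a function in this commit):

// Null-safe check mirroring the change: a missing model simply means "not a text completion".
function isTextCompletionModel(model) {
    return Boolean(model && (model.startsWith('text-') || model.startsWith('code-')));
}

console.log(isTextCompletionModel('text-davinci-003')); // true
console.log(isTextCompletionModel('gpt-3.5-turbo'));    // false
console.log(isTextCompletionModel(undefined));          // false, instead of a TypeError
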