Merge remote-tracking branch 'upstream/staging' into staging

This commit is contained in:
DonMoralez
2023-12-21 17:30:37 +02:00
21 changed files with 459 additions and 141 deletions

View File

@ -5,8 +5,7 @@
width="64.000000pt" height="64.000000pt" viewBox="0 0 53.000000 60.000000"
preserveAspectRatio="xMidYMid meet">
<g transform="translate(0.000000,63.000000) scale(0.100000,-0.100000)"
fill="#000000" stroke="none">
<g transform="translate(0.000000,63.000000) scale(0.100000,-0.100000)" stroke="none">
<path d="M40 320 l0 -240 70 0 70 0 0 95 c0 95 0 95 25 95 23 0 25 -3 25 -50
l0 -50 70 0 70 0 0 50 c0 47 2 50 25 50 25 0 25 0 25 -95 l0 -95 70 0 70 0 0
240 0 240 -70 0 -70 0 0 -44 0 -45 -47 -3 -48 -3 -3 -47 c-3 -43 -5 -48 -28

Before

Width:  |  Height:  |  Size: 731 B

After

Width:  |  Height:  |  Size: 716 B

View File

@ -5,8 +5,7 @@
width="176.000000pt" height="176.000000pt" viewBox="0 0 176.000000 176.000000"
preserveAspectRatio="xMidYMid meet">
<g transform="translate(0.000000,176.000000) scale(0.100000,-0.100000)"
fill="#ffffff" stroke="none">
<g transform="translate(0.000000,176.000000) scale(0.100000,-0.100000)" stroke="none">
<path d="M197 1670 c-16 -19 -32 -58 -43 -107 -19 -87 -16 -222 11 -422 21
-162 19 -218 -10 -306 -49 -144 -43 -332 14 -443 54 -106 160 -180 297 -207
164 -33 202 -44 270 -77 59 -28 80 -33 144 -33 66 0 84 4 154 38 53 25 110 43

Before

Width:  |  Height:  |  Size: 1.4 KiB

After

Width:  |  Height:  |  Size: 1.3 KiB

View File

@ -437,14 +437,15 @@
Streaming</span>
</label>
<div class="toggle-description justifyLeft">
<span data-i18n="Display the response bit by bit as it is generated.">Display
the response bit by bit as it is generated.</span><br>
<span data-i18n="When this is off, responses will be displayed all at once when they are complete.">When
this is off, responses will be displayed all at once when they are
complete.</span>
<span data-i18n="Display the response bit by bit as it is generated.">
Display the response bit by bit as it is generated.
</span><br>
<span data-i18n="When this is off, responses will be displayed all at once when they are complete.">
When this is off, responses will be displayed all at once when they are complete.
</span>
</div>
</div>
<div class="range-block" data-source="openai,claude,windowai,openrouter,ai21,scale,makersuite,mistralai">
<div class="range-block" data-source="openai,claude,windowai,openrouter,ai21,scale,makersuite,mistralai,custom">
<div class="range-block-title" data-i18n="Temperature">
Temperature
</div>
@ -457,7 +458,7 @@
</div>
</div>
</div>
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,ai21">
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,ai21,custom">
<div class="range-block-title" data-i18n="Frequency Penalty">
Frequency Penalty
</div>
@ -470,7 +471,7 @@
</div>
</div>
</div>
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,ai21">
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,ai21,custom">
<div class="range-block-title" data-i18n="Presence Penalty">
Presence Penalty
</div>
@ -509,7 +510,7 @@
</div>
</div>
</div>
<div data-newbie-hidden class="range-block" data-source="openai,claude,openrouter,ai21,scale,makersuite,mistralai">
<div data-newbie-hidden class="range-block" data-source="openai,claude,openrouter,ai21,scale,makersuite,mistralai,custom">
<div class="range-block-title" data-i18n="Top-p">
Top P
</div>
@ -577,7 +578,7 @@
</div>
<div class="toggle-description justifyLeft" data-i18n="Wraps activated World Info entries before inserting into the prompt.">
Wraps activated World Info entries before inserting into the prompt. Use
<tt>{0}</tt> to mark a place where the content is inserted.
<code>{0}</code> to mark a place where the content is inserted.
</div>
<div class="wide100p">
<textarea id="wi_format_textarea" class="text_pole textarea_compact autoSetHeight" rows="3" placeholder="&mdash;"></textarea>
@ -591,7 +592,7 @@
</div>
</div>
<div class="toggle-description justifyLeft" data-i18n="Use scenario to mark a place where the content is inserted.">
Use <tt>{{scenario}}</tt> to mark a place where the content is inserted.
Use <code>{{scenario}}</code> to mark a place where the content is inserted.
</div>
<div class="wide100p">
<textarea id="scenario_format_textarea" class="text_pole textarea_compact autoSetHeight" rows="3" placeholder="&mdash;"></textarea>
@ -605,7 +606,7 @@
</div>
</div>
<div class="toggle-description justifyLeft" data-i18n="Use personality to mark a place where the content is inserted.">
Use <tt>{{personality}}</tt> to mark a place where the content is inserted.
Use <code>{{personality}}</code> to mark a place where the content is inserted.
</div>
<div class="wide100p">
<textarea id="personality_format_textarea" class="text_pole textarea_compact autoSetHeight" rows="3" placeholder="&mdash;"></textarea>
@ -729,6 +730,9 @@
</div>
<div class="wide100p">
<input id="openai_reverse_proxy" type="text" class="text_pole" placeholder="https://api.openai.com/v1" maxlength="500" />
<small class="reverse_proxy_warning">
Doesn't work? Try adding <code>/v1</code> at the end!
</small>
</div>
</div>
<div class="range-block" data-source="openai,claude">
@ -745,7 +749,7 @@
<div id="openai_proxy_password_show" title="Peek a password" class="menu_button fa-solid fa-eye-slash fa-fw"></div>
</div>
</div>
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,mistralai">
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,mistralai,custom">
<div class="range-block-title justifyLeft" data-i18n="Seed">
Seed
</div>
@ -1508,7 +1512,7 @@
</span>
</div>
</div>
<div class="range-block" data-source="openai,openrouter,makersuite">
<div class="range-block" data-source="openai,openrouter,makersuite,custom">
<label for="openai_image_inlining" class="checkbox_label flexWrap widthFreeExpand">
<input id="openai_image_inlining" type="checkbox" />
<span data-i18n="Send inline images">Send inline images</span>
@ -1817,7 +1821,7 @@
oobabooga/text-generation-webui
</a>
<span data-i18n="Make sure you run it with">
Make sure you run it with <tt>--api</tt> flag
Make sure you run it with <code>--api</code> flag
</span>
</div>
<div class="flex1">
@ -1945,6 +1949,7 @@
<option value="ai21">AI21</option>
<option value="makersuite">Google MakerSuite</option>
<option value="mistralai">MistralAI</option>
<option value="custom">Custom (OpenAI-compatible)</option>
</select>
<form id="openai_form" data-source="openai" action="javascript:void(null);" method="post" enctype="multipart/form-data">
<h4><span data-i18n="OpenAI API key">OpenAI API key</span></h4>
@ -2249,9 +2254,40 @@
</select>
</div>
</form>
<form id="custom_form" data-source="custom">
<h4 data-i18n="Custom Endpoint (Base URL)">Custom Endpoint (Base URL)</h4>
<div class="flex-container">
<input id="custom_api_url_text" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" placeholder="Example: http://localhost:1234/v1">
</div>
<div>
<small>
Doesn't work? Try adding <code>/v1</code> at the end of the URL!
</small>
</div>
<h4>
<span data-i18n="Custom API Key">Custom API Key</span>
<small>(Optional)</small>
</h4>
<div class="flex-container">
<input id="api_key_custom" name="api_key_custom" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
<div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_custom"></div>
</div>
<div data-for="api_key_custom" class="neutral_warning">
For privacy reasons, your API key will be hidden after you reload the page.
</div>
<h4>Enter a Model ID</h4>
<div class="flex-container">
<input id="custom_model_id" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" placeholder="Example: gpt-3.5-turbo">
</div>
<h4 data-i18n="Available Models">Available Models</h4>
<div class="flex-container">
<select id="model_custom_select" class="text_pole"></select>
</div>
</form>
<div class="flex-container flex">
<div id="api_button_openai" class="api_button menu_button menu_button_icon" type="submit" data-i18n="Connect">Connect</div>
<div class="api_loading menu_button" data-i18n="Cancel">Cancel</div>
<div data-source="custom" id="customize_additional_parameters" class="menu_button menu_button_icon">Additional Parameters</div>
<div data-source="openrouter" id="openrouter_authorize" class="menu_button menu_button_icon" title="Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai" data-i18n="[title]Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai">Authorize</div>
<div id="test_api_button" class="menu_button menu_button_icon" title="Verifies your API connection by sending a short test message. Be aware that you'll be credited for it!" data-i18n="[title]Verifies your API connection by sending a short test message. Be aware that you'll be credited for it!">Test Message</div>
</div>
@ -2368,11 +2404,15 @@
<span class="fa-solid fa-circle-question note-link-span"></span>
</a>
</h4>
<div>
<label for="instruct_enabled" class="checkbox_label">
<div class="flex-container">
<label for="instruct_enabled" class="checkbox_label flex1">
<input id="instruct_enabled" type="checkbox" />
<span data-i18n="Enabled">Enabled</span>
</label>
<label for="instruct_bind_to_context" class="checkbox_label flex1" title="If enabled, Context templates will be automatically selected based on selected Instruct template name or by preference.">
<input id="instruct_bind_to_context" type="checkbox" />
<span data-i18n="Bind to Context">Bind to Context</span>
</label>
</div>
<label for="instruct_presets">
<span data-i18n="Presets">Presets</span>

View File

@ -184,7 +184,7 @@ import {
} from './scripts/instruct-mode.js';
import { applyLocale, initLocales } from './scripts/i18n.js';
import { getFriendlyTokenizerName, getTokenCount, getTokenizerModel, initTokenizers, saveTokenCache } from './scripts/tokenizers.js';
import { createPersona, initPersonas, selectCurrentPersona, setPersonaDescription } from './scripts/personas.js';
import { createPersona, initPersonas, selectCurrentPersona, setPersonaDescription, updatePersonaNameIfExists } from './scripts/personas.js';
import { getBackgrounds, initBackgrounds, loadBackgroundSettings, background_settings } from './scripts/backgrounds.js';
import { hideLoader, showLoader } from './scripts/loader.js';
import { BulkEditOverlay, CharacterContextMenu } from './scripts/BulkEditOverlay.js';
@ -3252,6 +3252,7 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
if (skipWIAN !== true) {
console.log('skipWIAN not active, adding WIAN');
// Add all depth WI entries to prompt
flushWIDepthInjections();
if (Array.isArray(worldInfoDepth)) {
worldInfoDepth.forEach((e) => {
const joinedEntries = e.entries.join('\n');
@ -3938,7 +3939,6 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
({ type, getMessage } = await saveReply('appendFinal', getMessage, false, title, swipes));
}
}
activateSendButtons();
if (type !== 'quiet') {
playMessageSound();
@ -3995,23 +3995,16 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
}
} else {
generatedPromptCache = '';
activateSendButtons();
//console.log('runGenerate calling showSwipeBtns');
showSwipeButtons();
if (data?.response) {
toastr.error(data.response, 'API Error');
}
reject(data.response);
}
console.debug('/api/chats/save called by /Generate');
console.debug('/api/chats/save called by /Generate');
await saveChatConditional();
is_send_press = false;
hideStopButton();
activateSendButtons();
showSwipeButtons();
setGenerationProgress(0);
unblockGeneration();
streamingProcessor = null;
if (type !== 'quiet') {
@ -4040,14 +4033,17 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
is_send_press = false;
}
//console.log('generate ending');
} //generate ends
function flushWIDepthInjections() {
//prevent custom depth WI entries (which have unique random key names) from duplicating
for (let key in extension_prompts) {
if (key.includes('customDepthWI')) {
for (const key of Object.keys(extension_prompts)) {
if (key.startsWith('customDepthWI')) {
delete extension_prompts[key];
}
}
//console.log('generate ending');
} //generate ends
}
function unblockGeneration() {
is_send_press = false;
@ -4055,6 +4051,7 @@ function unblockGeneration() {
showSwipeButtons();
setGenerationProgress(0);
flushEphemeralStoppingStrings();
flushWIDepthInjections();
$('#send_textarea').removeAttr('disabled');
}
@ -5457,6 +5454,7 @@ function changeMainAPI() {
case chat_completion_sources.AI21:
case chat_completion_sources.MAKERSUITE:
case chat_completion_sources.MISTRALAI:
case chat_completion_sources.CUSTOM:
default:
setupChatCompletionPromptManager(oai_settings);
break;
@ -7576,43 +7574,48 @@ const CONNECT_API_MAP = {
},
'oai': {
selected: 'openai',
source: 'openai',
button: '#api_button_openai',
source: chat_completion_sources.OPENAI,
},
'claude': {
selected: 'openai',
source: 'claude',
button: '#api_button_openai',
source: chat_completion_sources.CLAUDE,
},
'windowai': {
selected: 'openai',
source: 'windowai',
button: '#api_button_openai',
source: chat_completion_sources.WINDOWAI,
},
'openrouter': {
selected: 'openai',
source: 'openrouter',
button: '#api_button_openai',
source: chat_completion_sources.OPENROUTER,
},
'scale': {
selected: 'openai',
source: 'scale',
button: '#api_button_openai',
source: chat_completion_sources.SCALE,
},
'ai21': {
selected: 'openai',
source: 'ai21',
button: '#api_button_openai',
source: chat_completion_sources.AI21,
},
'makersuite': {
selected: 'openai',
source: 'makersuite',
button: '#api_button_openai',
source: chat_completion_sources.MAKERSUITE,
},
'mistralai': {
selected: 'openai',
source: 'mistralai',
button: '#api_button_openai',
source: chat_completion_sources.MISTRALAI,
},
'custom': {
selected: 'openai',
button: '#api_button_openai',
source: chat_completion_sources.CUSTOM,
},
};
@ -9153,8 +9156,10 @@ jQuery(async function () {
await messageEditDone($(this));
});
$('#your_name_button').click(function () {
setUserName($('#your_name').val());
$('#your_name_button').click(async function () {
const userName = String($('#your_name').val()).trim();
setUserName(userName);
await updatePersonaNameIfExists(user_avatar, userName);
});
$('#sync_name_button').on('click', async function () {

View File

@ -399,6 +399,7 @@ function RA_autoconnect(PrevApi) {
|| (secret_state[SECRET_KEYS.AI21] && oai_settings.chat_completion_source == chat_completion_sources.AI21)
|| (secret_state[SECRET_KEYS.MAKERSUITE] && oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE)
|| (secret_state[SECRET_KEYS.MISTRALAI] && oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI)
|| (isValidUrl(oai_settings.custom_url) && oai_settings.chat_completion_source == chat_completion_sources.CUSTOM)
) {
$('#api_button_openai').trigger('click');
}

View File

@ -217,7 +217,16 @@ async function captionHorde(base64Img) {
* @returns {Promise<{caption: string}>} Generated caption
*/
async function captionMultimodal(base64Img) {
const prompt = extension_settings.caption.prompt || PROMPT_DEFAULT;
let prompt = extension_settings.caption.prompt || PROMPT_DEFAULT;
if (extension_settings.caption.prompt_ask) {
const customPrompt = await callPopup('<h3>Enter a comment or question:</h3>', 'input', prompt, { rows: 2 });
if (!customPrompt) {
throw new Error('User aborted the caption sending.');
}
prompt = String(customPrompt).trim();
}
const caption = await getMultimodalCaption(base64Img, prompt);
return { caption };
}
@ -277,6 +286,7 @@ jQuery(function () {
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'google' && secret_state[SECRET_KEYS.MAKERSUITE]) ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'ollama' && textgenerationwebui_settings.server_urls[textgen_types.OLLAMA]) ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'llamacpp' && textgenerationwebui_settings.server_urls[textgen_types.LLAMACPP]) ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'custom') ||
extension_settings.caption.source === 'local' ||
extension_settings.caption.source === 'horde';
@ -345,6 +355,7 @@ jQuery(function () {
<option value="openai">OpenAI</option>
<option value="openrouter">OpenRouter</option>
<option value="google">Google MakerSuite</option>
<option value="custom">Custom (OpenAI-compatible)</option>
</select>
</div>
<div class="flex1 flex-container flexFlowColumn flexNoGap">
@ -358,6 +369,7 @@ jQuery(function () {
<option data-type="ollama" value="bakllava:latest">bakllava:latest</option>
<option data-type="ollama" value="llava:latest">llava:latest</option>
<option data-type="llamacpp" value="llamacpp_current">[Currently loaded]</option>
<option data-type="custom" value="custom_current">[Currently selected]</option>
</select>
</div>
<label data-type="openai" class="checkbox_label flexBasis100p" for="caption_allow_reverse_proxy" title="Allow using reverse proxy if defined and valid.">
@ -371,6 +383,10 @@ jQuery(function () {
<div id="caption_prompt_block">
<label for="caption_prompt">Caption Prompt</label>
<textarea id="caption_prompt" class="text_pole" rows="1" placeholder="&lt; Use default &gt;">${PROMPT_DEFAULT}</textarea>
<label class="checkbox_label margin-bot-10px" for="caption_prompt_ask" title="Ask for a custom prompt every time an image is captioned.">
<input id="caption_prompt_ask" type="checkbox" class="checkbox">
Ask every time
</label>
</div>
<label for="caption_template">Message Template <small>(use <code>{{caption}}</code> macro)</small></label>
<textarea id="caption_template" class="text_pole" rows="2" placeholder="&lt; Use default &gt;">${TEMPLATE_DEFAULT}</textarea>
@ -394,6 +410,7 @@ jQuery(function () {
$('#caption_refine_mode').prop('checked', !!(extension_settings.caption.refine_mode));
$('#caption_allow_reverse_proxy').prop('checked', !!(extension_settings.caption.allow_reverse_proxy));
$('#caption_prompt_ask').prop('checked', !!(extension_settings.caption.prompt_ask));
$('#caption_source').val(extension_settings.caption.source);
$('#caption_prompt').val(extension_settings.caption.prompt);
$('#caption_template').val(extension_settings.caption.template);
@ -415,4 +432,8 @@ jQuery(function () {
extension_settings.caption.allow_reverse_proxy = $('#caption_allow_reverse_proxy').prop('checked');
saveSettingsDebounced();
});
$('#caption_prompt_ask').on('input', () => {
extension_settings.caption.prompt_ask = $('#caption_prompt_ask').prop('checked');
saveSettingsDebounced();
});
});

View File

@ -992,8 +992,7 @@ async function getExpressionsList() {
}
const result = await resolveExpressionsList();
result.push(...extension_settings.expressions.custom);
return result;
return [...result, ...extension_settings.expressions.custom];
}
async function setExpression(character, expression, force) {

View File

@ -24,6 +24,7 @@ export async function getMultimodalCaption(base64Img, prompt) {
const isGoogle = extension_settings.caption.multimodal_api === 'google';
const isOllama = extension_settings.caption.multimodal_api === 'ollama';
const isLlamaCpp = extension_settings.caption.multimodal_api === 'llamacpp';
const isCustom = extension_settings.caption.multimodal_api === 'custom';
const base64Bytes = base64Img.length * 0.75;
const compressionLimit = 2 * 1024 * 1024;
if (['google', 'openrouter'].includes(extension_settings.caption.multimodal_api) && base64Bytes > compressionLimit) {
@ -68,6 +69,14 @@ export async function getMultimodalCaption(base64Img, prompt) {
requestBody.server_url = textgenerationwebui_settings.server_urls[textgen_types.LLAMACPP];
}
if (isCustom) {
requestBody.server_url = oai_settings.custom_url;
requestBody.model = oai_settings.custom_model || 'gpt-4-vision-preview';
requestBody.custom_include_headers = oai_settings.custom_include_headers;
requestBody.custom_include_body = oai_settings.custom_include_body;
requestBody.custom_exclude_body = oai_settings.custom_exclude_body;
}
function getEndpointUrl() {
switch (extension_settings.caption.multimodal_api) {
case 'google':
@ -119,4 +128,8 @@ function throwIfInvalidModel() {
if (extension_settings.caption.multimodal_api === 'llamacpp' && !textgenerationwebui_settings.server_urls[textgen_types.LLAMACPP]) {
throw new Error('LlamaCPP server URL is not set.');
}
if (extension_settings.caption.multimodal_api === 'custom' && !oai_settings.custom_url) {
throw new Error('Custom API URL is not set.');
}
}

View File

@ -29,6 +29,7 @@ const controls = [
{ id: 'instruct_first_output_sequence', property: 'first_output_sequence', isCheckbox: false },
{ id: 'instruct_last_output_sequence', property: 'last_output_sequence', isCheckbox: false },
{ id: 'instruct_activation_regex', property: 'activation_regex', isCheckbox: false },
{ id: 'instruct_bind_to_context', property: 'bind_to_context', isCheckbox: true },
];
/**
@ -409,6 +410,10 @@ jQuery(() => {
});
$('#instruct_enabled').on('change', function () {
if (!power_user.instruct.bind_to_context) {
return;
}
// When instruct mode gets enabled, select context template matching selected instruct preset
if (power_user.instruct.enabled) {
selectMatchingContextTemplate(power_user.instruct.preset);
@ -440,8 +445,10 @@ jQuery(() => {
}
});
if (power_user.instruct.bind_to_context) {
// Select matching context template
selectMatchingContextTemplate(name);
}
highlightDefaultPreset();
});

View File

@ -166,6 +166,7 @@ export const chat_completion_sources = {
AI21: 'ai21',
MAKERSUITE: 'makersuite',
MISTRALAI: 'mistralai',
CUSTOM: 'custom',
};
const prefixMap = selected_group ? {
@ -211,6 +212,11 @@ const default_settings = {
google_model: 'gemini-pro',
ai21_model: 'j2-ultra',
mistralai_model: 'mistral-medium',
custom_model: '',
custom_url: '',
custom_include_body: '',
custom_exclude_body: '',
custom_include_headers: '',
windowai_model: '',
openrouter_model: openrouter_website_model,
openrouter_use_fallback: false,
@ -269,6 +275,11 @@ const oai_settings = {
google_model: 'gemini-pro',
ai21_model: 'j2-ultra',
mistralai_model: 'mistral-medium',
custom_model: '',
custom_url: '',
custom_include_body: '',
custom_exclude_body: '',
custom_include_headers: '',
windowai_model: '',
openrouter_model: openrouter_website_model,
openrouter_use_fallback: false,
@ -1271,6 +1282,8 @@ function getChatCompletionModel() {
return oai_settings.ai21_model;
case chat_completion_sources.MISTRALAI:
return oai_settings.mistralai_model;
case chat_completion_sources.CUSTOM:
return oai_settings.custom_model;
default:
throw new Error(`Unknown chat completion source: ${oai_settings.chat_completion_source}`);
}
@ -1331,6 +1344,23 @@ function saveModelList(data) {
$('#model_openai_select').val(model).trigger('change');
}
}
if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) {
$('#model_custom_select').empty();
$('#model_custom_select').append('<option value="">None</option>');
model_list.forEach((model) => {
$('#model_custom_select').append(
$('<option>', {
value: model.id,
text: model.id,
selected: model.id == oai_settings.custom_model,
}));
});
if (!oai_settings.custom_model && model_list.length > 0) {
$('#model_custom_select').val(model_list[0].id).trigger('change');
}
}
}
function appendOpenRouterOptions(model_list, groupModels = false, sort = false) {
@ -1459,6 +1489,7 @@ async function sendOpenAIRequest(type, messages, signal) {
const isGoogle = oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE;
const isOAI = oai_settings.chat_completion_source == chat_completion_sources.OPENAI;
const isMistral = oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI;
const isCustom = oai_settings.chat_completion_source == chat_completion_sources.CUSTOM;
const isTextCompletion = (isOAI && textCompletionModels.includes(oai_settings.openai_model)) || (isOpenRouter && oai_settings.openrouter_force_instruct && power_user.instruct.enabled);
const isQuiet = type === 'quiet';
const isImpersonate = type === 'impersonate';
@ -1485,7 +1516,7 @@ async function sendOpenAIRequest(type, messages, signal) {
return sendWindowAIRequest(messages, signal, stream);
}
const logitBiasSources = [chat_completion_sources.OPENAI, chat_completion_sources.OPENROUTER, chat_completion_sources.SCALE];
const logitBiasSources = [chat_completion_sources.OPENAI, chat_completion_sources.OPENROUTER, chat_completion_sources.SCALE, chat_completion_sources.CUSTOM];
if (oai_settings.bias_preset_selected
&& logitBiasSources.includes(oai_settings.chat_completion_source)
&& Array.isArray(oai_settings.bias_presets[oai_settings.bias_preset_selected])
@ -1572,7 +1603,14 @@ async function sendOpenAIRequest(type, messages, signal) {
generate_data['safe_mode'] = false; // already defaults to false, but just incase they change that in the future.
}
if ((isOAI || isOpenRouter || isMistral) && oai_settings.seed >= 0) {
if (isCustom) {
generate_data['custom_url'] = oai_settings.custom_url;
generate_data['custom_include_body'] = oai_settings.custom_include_body;
generate_data['custom_exclude_body'] = oai_settings.custom_exclude_body;
generate_data['custom_include_headers'] = oai_settings.custom_include_headers;
}
if ((isOAI || isOpenRouter || isMistral || isCustom) && oai_settings.seed >= 0) {
generate_data['seed'] = oai_settings.seed;
}
@ -2318,6 +2356,11 @@ function loadOpenAISettings(data, settings) {
oai_settings.openrouter_force_instruct = settings.openrouter_force_instruct ?? default_settings.openrouter_force_instruct;
oai_settings.ai21_model = settings.ai21_model ?? default_settings.ai21_model;
oai_settings.mistralai_model = settings.mistralai_model ?? default_settings.mistralai_model;
oai_settings.custom_model = settings.custom_model ?? default_settings.custom_model;
oai_settings.custom_url = settings.custom_url ?? default_settings.custom_url;
oai_settings.custom_include_body = settings.custom_include_body ?? default_settings.custom_include_body;
oai_settings.custom_exclude_body = settings.custom_exclude_body ?? default_settings.custom_exclude_body;
oai_settings.custom_include_headers = settings.custom_include_headers ?? default_settings.custom_include_headers;
oai_settings.google_model = settings.google_model ?? default_settings.google_model;
oai_settings.chat_completion_source = settings.chat_completion_source ?? default_settings.chat_completion_source;
oai_settings.api_url_scale = settings.api_url_scale ?? default_settings.api_url_scale;
@ -2365,6 +2408,8 @@ function loadOpenAISettings(data, settings) {
$(`#model_ai21_select option[value="${oai_settings.ai21_model}"`).attr('selected', true);
$('#model_mistralai_select').val(oai_settings.mistralai_model);
$(`#model_mistralai_select option[value="${oai_settings.mistralai_model}"`).attr('selected', true);
$('#custom_model_id').val(oai_settings.custom_model);
$('#custom_api_url_text').val(oai_settings.custom_url);
$('#openai_max_context').val(oai_settings.openai_max_context);
$('#openai_max_context_counter').val(`${oai_settings.openai_max_context}`);
$('#model_openrouter_select').val(oai_settings.openrouter_model);
@ -2477,7 +2522,13 @@ async function getStatusOpen() {
validateReverseProxy();
}
const canBypass = oai_settings.chat_completion_source === chat_completion_sources.OPENAI && oai_settings.bypass_status_check;
if (oai_settings.chat_completion_source === chat_completion_sources.CUSTOM) {
$('#model_custom_select').empty();
data.custom_url = oai_settings.custom_url;
data.custom_include_headers = oai_settings.custom_include_headers;
}
const canBypass = (oai_settings.chat_completion_source === chat_completion_sources.OPENAI && oai_settings.bypass_status_check) || oai_settings.chat_completion_source === chat_completion_sources.CUSTOM;
if (canBypass) {
setOnlineStatus('Status check bypassed');
}
@ -2544,6 +2595,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
openrouter_sort_models: settings.openrouter_sort_models,
ai21_model: settings.ai21_model,
mistralai_model: settings.mistralai_model,
custom_model: settings.custom_model,
google_model: settings.google_model,
temperature: settings.temp_openai,
frequency_penalty: settings.freq_pen_openai,
@ -2918,6 +2970,11 @@ function onSettingsPresetChange() {
openrouter_sort_models: ['#openrouter_sort_models', 'openrouter_sort_models', false],
ai21_model: ['#model_ai21_select', 'ai21_model', false],
mistralai_model: ['#model_mistralai_select', 'mistralai_model', false],
custom_model: ['#custom_model_id', 'custom_model', false],
custom_url: ['#custom_api_url_text', 'custom_url', false],
custom_include_body: ['#custom_include_body', 'custom_include_body', false],
custom_exclude_body: ['#custom_exclude_body', 'custom_exclude_body', false],
custom_include_headers: ['#custom_include_headers', 'custom_include_headers', false],
google_model: ['#model_google_select', 'google_model', false],
openai_max_context: ['#openai_max_context', 'openai_max_context', false],
openai_max_tokens: ['#openai_max_tokens', 'openai_max_tokens', false],
@ -3109,6 +3166,12 @@ async function onModelChange() {
oai_settings.mistralai_model = value;
}
if (value && $(this).is('#model_custom_select')) {
console.log('Custom model changed to', value);
oai_settings.custom_model = value;
$('#custom_model_id').val(value).trigger('input');
}
if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) {
if (oai_settings.max_context_unlocked) {
$('#openai_max_context').attr('max', unlocked_max);
@ -3256,6 +3319,12 @@ async function onModelChange() {
$('#top_k_openai').attr('max', 200).val(oai_settings.top_k_openai).trigger('input');
}
if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) {
$('#openai_max_context').attr('max', unlocked_max);
oai_settings.openai_max_context = Math.min(Number($('#openai_max_context').attr('max')), oai_settings.openai_max_context);
$('#openai_max_context').val(oai_settings.openai_max_context).trigger('input');
}
$('#openai_max_context_counter').attr('max', Number($('#openai_max_context').attr('max')));
saveSettingsDebounced();
@ -3270,7 +3339,7 @@ async function onNewPresetClick() {
const popupText = `
<h3>Preset name:</h3>
<h4>Hint: Use a character/group name to bind preset to a specific chat.</h4>`;
const name = await callPopup(popupText, 'input');
const name = await callPopup(popupText, 'input', oai_settings.preset_settings_openai);
if (!name) {
return;
@ -3398,6 +3467,19 @@ async function onConnectButtonClick(e) {
}
}
if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) {
const api_key_custom = String($('#api_key_custom').val()).trim();
if (api_key_custom.length) {
await writeSecret(SECRET_KEYS.CUSTOM, api_key_custom);
}
if (!oai_settings.custom_url) {
console.log('No API URL saved for Custom');
return;
}
}
startStatusLoading();
saveSettingsDebounced();
await getStatusOpen();
@ -3433,6 +3515,9 @@ function toggleChatCompletionForms() {
else if (oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI) {
$('#model_mistralai_select').trigger('change');
}
else if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) {
$('#model_custom_select').trigger('change');
}
$('[data-source]').each(function () {
const validSources = $(this).data('source').split(',');
$(this).toggle(validSources.includes(oai_settings.chat_completion_source));
@ -3484,6 +3569,42 @@ function updateScaleForm() {
}
}
function onCustomizeParametersClick() {
    const template = $(`
    <div class="flex-container flexFlowColumn height100p">
        <h3>Additional Parameters</h3>
        <div class="flex1 flex-container flexFlowColumn">
            <h4>Include Body Parameters</h4>
            <textarea id="custom_include_body" class="flex1" placeholder="Parameters to be included in the Chat Completion request body (YAML object)&#10;&#10;Example:&#10;- top_k: 20&#10;- repetition_penalty: 1.1"></textarea>
        </div>
        <div class="flex1 flex-container flexFlowColumn">
            <h4>Exclude Body Parameters</h4>
            <textarea id="custom_exclude_body" class="flex1" placeholder="Parameters to be excluded from the Chat Completion request body (YAML array)&#10;&#10;Example:&#10;- frequency_penalty&#10;- presence_penalty"></textarea>
        </div>
        <div class="flex1 flex-container flexFlowColumn">
            <h4>Include Request Headers</h4>
            <textarea id="custom_include_headers" class="flex1" placeholder="Additional headers for Chat Completion requests (YAML object)&#10;&#10;Example:&#10;- CustomHeader: custom-value&#10;- AnotherHeader: custom-value"></textarea>
        </div>
    </div>`);

    // Each textarea mirrors one oai_settings field; bind them all the same way.
    const bindings = [
        ['#custom_include_body', 'custom_include_body'],
        ['#custom_exclude_body', 'custom_exclude_body'],
        ['#custom_include_headers', 'custom_include_headers'],
    ];

    for (const [selector, settingKey] of bindings) {
        template.find(selector)
            .val(oai_settings[settingKey])
            .on('input', function () {
                // Keep `function` (not arrow) so jQuery binds `this` to the textarea.
                oai_settings[settingKey] = String($(this).val());
                saveSettingsDebounced();
            });
    }

    callPopup(template, 'text', '', { wide: true, large: true });
}
/**
* Check if the model supports image inlining
* @returns {boolean} True if the model supports image inlining
@ -3495,7 +3616,7 @@ export function isImageInliningSupported() {
const gpt4v = 'gpt-4-vision';
const geminiProV = 'gemini-pro-vision';
const llava13b = 'llava-13b';
const llava = 'llava';
if (!oai_settings.image_inlining) {
return false;
@ -3507,7 +3628,9 @@ export function isImageInliningSupported() {
case chat_completion_sources.MAKERSUITE:
return oai_settings.google_model.includes(geminiProV);
case chat_completion_sources.OPENROUTER:
return oai_settings.openrouter_model.includes(gpt4v) || oai_settings.openrouter_model.includes(llava13b);
return !oai_settings.openrouter_force_instruct && (oai_settings.openrouter_model.includes(gpt4v) || oai_settings.openrouter_model.includes(llava));
case chat_completion_sources.CUSTOM:
return oai_settings.custom_model.includes(gpt4v) || oai_settings.custom_model.includes(llava) || oai_settings.custom_model.includes(geminiProV);
default:
return false;
}
@ -3813,6 +3936,16 @@ $(document).ready(async function () {
saveSettingsDebounced();
});
$('#custom_api_url_text').on('input', function () {
oai_settings.custom_url = String($(this).val());
saveSettingsDebounced();
});
$('#custom_model_id').on('input', function () {
oai_settings.custom_model = String($(this).val());
saveSettingsDebounced();
});
$(document).on('input', '#openai_settings .autoSetHeight', function () {
resetScrollHeight($(this));
});
@ -3829,6 +3962,7 @@ $(document).ready(async function () {
$('#openrouter_sort_models').on('change', onOpenrouterModelSortChange);
$('#model_ai21_select').on('change', onModelChange);
$('#model_mistralai_select').on('change', onModelChange);
$('#model_custom_select').on('change', onModelChange);
$('#settings_preset_openai').on('change', onSettingsPresetChange);
$('#new_oai_preset').on('click', onNewPresetClick);
$('#delete_oai_preset').on('click', onDeletePresetClick);
@ -3843,4 +3977,5 @@ $(document).ready(async function () {
$('#openai_logit_bias_delete_preset').on('click', onLogitBiasPresetDeleteClick);
$('#import_oai_preset').on('click', onImportPresetClick);
$('#openai_proxy_password_show').on('click', onProxyPasswordShowClick);
$('#customize_additional_parameters').on('click', onCustomizeParametersClick);
});

View File

@ -193,6 +193,22 @@ export function autoSelectPersona(name) {
}
}
/**
 * Updates the name of a persona if it exists.
 * Refreshes the avatar list and persists settings after a successful rename.
 * @param {string} avatarId User avatar id
 * @param {string} newName New name for the persona
 * @returns {Promise<void>}
 */
export async function updatePersonaNameIfExists(avatarId, newName) {
    // Unknown avatar: nothing to rename, just log and bail out.
    if (!(avatarId in power_user.personas)) {
        console.log(`Persona name ${avatarId} was not updated because it does not exist`);
        return;
    }

    power_user.personas[avatarId] = newName;
    await getUserAvatars();
    saveSettingsDebounced();
    console.log(`Updated persona name for ${avatarId} to ${newName}`);
}
async function bindUserNameToPersona() {
const avatarId = $(this).closest('.avatar-container').find('.avatar').attr('imgfile');

View File

@ -205,6 +205,7 @@ let power_user = {
names: false,
names_force_groups: true,
activation_regex: '',
bind_to_context: false,
},
default_context: 'Default',
@ -1718,6 +1719,7 @@ function loadContextSettings() {
}
});
if (power_user.instruct.bind_to_context) {
// Select matching instruct preset
for (const instruct_preset of instruct_presets) {
// If instruct preset matches the context template
@ -1726,9 +1728,9 @@ function loadContextSettings() {
break;
}
}
}
highlightDefaultContext();
saveSettingsDebounced();
});

View File

@ -303,6 +303,7 @@ class PresetManager {
'model_novel',
'streaming_kobold',
'enabled',
'bind_to_context',
'seed',
'legacy_api',
'mancer_model',

View File

@ -16,6 +16,7 @@ export const SECRET_KEYS = {
SERPAPI: 'api_key_serpapi',
MISTRALAI: 'api_key_mistralai',
TOGETHERAI: 'api_key_togetherai',
CUSTOM: 'api_key_custom',
};
const INPUT_MAP = {
@ -32,6 +33,7 @@ const INPUT_MAP = {
[SECRET_KEYS.APHRODITE]: '#api_key_aphrodite',
[SECRET_KEYS.TABBY]: '#api_key_tabby',
[SECRET_KEYS.MISTRALAI]: '#api_key_mistralai',
[SECRET_KEYS.CUSTOM]: '#api_key_custom',
[SECRET_KEYS.TOGETHERAI]: '#api_key_togetherai',
};

View File

@ -388,6 +388,10 @@ export function getTokenizerModel() {
return mistralTokenizer;
}
if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) {
return oai_settings.custom_model;
}
// Default to Turbo 3.5
return turboTokenizer;
}

View File

@ -3531,11 +3531,11 @@ a {
display: none;
}
.reverse_proxy_warning {
.reverse_proxy_warning:not(small) {
color: var(--warning);
background-color: var(--black70a);
text-shadow: none !important;
margin-top: 12px !important;
margin-top: 5px !important;
border-radius: 5px;
padding: 3px;
border: 1px solid var(--SmartThemeBorderColor);

View File

@ -160,6 +160,7 @@ const CHAT_COMPLETION_SOURCES = {
AI21: 'ai21',
MAKERSUITE: 'makersuite',
MISTRALAI: 'mistralai',
CUSTOM: 'custom',
};
const UPLOADS_PATH = './uploads';

View File

@ -4,7 +4,7 @@ const { Readable } = require('stream');
const { jsonParser } = require('../../express-common');
const { CHAT_COMPLETION_SOURCES, GEMINI_SAFETY, BISON_SAFETY } = require('../../constants');
const { forwardFetchResponse, getConfigValue, tryParse, uuidv4, color } = require('../../util');
const { forwardFetchResponse, getConfigValue, tryParse, uuidv4, mergeObjectWithYaml, excludeKeysByYaml } = require('../../util');
const { convertClaudePrompt, convertGooglePrompt, convertTextCompletionPrompt } = require('../prompt-converters');
const { readSecret, SECRET_KEYS } = require('../secrets');
@ -21,10 +21,9 @@ const API_CLAUDE = 'https://api.anthropic.com/v1';
async function sendClaudeRequest(request, response) {
const apiUrl = new URL(request.body.reverse_proxy || API_CLAUDE).toString();
const apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(SECRET_KEYS.CLAUDE);
const divider = '-'.repeat(process.stdout.columns);
if (!apiKey) {
console.log(color.red(`Claude API key is missing.\n${divider}`));
console.log('Claude API key is missing.');
return response.status(400).send({ error: true });
}
@ -35,44 +34,14 @@ async function sendClaudeRequest(request, response) {
controller.abort();
});
const isSysPromptSupported = request.body.model === 'claude-2' || request.body.model === 'claude-2.1';
const requestPrompt = convertClaudePrompt(request.body.messages, !request.body.exclude_assistant, request.body.assistant_prefill, isSysPromptSupported, request.body.claude_use_sysprompt, request.body.human_sysprompt_message);
let doSystemPrompt = request.body.model === 'claude-2' || request.body.model === 'claude-2.1';
let requestPrompt = convertClaudePrompt(request.body.messages, true, !request.body.exclude_assistant, doSystemPrompt);
console.log(color.green(`${divider}\nClaude request\n`) + color.cyan(`PROMPT\n${divider}\n${requestPrompt}\n${divider}`));
// Check Claude messages sequence and prefixes presence.
const sequence = requestPrompt.split('\n').filter(x => x.startsWith('Human:') || x.startsWith('Assistant:'));
const humanFound = sequence.some(line => line.startsWith('Human:'));
const assistantFound = sequence.some(line => line.startsWith('Assistant:'));
let humanErrorCount = 0;
let assistantErrorCount = 0;
for (let i = 0; i < sequence.length - 1; i++) {
if (sequence[i].startsWith(sequence[i + 1].split(':')[0])) {
if (sequence[i].startsWith('Human:')) {
humanErrorCount++;
} else if (sequence[i].startsWith('Assistant:')) {
assistantErrorCount++;
}
}
if (request.body.assistant_prefill && !request.body.exclude_assistant) {
requestPrompt += request.body.assistant_prefill;
}
if (!humanFound) {
console.log(color.red(`${divider}\nWarning: No 'Human:' prefix found in the prompt.\n${divider}`));
}
if (!assistantFound) {
console.log(color.red(`${divider}\nWarning: No 'Assistant: ' prefix found in the prompt.\n${divider}`));
}
if (!sequence[0].startsWith('Human:')) {
console.log(color.red(`${divider}\nWarning: The messages sequence should start with 'Human:' prefix.\nMake sure you have 'Human:' prefix at the very beggining of the prompt, or after the system prompt.\n${divider}`));
}
if (humanErrorCount > 0 || assistantErrorCount > 0) {
console.log(color.red(`${divider}\nWarning: Detected incorrect Prefix sequence(s).`));
console.log(color.red(`Incorrect "Human:" prefix(es): ${humanErrorCount}.\nIncorrect "Assistant: " prefix(es): ${assistantErrorCount}.`));
console.log(color.red('Check the prompt above and fix it in the sillytavern.'));
console.log(color.red('\nThe correct sequence should look like this:\nSystem prompt <-(for the sysprompt format only, else have 2 empty lines above the first human\'s message.)'));
console.log(color.red(` <-----(Each message beginning with the "Assistant:/Human:" prefix must have one empty line above.)\nHuman:\n\nAssistant:\n...\n\nHuman:\n\nAssistant:\n${divider}`));
}
console.log('Claude request:', requestPrompt);
const stop_sequences = ['\n\nHuman:', '\n\nSystem:', '\n\nAssistant:'];
// Add custom stop sequences
@ -106,20 +75,20 @@ async function sendClaudeRequest(request, response) {
forwardFetchResponse(generateResponse, response);
} else {
if (!generateResponse.ok) {
console.log(color.red(`Claude API returned error: ${generateResponse.status} ${generateResponse.statusText}\n${await generateResponse.text()}\n${divider}`));
console.log(`Claude API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`);
return response.status(generateResponse.status).send({ error: true });
}
const generateResponseJson = await generateResponse.json();
const responseText = generateResponseJson.completion;
console.log(color.green(`Claude response\n${divider}\n${responseText}\n${divider}`));
console.log('Claude response:', responseText);
// Wrap it back to OAI format
const reply = { choices: [{ 'message': { 'content': responseText } }] };
return response.send(reply);
}
} catch (error) {
console.log(color.red(`Error communicating with Claude: ${error}\n${divider}`));
console.log('Error communicating with Claude: ', error);
if (!response.headersSent) {
return response.status(500).send({ error: true });
}
@ -469,13 +438,7 @@ async function sendMistralAIRequest(request, response) {
controller.abort();
});
const config = {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': 'Bearer ' + apiKey,
},
body: JSON.stringify({
const requestBody = {
'model': request.body.model,
'messages': messages,
'temperature': request.body.temperature,
@ -484,11 +447,21 @@ async function sendMistralAIRequest(request, response) {
'stream': request.body.stream,
'safe_mode': request.body.safe_mode,
'random_seed': request.body.seed === -1 ? undefined : request.body.seed,
}),
};
const config = {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': 'Bearer ' + apiKey,
},
body: JSON.stringify(requestBody),
signal: controller.signal,
timeout: 0,
};
console.log('MisralAI request:', requestBody);
const generateResponse = await fetch('https://api.mistral.ai/v1/chat/completions', config);
if (request.body.stream) {
forwardFetchResponse(generateResponse, response);
@ -500,6 +473,7 @@ async function sendMistralAIRequest(request, response) {
return response.status(generateResponse.status === 401 ? 500 : generateResponse.status).send({ error: true });
}
const generateResponseJson = await generateResponse.json();
console.log('MistralAI response:', generateResponseJson);
return response.send(generateResponseJson);
}
} catch (error) {
@ -533,12 +507,17 @@ router.post('/status', jsonParser, async function (request, response_getstatus_o
} else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.MISTRALAI) {
api_url = 'https://api.mistral.ai/v1';
api_key_openai = readSecret(SECRET_KEYS.MISTRALAI);
} else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.CUSTOM) {
api_url = request.body.custom_url;
api_key_openai = readSecret(SECRET_KEYS.CUSTOM);
headers = {};
mergeObjectWithYaml(headers, request.body.custom_include_headers);
} else {
console.log('This chat completion source is not supported yet.');
return response_getstatus_openai.status(400).send({ error: true });
}
if (!api_key_openai && !request.body.reverse_proxy) {
if (!api_key_openai && !request.body.reverse_proxy && request.body.chat_completion_source !== CHAT_COMPLETION_SOURCES.CUSTOM) {
console.log('OpenAI API key is missing.');
return response_getstatus_openai.status(400).send({ error: true });
}
@ -688,7 +667,7 @@ router.post('/generate', jsonParser, function (request, response) {
let headers;
let bodyParams;
if (request.body.chat_completion_source !== CHAT_COMPLETION_SOURCES.OPENROUTER) {
if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.OPENAI) {
apiUrl = new URL(request.body.reverse_proxy || API_OPENAI).toString();
apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(SECRET_KEYS.OPENAI);
headers = {};
@ -697,7 +676,7 @@ router.post('/generate', jsonParser, function (request, response) {
if (getConfigValue('openai.randomizeUserId', false)) {
bodyParams['user'] = uuidv4();
}
} else {
} else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.OPENROUTER) {
apiUrl = 'https://openrouter.ai/api/v1';
apiKey = readSecret(SECRET_KEYS.OPENROUTER);
// OpenRouter needs to pass the referer: https://openrouter.ai/docs
@ -707,9 +686,19 @@ router.post('/generate', jsonParser, function (request, response) {
if (request.body.use_fallback) {
bodyParams['route'] = 'fallback';
}
} else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.CUSTOM) {
apiUrl = request.body.custom_url;
apiKey = readSecret(SECRET_KEYS.CUSTOM);
headers = {};
bodyParams = {};
mergeObjectWithYaml(bodyParams, request.body.custom_include_body);
mergeObjectWithYaml(headers, request.body.custom_include_headers);
} else {
console.log('This chat completion source is not supported yet.');
return response.status(400).send({ error: true });
}
if (!apiKey && !request.body.reverse_proxy) {
if (!apiKey && !request.body.reverse_proxy && request.body.chat_completion_source !== CHAT_COMPLETION_SOURCES.CUSTOM) {
console.log('OpenAI API key is missing.');
return response.status(400).send({ error: true });
}
@ -731,15 +720,7 @@ router.post('/generate', jsonParser, function (request, response) {
controller.abort();
});
/** @type {import('node-fetch').RequestInit} */
const config = {
method: 'post',
headers: {
'Content-Type': 'application/json',
'Authorization': 'Bearer ' + apiKey,
...headers,
},
body: JSON.stringify({
const requestBody = {
'messages': isTextCompletion === false ? request.body.messages : undefined,
'prompt': isTextCompletion === true ? textPrompt : undefined,
'model': request.body.model,
@ -754,12 +735,26 @@ router.post('/generate', jsonParser, function (request, response) {
'logit_bias': request.body.logit_bias,
'seed': request.body.seed,
...bodyParams,
}),
};
if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.CUSTOM) {
excludeKeysByYaml(requestBody, request.body.custom_exclude_body);
}
/** @type {import('node-fetch').RequestInit} */
const config = {
method: 'post',
headers: {
'Content-Type': 'application/json',
'Authorization': 'Bearer ' + apiKey,
...headers,
},
body: JSON.stringify(requestBody),
signal: controller.signal,
timeout: 0,
};
console.log(JSON.parse(String(config.body)));
console.log(requestBody);
makeRequest(config, response, request);

View File

@ -4,13 +4,15 @@ const express = require('express');
const FormData = require('form-data');
const fs = require('fs');
const { jsonParser, urlencodedParser } = require('../express-common');
const { getConfigValue } = require('../util');
const { getConfigValue, mergeObjectWithYaml, excludeKeysByYaml } = require('../util');
const router = express.Router();
router.post('/caption-image', jsonParser, async (request, response) => {
try {
let key = '';
let headers = {};
let bodyParams = {};
if (request.body.api === 'openai' && !request.body.reverse_proxy) {
key = readSecret(SECRET_KEYS.OPENAI);
@ -24,7 +26,13 @@ router.post('/caption-image', jsonParser, async (request, response) => {
key = request.body.proxy_password;
}
if (!key && !request.body.reverse_proxy) {
if (request.body.api === 'custom') {
key = readSecret(SECRET_KEYS.CUSTOM);
mergeObjectWithYaml(bodyParams, request.body.custom_include_body);
mergeObjectWithYaml(headers, request.body.custom_include_headers);
}
if (!key && !request.body.reverse_proxy && request.body.api !== 'custom') {
console.log('No key found for API', request.body.api);
return response.sendStatus(400);
}
@ -41,6 +49,7 @@ router.post('/caption-image', jsonParser, async (request, response) => {
},
],
max_tokens: 500,
...bodyParams,
};
const captionSystemPrompt = getConfigValue('openai.captionSystemPrompt');
@ -51,10 +60,13 @@ router.post('/caption-image', jsonParser, async (request, response) => {
});
}
if (request.body.api === 'custom') {
excludeKeysByYaml(body, request.body.custom_exclude_body);
}
console.log('Multimodal captioning request', body);
let apiUrl = '';
let headers = {};
if (request.body.api === 'openrouter') {
apiUrl = 'https://openrouter.ai/api/v1/chat/completions';
@ -69,6 +81,10 @@ router.post('/caption-image', jsonParser, async (request, response) => {
apiUrl = `${request.body.reverse_proxy}/chat/completions`;
}
if (request.body.api === 'custom') {
apiUrl = `${request.body.server_url}/chat/completions`;
}
const result = await fetch(apiUrl, {
method: 'POST',
headers: {

View File

@ -27,6 +27,7 @@ const SECRET_KEYS = {
SERPAPI: 'api_key_serpapi',
TOGETHERAI: 'api_key_togetherai',
MISTRALAI: 'api_key_mistralai',
CUSTOM: 'api_key_custom',
};
/**

View File

@ -399,6 +399,65 @@ function forwardFetchResponse(from, to) {
});
}
/**
 * Merges a YAML-serialized object into the given object (in place).
 * Accepts either a YAML map, or a YAML list of single-key maps
 * (each list entry is merged individually). Invalid YAML is ignored.
 * @param {object} obj Object to merge into (mutated)
 * @param {string} yamlString YAML-serialized object
 * @returns {void} Nothing
 */
function mergeObjectWithYaml(obj, yamlString) {
    if (!yamlString) {
        return;
    }

    try {
        const parsed = yaml.parse(yamlString);

        if (Array.isArray(parsed)) {
            // List form: merge every entry that is a plain (non-array) object.
            parsed
                .filter(entry => entry && typeof entry === 'object' && !Array.isArray(entry))
                .forEach(entry => Object.assign(obj, entry));
        } else if (parsed && typeof parsed === 'object') {
            // Map form: merge directly.
            Object.assign(obj, parsed);
        }
    } catch {
        // Malformed YAML is deliberately ignored: caller proceeds without extras.
    }
}
/**
 * Removes keys from the object by YAML-serialized array (in place).
 * Accepts a YAML list of key names, a YAML map (whose keys are removed),
 * or a bare scalar string naming a single key. Invalid YAML is ignored.
 * @param {object} obj Object to remove keys from (mutated)
 * @param {string} yamlString YAML-serialized array
 * @returns {void} Nothing
 */
function excludeKeysByYaml(obj, yamlString) {
    if (!yamlString) {
        return;
    }

    try {
        const parsedObject = yaml.parse(yamlString);

        if (Array.isArray(parsedObject)) {
            // List form: each entry names a key to drop.
            for (const key of parsedObject) {
                delete obj[key];
            }
        } else if (parsedObject && typeof parsedObject === 'object') {
            // Map form: drop every key present in the map.
            // The explicit null guard matters: `typeof null === 'object'`, and
            // without it a YAML document parsing to null would make
            // Object.keys(null) throw a TypeError silently eaten by the catch.
            for (const key of Object.keys(parsedObject)) {
                delete obj[key];
            }
        } else if (typeof parsedObject === 'string') {
            // Bare scalar: treat as a single key name.
            delete obj[parsedObject];
        }
    } catch {
        // Malformed YAML is deliberately ignored: no keys are removed.
    }
}
module.exports = {
getConfig,
getConfigValue,
@ -420,4 +479,6 @@ module.exports = {
getImages,
forwardFetchResponse,
getHexString,
mergeObjectWithYaml,
excludeKeysByYaml,
};