Merge remote-tracking branch 'upstream/staging' into staging
@@ -396,6 +396,7 @@ function RA_autoconnect(PrevApi) {
        || (secret_state[SECRET_KEYS.OPENROUTER] && oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER)
        || (secret_state[SECRET_KEYS.AI21] && oai_settings.chat_completion_source == chat_completion_sources.AI21)
        || (secret_state[SECRET_KEYS.MAKERSUITE] && oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE)
        || (secret_state[SECRET_KEYS.MISTRALAI] && oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI)
    ) {
        $('#api_button_openai').trigger('click');
    }
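
Illustrative sketch (not part of the diff): the auto-connect condition above amounts to "the selected chat completion source has a saved API key". Assuming the same imports as the file being patched, a table-driven equivalent for these four sources might look like:

// Hypothetical helper, for illustration only -- not in the commit.
const sourceToSecret = {
    [chat_completion_sources.OPENROUTER]: SECRET_KEYS.OPENROUTER,
    [chat_completion_sources.AI21]: SECRET_KEYS.AI21,
    [chat_completion_sources.MAKERSUITE]: SECRET_KEYS.MAKERSUITE,
    [chat_completion_sources.MISTRALAI]: SECRET_KEYS.MISTRALAI,
};
const canAutoConnect = Boolean(secret_state[sourceToSecret[oai_settings.chat_completion_source]]);
if (canAutoConnect) {
    $('#api_button_openai').trigger('click');
}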
@@ -1052,11 +1053,12 @@ export function initRossMods() {
        }

        if (event.key == 'ArrowUp') { //edits last message if chatbar is empty and focused
            //console.log('got uparrow input');
            console.log('got uparrow input');
            if (
                $('#send_textarea').val() === '' &&
                chatbarInFocus === true &&
                $('.swipe_right:last').css('display') === 'flex' &&
                //$('.swipe_right:last').css('display') === 'flex' &&
                $('.last_mes .mes_buttons').is(':visible') &&
                $('#character_popup').css('display') === 'none' &&
                $('#shadow_select_chat_popup').css('display') === 'none'
            ) {
@@ -1711,7 +1711,7 @@ async function getPrompt(generationType, message, trigger, quietPrompt) {
            prompt = message || getRawLastMessage();
            break;
        case generationMode.FREE:
            prompt = trigger.trim();
            prompt = generateFreeModePrompt(trigger.trim());
            break;
        case generationMode.FACE_MULTIMODAL:
        case generationMode.CHARACTER_MULTIMODAL:
@@ -1730,6 +1730,36 @@ async function getPrompt(generationType, message, trigger, quietPrompt) {
    return prompt;
}

/**
 * Generates a free prompt with a character-specific prompt prefix support.
 * @param {string} trigger - The prompt to use for the image generation.
 * @returns {string}
 */
function generateFreeModePrompt(trigger) {
    return trigger
        .replace(/(?:^char(\s|,)|\{\{charPrefix\}\})/gi, (_, suffix) => {
            const getLastCharacterKey = () => {
                if (typeof this_chid !== 'undefined') {
                    return getCharaFilename(this_chid);
                }
                const context = getContext();
                for (let i = context.chat.length - 1; i >= 0; i--) {
                    const message = context.chat[i];
                    if (message.is_user || message.is_system) {
                        continue;
                    } else if (typeof message.original_avatar === 'string') {
                        return message.original_avatar.replace(/\.[^/.]+$/, '');
                    }
                }
                throw new Error('No usable messages found.');
            };

            const key = getLastCharacterKey();
            const value = (extension_settings.sd.character_prompts[key] || '').trim();
            return value ? value + (suffix || '') : '';
        });
}

/**
 * Generates a prompt using multimodal captioning.
 * @param {number} generationType - The type of image generation to perform.
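
Illustrative sketch (not part of the diff): how the replacement in generateFreeModePrompt behaves, assuming the current character's saved Image Generation prefix in extension_settings.sd.character_prompts is 'masterpiece, 1girl, silver hair':

// Hypothetical inputs and outputs, for illustration only.
generateFreeModePrompt('char, standing in the rain');
// -> 'masterpiece, 1girl, silver hair, standing in the rain'
generateFreeModePrompt('{{charPrefix}} close-up portrait');
// -> 'masterpiece, 1girl, silver hair close-up portrait'
generateFreeModePrompt('a quiet forest at dusk');
// -> 'a quiet forest at dusk' (no prefix token, so the prompt is returned unchanged)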
@@ -394,7 +394,8 @@ async function getSavedHashes(collectionId) {
|
||||
*/
|
||||
async function insertVectorItems(collectionId, items) {
|
||||
if (settings.source === 'openai' && !secret_state[SECRET_KEYS.OPENAI] ||
|
||||
settings.source === 'palm' && !secret_state[SECRET_KEYS.MAKERSUITE]) {
|
||||
settings.source === 'palm' && !secret_state[SECRET_KEYS.MAKERSUITE] ||
|
||||
settings.source === 'mistral' && !secret_state[SECRET_KEYS.MISTRALAI]) {
|
||||
throw new Error('Vectors: API key missing', { cause: 'api_key_missing' });
|
||||
}
|
||||
|
||||
|
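
Illustrative sketch (not part of the diff): the guard above expressed as a lookup table, assuming the same imports as the extension's index.js; behavior is equivalent for the three remote sources, and local sources pass through:

// Hypothetical rewrite, for illustration only.
const sourceToSecretKey = {
    openai: SECRET_KEYS.OPENAI,
    palm: SECRET_KEYS.MAKERSUITE,
    mistral: SECRET_KEYS.MISTRALAI,
};
const requiredSecret = sourceToSecretKey[settings.source];
if (requiredSecret && !secret_state[requiredSecret]) {
    throw new Error('Vectors: API key missing', { cause: 'api_key_missing' });
}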
@@ -13,6 +13,7 @@
            <option value="transformers">Local (Transformers)</option>
            <option value="openai">OpenAI</option>
            <option value="palm">Google MakerSuite (PaLM)</option>
            <option value="mistral">MistralAI</option>
        </select>
    </div>
@@ -164,6 +164,7 @@ export const chat_completion_sources = {
    OPENROUTER: 'openrouter',
    AI21: 'ai21',
    MAKERSUITE: 'makersuite',
    MISTRALAI: 'mistralai',
};

const prefixMap = selected_group ? {
@@ -208,6 +209,7 @@ const default_settings = {
    claude_model: 'claude-instant-v1',
    google_model: 'gemini-pro',
    ai21_model: 'j2-ultra',
    mistralai_model: 'mistral-medium',
    windowai_model: '',
    openrouter_model: openrouter_website_model,
    openrouter_use_fallback: false,
@@ -265,6 +267,7 @@ const oai_settings = {
    claude_model: 'claude-instant-v1',
    google_model: 'gemini-pro',
    ai21_model: 'j2-ultra',
    mistralai_model: 'mistral-medium',
    windowai_model: '',
    openrouter_model: openrouter_website_model,
    openrouter_use_fallback: false,
@@ -1265,6 +1268,8 @@ function getChatCompletionModel() {
            return oai_settings.openrouter_model !== openrouter_website_model ? oai_settings.openrouter_model : null;
        case chat_completion_sources.AI21:
            return oai_settings.ai21_model;
        case chat_completion_sources.MISTRALAI:
            return oai_settings.mistralai_model;
        default:
            throw new Error(`Unknown chat completion source: ${oai_settings.chat_completion_source}`);
    }
@@ -1452,6 +1457,7 @@ async function sendOpenAIRequest(type, messages, signal) {
    const isAI21 = oai_settings.chat_completion_source == chat_completion_sources.AI21;
    const isGoogle = oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE;
    const isOAI = oai_settings.chat_completion_source == chat_completion_sources.OPENAI;
    const isMistral = oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI;
    const isTextCompletion = (isOAI && textCompletionModels.includes(oai_settings.openai_model)) || (isOpenRouter && oai_settings.openrouter_force_instruct && power_user.instruct.enabled);
    const isQuiet = type === 'quiet';
    const isImpersonate = type === 'impersonate';
@@ -1561,7 +1567,11 @@ async function sendOpenAIRequest(type, messages, signal) {
        generate_data['stop_tokens'] = [name1 + ':', oai_settings.new_chat_prompt, oai_settings.new_group_chat_prompt];
    }

    if ((isOAI || isOpenRouter) && oai_settings.seed >= 0) {
    if (isMistral) {
        generate_data['safe_mode'] = false; // already defaults to false, but just incase they change that in the future.
    }

    if ((isOAI || isOpenRouter || isMistral) && oai_settings.seed >= 0) {
        generate_data['seed'] = oai_settings.seed;
    }
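
Illustrative sketch (not part of the diff): how the two branches above combine for a MistralAI request, with an assumed seed value and the rest of the payload omitted:

// Hypothetical walk-through, for illustration only.
const generate_data = {};          // other request fields assumed
const isOAI = false, isOpenRouter = false, isMistral = true;
const seed = 1234;                 // stands in for oai_settings.seed
if (isMistral) {
    generate_data['safe_mode'] = false;
}
if ((isOAI || isOpenRouter || isMistral) && seed >= 0) {
    generate_data['seed'] = seed;
}
// generate_data is now { safe_mode: false, seed: 1234 }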
@@ -2306,6 +2316,7 @@ function loadOpenAISettings(data, settings) {
    oai_settings.openrouter_use_fallback = settings.openrouter_use_fallback ?? default_settings.openrouter_use_fallback;
    oai_settings.openrouter_force_instruct = settings.openrouter_force_instruct ?? default_settings.openrouter_force_instruct;
    oai_settings.ai21_model = settings.ai21_model ?? default_settings.ai21_model;
    oai_settings.mistralai_model = settings.mistralai_model ?? default_settings.mistralai_model;
    oai_settings.google_model = settings.google_model ?? default_settings.google_model;
    oai_settings.chat_completion_source = settings.chat_completion_source ?? default_settings.chat_completion_source;
    oai_settings.api_url_scale = settings.api_url_scale ?? default_settings.api_url_scale;
@@ -2351,6 +2362,8 @@ function loadOpenAISettings(data, settings) {
    $(`#model_google_select option[value="${oai_settings.google_model}"`).attr('selected', true);
    $('#model_ai21_select').val(oai_settings.ai21_model);
    $(`#model_ai21_select option[value="${oai_settings.ai21_model}"`).attr('selected', true);
    $('#model_mistralai_select').val(oai_settings.mistralai_model);
    $(`#model_mistralai_select option[value="${oai_settings.mistralai_model}"`).attr('selected', true);
    $('#openai_max_context').val(oai_settings.openai_max_context);
    $('#openai_max_context_counter').val(`${oai_settings.openai_max_context}`);
    $('#model_openrouter_select').val(oai_settings.openrouter_model);
@@ -2459,7 +2472,7 @@ async function getStatusOpen() {
        chat_completion_source: oai_settings.chat_completion_source,
    };

    if (oai_settings.reverse_proxy && oai_settings.chat_completion_source !== chat_completion_sources.OPENROUTER) {
    if (oai_settings.reverse_proxy && (oai_settings.chat_completion_source === chat_completion_sources.OPENAI || oai_settings.chat_completion_source === chat_completion_sources.CLAUDE)) {
        validateReverseProxy();
    }
@@ -2529,6 +2542,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
        openrouter_group_models: settings.openrouter_group_models,
        openrouter_sort_models: settings.openrouter_sort_models,
        ai21_model: settings.ai21_model,
        mistralai_model: settings.mistralai_model,
        google_model: settings.google_model,
        temperature: settings.temp_openai,
        frequency_penalty: settings.freq_pen_openai,
@@ -2902,6 +2916,7 @@ function onSettingsPresetChange() {
        openrouter_group_models: ['#openrouter_group_models', 'openrouter_group_models', false],
        openrouter_sort_models: ['#openrouter_sort_models', 'openrouter_sort_models', false],
        ai21_model: ['#model_ai21_select', 'ai21_model', false],
        mistralai_model: ['#model_mistralai_select', 'mistralai_model', false],
        google_model: ['#model_google_select', 'google_model', false],
        openai_max_context: ['#openai_max_context', 'openai_max_context', false],
        openai_max_tokens: ['#openai_max_tokens', 'openai_max_tokens', false],
@@ -3088,6 +3103,11 @@ async function onModelChange() {
        oai_settings.google_model = value;
    }

    if ($(this).is('#model_mistralai_select')) {
        console.log('MistralAI model changed to', value);
        oai_settings.mistralai_model = value;
    }

    if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) {
        if (oai_settings.max_context_unlocked) {
            $('#openai_max_context').attr('max', unlocked_max);
@@ -3193,6 +3213,16 @@ async function onModelChange() {
        $('#temp_openai').attr('max', oai_max_temp).val(oai_settings.temp_openai).trigger('input');
    }

    if (oai_settings.chat_completion_source === chat_completion_sources.MISTRALAI) {
        $('#openai_max_context').attr('max', max_32k);
        oai_settings.openai_max_context = Math.min(oai_settings.openai_max_context, Number($('#openai_max_context').attr('max')));
        $('#openai_max_context').val(oai_settings.openai_max_context).trigger('input');

        //mistral also caps temp at 1.0
        oai_settings.temp_openai = Math.min(claude_max_temp, oai_settings.temp_openai);
        $('#temp_openai').attr('max', claude_max_temp).val(oai_settings.temp_openai).trigger('input');
    }

    if (oai_settings.chat_completion_source == chat_completion_sources.AI21) {
        if (oai_settings.max_context_unlocked) {
            $('#openai_max_context').attr('max', unlocked_max);
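
Illustrative sketch (not part of the diff): the numeric effect of the MistralAI branch above, assuming max_32k is roughly 32k tokens and claude_max_temp is 1.0 (constant values assumed, not taken from this diff):

// Hypothetical walk-through, for illustration only.
let openai_max_context = 200000;   // e.g. a value carried over from another source
let temp_openai = 1.5;             // above Mistral's supported range
const max_32k = 32768;             // assumed value of the constant
const claude_max_temp = 1.0;       // assumed value of the constant
openai_max_context = Math.min(openai_max_context, max_32k);  // -> 32768
temp_openai = Math.min(claude_max_temp, temp_openai);        // -> 1.0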
@@ -3354,6 +3384,19 @@ async function onConnectButtonClick(e) {
        }
    }

    if (oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI) {
        const api_key_mistralai = String($('#api_key_mistralai').val()).trim();

        if (api_key_mistralai.length) {
            await writeSecret(SECRET_KEYS.MISTRALAI, api_key_mistralai);
        }

        if (!secret_state[SECRET_KEYS.MISTRALAI]) {
            console.log('No secret key saved for MistralAI');
            return;
        }
    }

    startStatusLoading();
    saveSettingsDebounced();
    await getStatusOpen();
@@ -3386,6 +3429,9 @@ function toggleChatCompletionForms() {
    else if (oai_settings.chat_completion_source == chat_completion_sources.AI21) {
        $('#model_ai21_select').trigger('change');
    }
    else if (oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI) {
        $('#model_mistralai_select').trigger('change');
    }
    $('[data-source]').each(function () {
        const validSources = $(this).data('source').split(',');
        $(this).toggle(validSources.includes(oai_settings.chat_completion_source));
@@ -3775,6 +3821,7 @@ $(document).ready(async function () {
    $('#openrouter_group_models').on('change', onOpenrouterModelSortChange);
    $('#openrouter_sort_models').on('change', onOpenrouterModelSortChange);
    $('#model_ai21_select').on('change', onModelChange);
    $('#model_mistralai_select').on('change', onModelChange);
    $('#settings_preset_openai').on('change', onSettingsPresetChange);
    $('#new_oai_preset').on('click', onNewPresetClick);
    $('#delete_oai_preset').on('click', onDeletePresetClick);
@@ -14,6 +14,7 @@ export const SECRET_KEYS = {
    SCALE_COOKIE: 'scale_cookie',
    MAKERSUITE: 'api_key_makersuite',
    SERPAPI: 'api_key_serpapi',
    MISTRALAI: 'api_key_mistralai',
};

const INPUT_MAP = {
@@ -29,6 +30,7 @@ const INPUT_MAP = {
    [SECRET_KEYS.MAKERSUITE]: '#api_key_makersuite',
    [SECRET_KEYS.APHRODITE]: '#api_key_aphrodite',
    [SECRET_KEYS.TABBY]: '#api_key_tabby',
    [SECRET_KEYS.MISTRALAI]: '#api_key_mistralai',
};

async function clearSecret() {
@@ -384,6 +384,10 @@ export function getTokenizerModel() {
        return claudeTokenizer;
    }

    if (oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI) {
        return mistralTokenizer;
    }

    // Default to Turbo 3.5
    return turboTokenizer;
}