Merge branch 'staging' into 202411-auto-templates

Cohee 2024-11-23 15:47:11 +02:00
commit 876da6899d
14 changed files with 663 additions and 321 deletions

View File

@@ -766,5 +766,13 @@
{
"filename": "presets/context/Mistral V3-Tekken.json",
"type": "context"
},
{
"filename": "presets/instruct/Mistral V7.json",
"type": "instruct"
},
{
"filename": "presets/context/Mistral V7.json",
"type": "context"
}
]

View File

@@ -0,0 +1,11 @@
{
"story_string": "[SYSTEM_PROMPT] {{#if system}}{{system}}\n{{/if}}{{#if wiBefore}}{{wiBefore}}\n{{/if}}{{#if description}}{{description}}\n{{/if}}{{#if personality}}{{personality}}\n{{/if}}{{#if scenario}}{{scenario}}\n{{/if}}{{#if wiAfter}}{{wiAfter}}\n{{/if}}{{#if persona}}{{persona}}\n{{/if}}{{trim}}[/SYSTEM_PROMPT]",
"example_separator": "",
"chat_start": "",
"use_stop_strings": false,
"allow_jailbreak": false,
"always_force_name2": true,
"trim_sentences": false,
"single_line": false,
"name": "Mistral V7"
}

View File

@@ -0,0 +1,22 @@
{
"input_sequence": "[INST] ",
"output_sequence": " ",
"last_output_sequence": "",
"system_sequence": "[SYSTEM_PROMPT] ",
"stop_sequence": "</s>",
"wrap": false,
"macro": true,
"names_behavior": "always",
"activation_regex": "",
"system_sequence_prefix": "",
"system_sequence_suffix": "",
"first_output_sequence": "",
"skip_examples": false,
"output_suffix": "</s>",
"input_suffix": "[/INST]",
"system_suffix": "[/SYSTEM_PROMPT]",
"user_alignment_message": "",
"system_same_as_user": false,
"last_system_sequence": "",
"name": "Mistral V7"
}

View File

@@ -474,6 +474,8 @@ label[for="trim_spaces"]:has(input:checked) i.warning {
#claude_function_prefill_warning {
display: none;
color: red;
font-weight: bold;
}
#openai_settings:has(#openai_function_calling:checked):has(#claude_assistant_prefill:not(:placeholder-shown), #claude_assistant_impersonation:not(:placeholder-shown)) #claude_function_prefill_warning {

View File

@@ -2740,6 +2740,7 @@
</optgroup>
<optgroup label="GPT-4o">
<option value="gpt-4o">gpt-4o</option>
<option value="gpt-4o-2024-11-20">gpt-4o-2024-11-20</option>
<option value="gpt-4o-2024-08-06">gpt-4o-2024-08-06</option>
<option value="gpt-4o-2024-05-13">gpt-4o-2024-05-13</option>
<option value="chatgpt-4o-latest">chatgpt-4o-latest</option>
@@ -2979,6 +2980,7 @@
<option value="chat-bison-001">PaLM 2 Chat (Legacy)</option>
</optgroup>
<optgroup label="Subversions">
<option value="gemini-exp-1121">Gemini Experimental 2024-11-21</option>
<option value="gemini-exp-1114">Gemini Experimental 2024-11-14</option>
<option value="gemini-1.5-pro-exp-0801">Gemini 1.5 Pro Experiment 2024-08-01</option>
<option value="gemini-1.5-pro-exp-0827">Gemini 1.5 Pro Experiment 2024-08-27</option>
@@ -3159,6 +3161,8 @@
<h4 data-i18n="Cohere Model">Cohere Model</h4>
<select id="model_cohere_select">
<optgroup label="Stable">
<option value="c4ai-aya-expanse-32b">c4ai-aya-expanse-32b</option>
<option value="c4ai-aya-expanse-8b">c4ai-aya-expanse-8b</option>
<option value="c4ai-aya-23-35b">c4ai-aya-23-35b</option>
<option value="c4ai-aya-23-8b">c4ai-aya-23-8b</option>
<option value="command-light">command-light</option>
@@ -4486,7 +4490,7 @@
</div>
<div nane="AutoContiueBlock" class="inline-drawer wide100p flexFlowColumn">
<div class="inline-drawer-toggle inline-drawer-header userSettingsInnerExpandable" title="Automatically 'continue' a response if the model stopped before reaching a certain amount of tokens.">
<b><span data-i18n="Auto-swipe">Auto-Continue</span></b>
<b><span data-i18n="Auto-Continue">Auto-Continue</span></b>
<div class="fa-solid fa-circle-chevron-down inline-drawer-icon down"></div>
</div>
<div class="inline-drawer-content">
@@ -5065,6 +5069,7 @@
<span data-i18n="Group reply strategy">Group reply strategy</span>
</label>
<select id="rm_group_activation_strategy">
<option value="2" data-i18n="Manual">Manual</option>
<option value="0" data-i18n="Natural order">Natural order</option>
<option value="1" data-i18n="List order">List order</option>
</select>

File diff suppressed because it is too large

View File

@@ -37,53 +37,85 @@ const chara_note_position = {
};
function setNoteTextCommand(_, text) {
$('#extension_floating_prompt').val(text).trigger('input');
toastr.success(t`Author's Note text updated`);
return '';
if (text) {
$('#extension_floating_prompt').val(text).trigger('input');
toastr.success(t`Author's Note text updated`);
}
return chat_metadata[metadata_keys.prompt];
}
function setNoteDepthCommand(_, text) {
const value = Number(text);
if (text) {
const value = Number(text);
if (Number.isNaN(value)) {
toastr.error(t`Not a valid number`);
return;
if (Number.isNaN(value)) {
toastr.error(t`Not a valid number`);
return;
}
$('#extension_floating_depth').val(Math.abs(value)).trigger('input');
toastr.success(t`Author's Note depth updated`);
}
$('#extension_floating_depth').val(Math.abs(value)).trigger('input');
toastr.success(t`Author's Note depth updated`);
return '';
return chat_metadata[metadata_keys.depth];
}
function setNoteIntervalCommand(_, text) {
const value = Number(text);
if (text) {
const value = Number(text);
if (Number.isNaN(value)) {
toastr.error(t`Not a valid number`);
return;
if (Number.isNaN(value)) {
toastr.error(t`Not a valid number`);
return;
}
$('#extension_floating_interval').val(Math.abs(value)).trigger('input');
toastr.success(t`Author's Note frequency updated`);
}
$('#extension_floating_interval').val(Math.abs(value)).trigger('input');
toastr.success(t`Author's Note frequency updated`);
return '';
return chat_metadata[metadata_keys.interval];
}
function setNotePositionCommand(_, text) {
const validPositions = {
'after': 0,
'scenario': 0,
'chat': 1,
'before_scenario': 2,
'before': 2,
};
const position = validPositions[text?.trim()];
if (text) {
const position = validPositions[text?.trim()?.toLowerCase()];
if (Number.isNaN(position)) {
toastr.error(t`Not a valid position`);
return;
if (typeof position === 'undefined') {
toastr.error(t`Not a valid position`);
return;
}
$(`input[name="extension_floating_position"][value="${position}"]`).prop('checked', true).trigger('input');
toastr.info(t`Author's Note position updated`);
}
return Object.keys(validPositions).find(key => validPositions[key] == chat_metadata[metadata_keys.position]);
}
$(`input[name="extension_floating_position"][value="${position}"]`).prop('checked', true).trigger('input');
toastr.info(t`Author's Note position updated`);
return '';
function setNoteRoleCommand(_, text) {
const validRoles = {
'system': 0,
'user': 1,
'assistant': 2,
};
if (text) {
const role = validRoles[text?.trim()?.toLowerCase()];
if (typeof role === 'undefined') {
toastr.error(t`Not a valid role`);
return;
}
$('#extension_floating_role').val(Math.abs(role)).trigger('input');
toastr.info(t`Author's Note role updated`);
}
return Object.keys(validRoles).find(key => validRoles[key] == chat_metadata[metadata_keys.role]);
}
function updateSettings() {
@@ -462,57 +494,84 @@ export function initAuthorsNote() {
});
$('#option_toggle_AN').on('click', onANMenuItemClick);
SlashCommandParser.addCommandObject(SlashCommand.fromProps({ name: 'note',
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'note',
callback: setNoteTextCommand,
returns: 'current author\'s note',
unnamedArgumentList: [
new SlashCommandArgument(
'text', [ARGUMENT_TYPE.STRING], true,
'text', [ARGUMENT_TYPE.STRING], false,
),
],
helpString: `
<div>
Sets an author's note for the currently selected chat.
Sets an author's note for the currently selected chat if specified and returns the current note.
</div>
`,
}));
SlashCommandParser.addCommandObject(SlashCommand.fromProps({ name: 'depth',
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'note-depth',
aliases: ['depth'],
callback: setNoteDepthCommand,
returns: 'current author\'s note depth',
unnamedArgumentList: [
new SlashCommandArgument(
'number', [ARGUMENT_TYPE.NUMBER], true,
'number', [ARGUMENT_TYPE.NUMBER], false,
),
],
helpString: `
<div>
Sets an author's note depth for in-chat positioning.
Sets an author's note depth for in-chat positioning if specified and returns the current depth.
</div>
`,
}));
SlashCommandParser.addCommandObject(SlashCommand.fromProps({ name: 'freq',
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'note-frequency',
aliases: ['freq', 'note-freq'],
callback: setNoteIntervalCommand,
returns: 'current author\'s note insertion frequency',
namedArgumentList: [],
unnamedArgumentList: [
new SlashCommandArgument(
'number', [ARGUMENT_TYPE.NUMBER], true,
'number', [ARGUMENT_TYPE.NUMBER], false,
),
],
helpString: `
<div>
Sets an author's note insertion frequency.
Sets an author's note insertion frequency if specified and returns the current frequency.
</div>
`,
}));
SlashCommandParser.addCommandObject(SlashCommand.fromProps({ name: 'pos',
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'note-position',
callback: setNotePositionCommand,
aliases: ['pos', 'note-pos'],
returns: 'current author\'s note insertion position',
namedArgumentList: [],
unnamedArgumentList: [
new SlashCommandArgument(
'position', [ARGUMENT_TYPE.STRING], true, false, null, ['chat', 'scenario'],
'position', [ARGUMENT_TYPE.STRING], false, false, null, ['before', 'after', 'chat'],
),
],
helpString: `
<div>
Sets an author's note position.
Sets an author's note position if specified and returns the current position.
</div>
`,
}));
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'note-role',
callback: setNoteRoleCommand,
returns: 'current author\'s note chat insertion role',
namedArgumentList: [],
unnamedArgumentList: [
new SlashCommandArgument(
'position', [ARGUMENT_TYPE.STRING], false, false, null, ['system', 'user', 'assistant'],
),
],
helpString: `
<div>
Sets an author's note chat insertion role if specified and returns the current role.
</div>
`,
}));

View File

@@ -61,6 +61,7 @@
<option data-type="google" value="gemini-1.5-flash-8b-exp-0827">gemini-1.5-flash-8b-exp-0827</option>
<option data-type="google" value="gemini-1.5-flash-8b-exp-0924">gemini-1.5-flash-8b-exp-0924</option>
<option data-type="google" value="gemini-exp-1114">gemini-exp-1114</option>
<option data-type="google" value="gemini-exp-1121">gemini-exp-1121</option>
<option data-type="google" value="gemini-1.5-pro">gemini-1.5-pro</option>
<option data-type="google" value="gemini-1.5-pro-latest">gemini-1.5-pro-latest</option>
<option data-type="google" value="gemini-1.5-pro-001">gemini-1.5-pro-001</option>

View File

@@ -7,7 +7,7 @@
<div class="inline-drawer-content">
<div id="tts_status">
</div>
<span>Select TTS Provider</span> </br>
<span data-i18n="Select TTS Provider">Select TTS Provider</span> </br>
<div class="tts_block">
<select id="tts_provider" class="flex1">
</select>
@@ -16,49 +16,49 @@
<div>
<label class="checkbox_label" for="tts_enabled">
<input type="checkbox" id="tts_enabled" name="tts_enabled">
<small>Enabled</small>
<small data-i18n="tts_enabled">Enabled</small>
</label>
<label class="checkbox_label" for="tts_narrate_user">
<input type="checkbox" id="tts_narrate_user">
<small>Narrate user messages</small>
<small data-i18n="Narrate user messages">Narrate user messages</small>
</label>
<label class="checkbox_label" for="tts_auto_generation">
<input type="checkbox" id="tts_auto_generation">
<small>Auto Generation</small>
<small data-i18n="Auto Generation">Auto Generation</small>
</label>
<label class="checkbox_label" for="tts_periodic_auto_generation" title="Requires auto generation to be enabled.">
<label class="checkbox_label" for="tts_periodic_auto_generation" data-i18n="[title]Requires auto generation to be enabled." title="Requires auto generation to be enabled.">
<input type="checkbox" id="tts_periodic_auto_generation">
<small>Narrate by paragraphs (when streaming)</small>
<small data-i18n="Narrate by paragraphs (when streaming)">Narrate by paragraphs (when streaming)</small>
</label>
<label class="checkbox_label" for="tts_narrate_quoted">
<input type="checkbox" id="tts_narrate_quoted">
<small>Only narrate "quotes"</small>
<small data-i18n="Only narrate quotes">Only narrate "quotes"</small>
</label>
<label class="checkbox_label" for="tts_narrate_dialogues">
<input type="checkbox" id="tts_narrate_dialogues">
<small>Ignore *text, even "quotes", inside asterisks*</small>
<small data-i18n="Ignore text, even quotes, inside asterisk">Ignore *text, even "quotes", inside asterisks*</small>
</label>
<label class="checkbox_label" for="tts_narrate_translated_only">
<input type="checkbox" id="tts_narrate_translated_only">
<small>Narrate only the translated text</small>
<small data-i18n="Narrate only the translated text">Narrate only the translated text</small>
</label>
<label class="checkbox_label" for="tts_skip_codeblocks">
<input type="checkbox" id="tts_skip_codeblocks">
<small>Skip codeblocks</small>
<small data-i18n="Skip codeblocks">Skip codeblocks</small>
</label>
<label class="checkbox_label" for="tts_skip_tags">
<input type="checkbox" id="tts_skip_tags">
<small>Skip &lt;tagged&gt; blocks</small>
<small data-i18n="Skip tagged blocks">Skip &lt;tagged&gt; blocks</small>
</label>
<label class="checkbox_label" for="tts_pass_asterisks">
<input type="checkbox" id="tts_pass_asterisks">
<small>Pass Asterisks to TTS Engine</small>
<small data-i18n="Pass Asterisks to TTS Engine">Pass Asterisks to TTS Engine</small>
</label>
</div>
<div id="playback_rate_block" class="range-block">
<hr>
<div class="range-block-title justifyLeft" data-i18n="Audio Playback Speed">
<small>Audio Playback Speed</small>
<small data-i18n="Audio Playback Speed">Audio Playback Speed</small>
</div>
<div class="range-block-range-and-counter">
<div class="range-block-range">

View File

@@ -110,6 +110,7 @@ let newGroupMembers = [];
export const group_activation_strategy = {
NATURAL: 0,
LIST: 1,
MANUAL: 2,
};
export const group_generation_mode = {
@@ -852,6 +853,9 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
else if (activationStrategy === group_activation_strategy.LIST) {
activatedMembers = activateListOrder(enabledMembers);
}
else if (activationStrategy === group_activation_strategy.MANUAL && !isUserInput) {
activatedMembers = shuffle(enabledMembers).slice(0, 1).map(x => characters.findIndex(y => y.avatar === x)).filter(x => x !== -1);
}
if (activatedMembers.length === 0) {
//toastr.warning('All group members are disabled. Enable at least one to get a reply.');

View File

@@ -519,14 +519,14 @@ function convertTokenIdLogprobsToText(input) {
const tokenizerId = getTokenizerBestMatch(api);
// Flatten unique token IDs across all logprobs
/** @type {any[]} Flatten unique token IDs across all logprobs */
const tokenIds = Array.from(new Set(input.flatMap(logprobs =>
logprobs.topLogprobs.map(([token]) => token).concat(logprobs.token),
)));
// Submit token IDs to tokenizer to get token text, then build ID->text map
// noinspection JSCheckFunctionSignatures - mutates input in-place
const { chunks } = decodeTextTokens(tokenizerId, tokenIds.map(parseInt));
const { chunks } = decodeTextTokens(tokenizerId, tokenIds);
const tokenIdText = new Map(tokenIds.map((id, i) => [id, chunks[i]]));
// Fixup logprobs data with token text

View File

@@ -4056,7 +4056,7 @@ async function onModelChange() {
if (oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE) {
if (oai_settings.max_context_unlocked) {
$('#openai_max_context').attr('max', max_2mil);
} else if (value.includes('gemini-exp-1114')){
} else if (value.includes('gemini-exp-1114') || value.includes('gemini-exp-1121')) {
$('#openai_max_context').attr('max', max_32k);
} else if (value.includes('gemini-1.5-pro')) {
$('#openai_max_context').attr('max', max_2mil);
@@ -4196,10 +4196,10 @@
else if (['command-light-nightly', 'command-nightly'].includes(oai_settings.cohere_model)) {
$('#openai_max_context').attr('max', max_8k);
}
else if (oai_settings.cohere_model.includes('command-r')) {
else if (oai_settings.cohere_model.includes('command-r') || ['c4ai-aya-expanse-32b'].includes(oai_settings.cohere_model)) {
$('#openai_max_context').attr('max', max_128k);
}
else if (['c4ai-aya-23'].includes(oai_settings.cohere_model)) {
else if (['c4ai-aya-23', 'c4ai-aya-expanse-8b'].includes(oai_settings.cohere_model)) {
$('#openai_max_context').attr('max', max_8k);
}
else {
@@ -4745,6 +4745,7 @@ export function isImageInliningSupported() {
'gemini-1.5-flash-8b-exp-0827',
'gemini-1.5-flash-8b-exp-0924',
'gemini-exp-1114',
'gemini-exp-1121',
'gemini-1.0-pro-vision-latest',
'gemini-1.5-pro',
'gemini-1.5-pro-latest',

View File

@@ -130,10 +130,6 @@ async function sendClaudeRequest(request, response) {
delete requestBody.system;
}
if (useTools) {
// Claude doesn't do prefills on function calls, and doesn't allow empty messages
if (convertedPrompt.messages.length && convertedPrompt.messages[convertedPrompt.messages.length - 1].role === 'assistant') {
convertedPrompt.messages.push({ role: 'user', content: '.' });
}
additionalHeaders['anthropic-beta'] = 'tools-2024-05-16';
requestBody.tool_choice = { type: request.body.tool_choice };
requestBody.tools = request.body.tools
@@ -141,6 +137,10 @@
.map(tool => tool.function)
.map(fn => ({ name: fn.name, description: fn.description, input_schema: fn.parameters }));
// Claude doesn't do prefills on function calls, and doesn't allow empty messages
if (requestBody.tools.length && convertedPrompt.messages.length && convertedPrompt.messages[convertedPrompt.messages.length - 1].role === 'assistant') {
convertedPrompt.messages.push({ role: 'user', content: [{ type: 'text', text: '\u200b' }] });
}
if (enableSystemPromptCache && requestBody.tools.length) {
requestBody.tools[requestBody.tools.length - 1]['cache_control'] = { type: 'ephemeral' };
}
@@ -280,7 +280,7 @@ async function sendMakerSuiteRequest(request, response) {
delete generationConfig.stopSequences;
}
const should_use_system_prompt = (model.includes('gemini-1.5-flash') || model.includes('gemini-1.5-pro') || model.includes('gemini-exp-1114')) && request.body.use_makersuite_sysprompt;
const should_use_system_prompt = (model.includes('gemini-1.5-flash') || model.includes('gemini-1.5-pro') || model.includes('gemini-exp-1114') || model.includes('gemini-exp-1121')) && request.body.use_makersuite_sysprompt;
const prompt = convertGooglePrompt(request.body.messages, model, should_use_system_prompt, request.body.char_name, request.body.user_name);
let body = {
contents: prompt.contents,

View File

@@ -347,6 +347,7 @@ export function convertGooglePrompt(messages, model, useSysPrompt = false, charN
'gemini-1.5-flash-8b-exp-0827',
'gemini-1.5-flash-8b-exp-0924',
'gemini-exp-1114',
'gemini-exp-1121',
'gemini-1.5-pro',
'gemini-1.5-pro-latest',
'gemini-1.5-pro-001',
@@ -628,11 +629,30 @@ export function convertMistralMessages(messages, charName = '', userName = '') {
export function mergeMessages(messages, charName, userName, strict) {
let mergedMessages = [];
/** @type {Map<string,object>} */
const contentTokens = new Map();
// Remove names from the messages
messages.forEach((message) => {
if (!message.content) {
message.content = '';
}
// Flatten contents and replace image URLs with random tokens
if (Array.isArray(message.content)) {
const text = message.content.map((content) => {
if (content.type === 'text') {
return content.text;
}
// Could be extended with other non-text types
if (content.type === 'image_url') {
const token = crypto.randomBytes(32).toString('base64');
contentTokens.set(token, content);
return token;
}
return '';
}).join('\n\n');
message.content = text;
}
if (message.role === 'system' && message.name === 'example_assistant') {
if (charName && !message.content.startsWith(`${charName}: `)) {
message.content = `${charName}: ${message.content}`;
@@ -673,6 +693,32 @@
});
}
// Check for content tokens and replace them with the actual content objects
if (contentTokens.size > 0) {
mergedMessages.forEach((message) => {
const hasValidToken = Array.from(contentTokens.keys()).some(token => message.content.includes(token));
if (hasValidToken) {
const splitContent = message.content.split('\n\n');
const mergedContent = [];
splitContent.forEach((content) => {
if (contentTokens.has(content)) {
mergedContent.push(contentTokens.get(content));
} else {
if (mergedContent.length > 0 && mergedContent[mergedContent.length - 1].type === 'text') {
mergedContent[mergedContent.length - 1].text += `\n\n${content}`;
} else {
mergedContent.push({ type: 'text', text: content });
}
}
});
message.content = mergedContent;
}
});
}
if (strict) {
for (let i = 0; i < mergedMessages.length; i++) {
// Force mid-prompt system messages to be user messages