Merge branch 'staging' into fix/connRefusedErrMsg
This commit is contained in:
commit
70c45fb001
|
@ -91,9 +91,6 @@
|
|||
<div class="margin0 title_restorable standoutHeader">
|
||||
<strong>
|
||||
<span data-i18n="kobldpresets">Kobold Presets</span>
|
||||
<a href="https://docs.sillytavern.app/usage/api-connections/koboldai/" class="notes-link" target="_blank">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
</strong>
|
||||
|
||||
<div class="flex-container gap3px">
|
||||
|
@ -1965,7 +1962,7 @@
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="range-block m-t-1" data-source="openai,openrouter,scale">
|
||||
<div class="range-block m-t-1" data-source="openai,openrouter,scale,custom">
|
||||
<div id="logit_bias_openai" class="range-block-title openai_restorable" data-i18n="Logit Bias">
|
||||
Logit Bias
|
||||
</div>
|
||||
|
@ -2740,6 +2737,7 @@
|
|||
</optgroup>
|
||||
<optgroup label="GPT-4o">
|
||||
<option value="gpt-4o">gpt-4o</option>
|
||||
<option value="gpt-4o-2024-11-20">gpt-4o-2024-11-20</option>
|
||||
<option value="gpt-4o-2024-08-06">gpt-4o-2024-08-06</option>
|
||||
<option value="gpt-4o-2024-05-13">gpt-4o-2024-05-13</option>
|
||||
<option value="chatgpt-4o-latest">chatgpt-4o-latest</option>
|
||||
|
@ -2979,6 +2977,7 @@
|
|||
<option value="chat-bison-001">PaLM 2 Chat (Legacy)</option>
|
||||
</optgroup>
|
||||
<optgroup label="Subversions">
|
||||
<option value="gemini-exp-1121">Gemini Experimental 2024-11-21</option>
|
||||
<option value="gemini-exp-1114">Gemini Experimental 2024-11-14</option>
|
||||
<option value="gemini-1.5-pro-exp-0801">Gemini 1.5 Pro Experiment 2024-08-01</option>
|
||||
<option value="gemini-1.5-pro-exp-0827">Gemini 1.5 Pro Experiment 2024-08-27</option>
|
||||
|
@ -3159,6 +3158,8 @@
|
|||
<h4 data-i18n="Cohere Model">Cohere Model</h4>
|
||||
<select id="model_cohere_select">
|
||||
<optgroup label="Stable">
|
||||
<option value="c4ai-aya-expanse-32b">c4ai-aya-expanse-32b</option>
|
||||
<option value="c4ai-aya-expanse-8b">c4ai-aya-expanse-8b</option>
|
||||
<option value="c4ai-aya-23-35b">c4ai-aya-23-35b</option>
|
||||
<option value="c4ai-aya-23-8b">c4ai-aya-23-8b</option>
|
||||
<option value="command-light">command-light</option>
|
||||
|
@ -3270,8 +3271,14 @@
|
|||
</div>
|
||||
<div id="AdvancedFormatting" class="drawer-content">
|
||||
<div class="flex-container alignItemsBaseline">
|
||||
<h3 class="margin0 flex1" data-i18n="Advanced Formatting">
|
||||
Advanced Formatting
|
||||
<h3 class="margin0 flex1 flex-container alignItemsBaseline">
|
||||
<span data-i18n="Advanced Formatting">
|
||||
Advanced Formatting
|
||||
</span>
|
||||
|
||||
<a href="https://docs.sillytavern.app/usage/prompts/" class="notes-link" target="_blank">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
</h3>
|
||||
<div class="flex-container">
|
||||
<input id="af_master_import_file" type="file" hidden accept=".json" class="displayNone">
|
||||
|
@ -3291,9 +3298,12 @@
|
|||
<h4 class="standoutHeader title_restorable">
|
||||
<div>
|
||||
<span data-i18n="Context Template">Context Template</span>
|
||||
<a href="https://docs.sillytavern.app/usage/core-concepts/advancedformatting/#context-template" class="notes-link" target="_blank">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
</div>
|
||||
<div class="flex-container">
|
||||
<label for="context_derived" class="checkbox_label flex1" title="Derive from Model Metadata, if possible." data-i18n="[title]context_derived">
|
||||
<input id="context_derived" type="checkbox" style="display:none;" />
|
||||
<small><i class="fa-solid fa-bolt menu_button margin0"></i></small>
|
||||
</label>
|
||||
</div>
|
||||
</h4>
|
||||
<div class="flex-container" title="Select your current Context Template" data-i18n="[title]Select your current Context Template">
|
||||
|
@ -3390,11 +3400,12 @@
|
|||
<h4 class="standoutHeader title_restorable justifySpaceBetween">
|
||||
<div class="flex-container">
|
||||
<span data-i18n="Instruct Template">Instruct Template</span>
|
||||
<a href="https://docs.sillytavern.app/usage/core-concepts/instructmode/" class="notes-link" target="_blank">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
</div>
|
||||
<div class="flex-container">
|
||||
<label for="instruct_derived" class="checkbox_label flex1" title="Derive from Model Metadata, if possible." data-i18n="[title]instruct_derived">
|
||||
<input id="instruct_derived" type="checkbox" style="display:none;" />
|
||||
<small><i class="fa-solid fa-bolt menu_button margin0"></i></small>
|
||||
</label>
|
||||
<label for="instruct_bind_to_context" class="checkbox_label flex1" title="Bind to Context
If enabled, Context templates will be automatically selected based on selected Instruct template name or by preference." data-i18n="[title]instruct_bind_to_context">
|
||||
<input id="instruct_bind_to_context" type="checkbox" style="display:none;" />
|
||||
<small><i class="fa-solid fa-link menu_button margin0"></i></small>
|
||||
|
@ -3575,9 +3586,6 @@
|
|||
<h4 class="standoutHeader title_restorable justifySpaceBetween">
|
||||
<div class="flex-container">
|
||||
<span data-i18n="System Prompt">System Prompt</span>
|
||||
<a href="https://docs.sillytavern.app/usage/core-concepts/advancedformatting/#system-prompt" class="notes-link" target="_blank">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
</div>
|
||||
<div class="flex-container">
|
||||
<label id="sysprompt_enabled_label" for="sysprompt_enabled" class="checkbox_label flex1" title="Enable System Prompt" data-i18n="[title]sysprompt_enabled">
|
||||
|
@ -3645,7 +3653,7 @@
|
|||
<div name="tokenizerSettingsBlock">
|
||||
<div name="tokenizerSelectorBlock">
|
||||
<h4 class="standoutHeader"><span data-i18n="Tokenizer">Tokenizer</span>
|
||||
<a href="https://docs.sillytavern.app/usage/core-concepts/advancedformatting/#tokenizer" class="notes-link" target="_blank">
|
||||
<a href="https://docs.sillytavern.app/usage/prompts/tokenizer/" class="notes-link" target="_blank">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
</h4>
|
||||
|
@ -3671,10 +3679,8 @@
|
|||
</div>
|
||||
<div class="range-block flex-container flexnowrap" name="tokenPaddingBlock">
|
||||
<div class="range-block-title justifyLeft">
|
||||
<small data-i18n="Token Padding" class="flex-container">Token Padding
|
||||
<a href="https://docs.sillytavern.app/usage/core-concepts/advancedformatting/#token-padding" class="notes-link" target="_blank">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
<small data-i18n="Token Padding">
|
||||
Token Padding
|
||||
</small>
|
||||
</div>
|
||||
<input id="token_padding" class="text_pole textarea_compact" type="number" min="-2048" max="2048" />
|
||||
|
@ -3732,7 +3738,7 @@
|
|||
</div>
|
||||
<h3 class="margin0">
|
||||
<span data-i18n="Worlds/Lorebooks">Worlds/Lorebooks</span>
|
||||
<a href="https://docs.sillytavern.app/usage/core-concepts/worldinfo/" class="notes-link" target="_blank">
|
||||
<a href="https://docs.sillytavern.app/usage/worldinfo/" class="notes-link" target="_blank">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
</h3>
|
||||
|
@ -4259,7 +4265,7 @@
|
|||
<audio id="audio_message_sound" src="sounds/message.mp3" hidden></audio>
|
||||
<span>
|
||||
<small data-i18n="Message Sound">Message Sound</small>
|
||||
<a href="https://docs.sillytavern.app/usage/core-concepts/uicustomization/#message-sound" class="notes-link" target="_blank">
|
||||
<a href="https://docs.sillytavern.app/usage/user_settings/uicustomization/#message-sound" class="notes-link" target="_blank">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
</span>
|
||||
|
@ -4476,7 +4482,7 @@
|
|||
</div>
|
||||
<div nane="AutoContiueBlock" class="inline-drawer wide100p flexFlowColumn">
|
||||
<div class="inline-drawer-toggle inline-drawer-header userSettingsInnerExpandable" title="Automatically 'continue' a response if the model stopped before reaching a certain amount of tokens.">
|
||||
<b><span data-i18n="Auto-swipe">Auto-Continue</span></b>
|
||||
<b><span data-i18n="Auto-Continue">Auto-Continue</span></b>
|
||||
<div class="fa-solid fa-circle-chevron-down inline-drawer-icon down"></div>
|
||||
</div>
|
||||
<div class="inline-drawer-content">
|
||||
|
@ -5791,7 +5797,7 @@
|
|||
<div class="flex-container justifySpaceBetween">
|
||||
<small for="group">
|
||||
<span data-i18n="Inclusion Group">Inclusion Group</span>
|
||||
<a href="https://docs.sillytavern.app/usage/core-concepts/worldinfo/#inclusion-group" class="notes-link" target="_blank" title="Inclusion Groups ensure only one entry from a group is activated at a time, if multiple are triggered. Supports multiple comma-separated groups. Documentation: World Info - Inclusion Group" data-i18n="[title]Inclusion Groups ensure only one entry from a group is activated at a time, if multiple are triggered.Documentation: World Info - Inclusion Group">
|
||||
<a href="https://docs.sillytavern.app/usage/worldinfo/#inclusion-group" class="notes-link" target="_blank" title="Inclusion Groups ensure only one entry from a group is activated at a time, if multiple are triggered. Supports multiple comma-separated groups. Documentation: World Info - Inclusion Group" data-i18n="[title]Inclusion Groups ensure only one entry from a group is activated at a time, if multiple are triggered.Documentation: World Info - Inclusion Group">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
</small>
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -267,6 +267,7 @@ import { applyBrowserFixes } from './scripts/browser-fixes.js';
|
|||
import { initServerHistory } from './scripts/server-history.js';
|
||||
import { initSettingsSearch } from './scripts/setting-search.js';
|
||||
import { initBulkEdit } from './scripts/bulk-edit.js';
|
||||
import { deriveTemplatesFromChatTemplate } from './scripts/chat-templates.js';
|
||||
|
||||
//exporting functions and vars for mods
|
||||
export {
|
||||
|
@ -1235,6 +1236,38 @@ async function getStatusTextgen() {
|
|||
const supportsTokenization = response.headers.get('x-supports-tokenization') === 'true';
|
||||
supportsTokenization ? sessionStorage.setItem(TOKENIZER_SUPPORTED_KEY, 'true') : sessionStorage.removeItem(TOKENIZER_SUPPORTED_KEY);
|
||||
|
||||
const wantsInstructDerivation = (power_user.instruct.enabled && power_user.instruct.derived);
|
||||
const wantsContextDerivation = power_user.context_derived;
|
||||
const supportsChatTemplate = [textgen_types.KOBOLDCPP, textgen_types.LLAMACPP].includes(textgen_settings.type);
|
||||
if (supportsChatTemplate && (wantsInstructDerivation || wantsContextDerivation)) {
|
||||
const response = await fetch('/api/backends/text-completions/props', {
|
||||
method: 'POST',
|
||||
headers: getRequestHeaders(),
|
||||
body: JSON.stringify({
|
||||
api_server: endpoint,
|
||||
api_type: textgen_settings.type,
|
||||
}),
|
||||
});
|
||||
|
||||
if (response.ok) {
|
||||
const data = await response.json();
|
||||
if (data) {
|
||||
const { chat_template, chat_template_hash } = data;
|
||||
console.log(`We have chat template ${chat_template.split('\n')[0]}...`);
|
||||
const templates = await deriveTemplatesFromChatTemplate(chat_template, chat_template_hash);
|
||||
if (templates) {
|
||||
const { context, instruct } = templates;
|
||||
if (wantsContextDerivation) {
|
||||
selectContextPreset(context, { isAuto: true });
|
||||
}
|
||||
if (wantsInstructDerivation) {
|
||||
selectInstructPreset(instruct, { isAuto: true });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// We didn't get a 200 status code, but the endpoint has an explanation. Which means it DID connect, but I digress.
|
||||
if (online_status === 'no_connection' && data.response) {
|
||||
toastr.error(data.response, t`API Error`, { timeOut: 5000, preventDuplicates: true });
|
||||
|
|
|
@ -37,53 +37,85 @@ const chara_note_position = {
|
|||
};
|
||||
|
||||
function setNoteTextCommand(_, text) {
|
||||
$('#extension_floating_prompt').val(text).trigger('input');
|
||||
toastr.success(t`Author's Note text updated`);
|
||||
return '';
|
||||
if (text) {
|
||||
$('#extension_floating_prompt').val(text).trigger('input');
|
||||
toastr.success(t`Author's Note text updated`);
|
||||
}
|
||||
return chat_metadata[metadata_keys.prompt];
|
||||
}
|
||||
|
||||
function setNoteDepthCommand(_, text) {
|
||||
const value = Number(text);
|
||||
if (text) {
|
||||
const value = Number(text);
|
||||
|
||||
if (Number.isNaN(value)) {
|
||||
toastr.error(t`Not a valid number`);
|
||||
return;
|
||||
if (Number.isNaN(value)) {
|
||||
toastr.error(t`Not a valid number`);
|
||||
return;
|
||||
}
|
||||
|
||||
$('#extension_floating_depth').val(Math.abs(value)).trigger('input');
|
||||
toastr.success(t`Author's Note depth updated`);
|
||||
}
|
||||
|
||||
$('#extension_floating_depth').val(Math.abs(value)).trigger('input');
|
||||
toastr.success(t`Author's Note depth updated`);
|
||||
return '';
|
||||
return chat_metadata[metadata_keys.depth];
|
||||
}
|
||||
|
||||
function setNoteIntervalCommand(_, text) {
|
||||
const value = Number(text);
|
||||
if (text) {
|
||||
const value = Number(text);
|
||||
|
||||
if (Number.isNaN(value)) {
|
||||
toastr.error(t`Not a valid number`);
|
||||
return;
|
||||
if (Number.isNaN(value)) {
|
||||
toastr.error(t`Not a valid number`);
|
||||
return;
|
||||
}
|
||||
|
||||
$('#extension_floating_interval').val(Math.abs(value)).trigger('input');
|
||||
toastr.success(t`Author's Note frequency updated`);
|
||||
}
|
||||
|
||||
$('#extension_floating_interval').val(Math.abs(value)).trigger('input');
|
||||
toastr.success(t`Author's Note frequency updated`);
|
||||
return '';
|
||||
return chat_metadata[metadata_keys.interval];
|
||||
}
|
||||
|
||||
function setNotePositionCommand(_, text) {
|
||||
const validPositions = {
|
||||
'after': 0,
|
||||
'scenario': 0,
|
||||
'chat': 1,
|
||||
'before_scenario': 2,
|
||||
'before': 2,
|
||||
};
|
||||
|
||||
const position = validPositions[text?.trim()];
|
||||
if (text) {
|
||||
const position = validPositions[text?.trim()?.toLowerCase()];
|
||||
|
||||
if (Number.isNaN(position)) {
|
||||
toastr.error(t`Not a valid position`);
|
||||
return;
|
||||
if (typeof position === 'undefined') {
|
||||
toastr.error(t`Not a valid position`);
|
||||
return;
|
||||
}
|
||||
|
||||
$(`input[name="extension_floating_position"][value="${position}"]`).prop('checked', true).trigger('input');
|
||||
toastr.info(t`Author's Note position updated`);
|
||||
}
|
||||
return Object.keys(validPositions).find(key => validPositions[key] == chat_metadata[metadata_keys.position]);
|
||||
}
|
||||
|
||||
$(`input[name="extension_floating_position"][value="${position}"]`).prop('checked', true).trigger('input');
|
||||
toastr.info(t`Author's Note position updated`);
|
||||
return '';
|
||||
function setNoteRoleCommand(_, text) {
|
||||
const validRoles = {
|
||||
'system': 0,
|
||||
'user': 1,
|
||||
'assistant': 2,
|
||||
};
|
||||
|
||||
if (text) {
|
||||
const role = validRoles[text?.trim()?.toLowerCase()];
|
||||
|
||||
if (typeof role === 'undefined') {
|
||||
toastr.error(t`Not a valid role`);
|
||||
return;
|
||||
}
|
||||
|
||||
$('#extension_floating_role').val(Math.abs(role)).trigger('input');
|
||||
toastr.info(t`Author's Note role updated`);
|
||||
}
|
||||
return Object.keys(validRoles).find(key => validRoles[key] == chat_metadata[metadata_keys.role]);
|
||||
}
|
||||
|
||||
function updateSettings() {
|
||||
|
@ -462,57 +494,84 @@ export function initAuthorsNote() {
|
|||
});
|
||||
$('#option_toggle_AN').on('click', onANMenuItemClick);
|
||||
|
||||
SlashCommandParser.addCommandObject(SlashCommand.fromProps({ name: 'note',
|
||||
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
|
||||
name: 'note',
|
||||
callback: setNoteTextCommand,
|
||||
returns: 'current author\'s note',
|
||||
unnamedArgumentList: [
|
||||
new SlashCommandArgument(
|
||||
'text', [ARGUMENT_TYPE.STRING], true,
|
||||
'text', [ARGUMENT_TYPE.STRING], false,
|
||||
),
|
||||
],
|
||||
helpString: `
|
||||
<div>
|
||||
Sets an author's note for the currently selected chat.
|
||||
Sets an author's note for the currently selected chat if specified and returns the current note.
|
||||
</div>
|
||||
`,
|
||||
}));
|
||||
SlashCommandParser.addCommandObject(SlashCommand.fromProps({ name: 'depth',
|
||||
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
|
||||
name: 'note-depth',
|
||||
aliases: ['depth'],
|
||||
callback: setNoteDepthCommand,
|
||||
returns: 'current author\'s note depth',
|
||||
unnamedArgumentList: [
|
||||
new SlashCommandArgument(
|
||||
'number', [ARGUMENT_TYPE.NUMBER], true,
|
||||
'number', [ARGUMENT_TYPE.NUMBER], false,
|
||||
),
|
||||
],
|
||||
helpString: `
|
||||
<div>
|
||||
Sets an author's note depth for in-chat positioning.
|
||||
Sets an author's note depth for in-chat positioning if specified and returns the current depth.
|
||||
</div>
|
||||
`,
|
||||
}));
|
||||
SlashCommandParser.addCommandObject(SlashCommand.fromProps({ name: 'freq',
|
||||
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
|
||||
name: 'note-frequency',
|
||||
aliases: ['freq', 'note-freq'],
|
||||
callback: setNoteIntervalCommand,
|
||||
returns: 'current author\'s note insertion frequency',
|
||||
namedArgumentList: [],
|
||||
unnamedArgumentList: [
|
||||
new SlashCommandArgument(
|
||||
'number', [ARGUMENT_TYPE.NUMBER], true,
|
||||
'number', [ARGUMENT_TYPE.NUMBER], false,
|
||||
),
|
||||
],
|
||||
helpString: `
|
||||
<div>
|
||||
Sets an author's note insertion frequency.
|
||||
Sets an author's note insertion frequency if specified and returns the current frequency.
|
||||
</div>
|
||||
`,
|
||||
}));
|
||||
SlashCommandParser.addCommandObject(SlashCommand.fromProps({ name: 'pos',
|
||||
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
|
||||
name: 'note-position',
|
||||
callback: setNotePositionCommand,
|
||||
aliases: ['pos', 'note-pos'],
|
||||
returns: 'current author\'s note insertion position',
|
||||
namedArgumentList: [],
|
||||
unnamedArgumentList: [
|
||||
new SlashCommandArgument(
|
||||
'position', [ARGUMENT_TYPE.STRING], true, false, null, ['chat', 'scenario'],
|
||||
'position', [ARGUMENT_TYPE.STRING], false, false, null, ['before', 'after', 'chat'],
|
||||
),
|
||||
],
|
||||
helpString: `
|
||||
<div>
|
||||
Sets an author's note position.
|
||||
Sets an author's note position if specified and returns the current position.
|
||||
</div>
|
||||
`,
|
||||
}));
|
||||
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
|
||||
name: 'note-role',
|
||||
callback: setNoteRoleCommand,
|
||||
returns: 'current author\'s note chat insertion role',
|
||||
namedArgumentList: [],
|
||||
unnamedArgumentList: [
|
||||
new SlashCommandArgument(
|
||||
'position', [ARGUMENT_TYPE.STRING], false, false, null, ['system', 'user', 'assistant'],
|
||||
),
|
||||
],
|
||||
helpString: `
|
||||
<div>
|
||||
Sets an author's note chat insertion role if specified and returns the current role.
|
||||
</div>
|
||||
`,
|
||||
}));
|
||||
|
|
|
@ -89,7 +89,7 @@
|
|||
* @property {boolean} markdownOnly - Whether the script only applies to Markdown
|
||||
* @property {boolean} promptOnly - Whether the script only applies to prompts
|
||||
* @property {boolean} runOnEdit - Whether the script runs on edit
|
||||
* @property {boolean} substituteRegex - Whether the regex should be substituted
|
||||
* @property {number} substituteRegex - Whether the regex should be substituted
|
||||
* @property {number} minDepth - The minimum depth
|
||||
* @property {number} maxDepth - The maximum depth
|
||||
*/
|
||||
|
|
|
@ -0,0 +1,81 @@
|
|||
// the hash can be obtained from command line e.g. via: MODEL=path_to_model; python -c "import json, hashlib, sys; print(hashlib.sha256(json.load(open('"$MODEL"/tokenizer_config.json'))['chat_template'].encode()).hexdigest())"
|
||||
// note that chat templates must be trimmed to match the llama.cpp metadata value
|
||||
const hash_derivations = {
|
||||
// Meta
|
||||
'e10ca381b1ccc5cf9db52e371f3b6651576caee0a630b452e2816b2d404d4b65':
|
||||
// Meta-Llama-3.1-8B-Instruct
|
||||
// Meta-Llama-3.1-70B-Instruct
|
||||
'Llama 3 Instruct'
|
||||
,
|
||||
'5816fce10444e03c2e9ee1ef8a4a1ea61ae7e69e438613f3b17b69d0426223a4':
|
||||
// Llama-3.2-1B-Instruct
|
||||
// Llama-3.2-3B-Instruct
|
||||
'Llama 3 Instruct'
|
||||
,
|
||||
'73e87b1667d87ab7d7b579107f01151b29ce7f3ccdd1018fdc397e78be76219d':
|
||||
// Nemotron 70B
|
||||
'Llama 3 Instruct'
|
||||
,
|
||||
|
||||
// Mistral
|
||||
// Mistral Reference: https://github.com/mistralai/mistral-common
|
||||
'e16746b40344d6c5b5265988e0328a0bf7277be86f1c335156eae07e29c82826':
|
||||
// Mistral-Small-Instruct-2409
|
||||
// Mistral-Large-Instruct-2407
|
||||
'Mistral V2 & V3'
|
||||
,
|
||||
'3c4ad5fa60dd8c7ccdf82fa4225864c903e107728fcaf859fa6052cb80c92ee9':
|
||||
// Mistral-Large-Instruct-2411
|
||||
'Mistral V7' // https://huggingface.co/mistralai/Mistral-Large-Instruct-2411
|
||||
,
|
||||
'e4676cb56dffea7782fd3e2b577cfaf1e123537e6ef49b3ec7caa6c095c62272':
|
||||
// Mistral-Nemo-Instruct-2407
|
||||
'Mistral V3-Tekken'
|
||||
,
|
||||
'26a59556925c987317ce5291811ba3b7f32ec4c647c400c6cc7e3a9993007ba7':
|
||||
// Mistral-7B-Instruct-v0.3
|
||||
'Mistral V2 & V3'
|
||||
,
|
||||
|
||||
// Gemma
|
||||
'ecd6ae513fe103f0eb62e8ab5bfa8d0fe45c1074fa398b089c93a7e70c15cfd6':
|
||||
// gemma-2-9b-it
|
||||
// gemma-2-27b-it
|
||||
'Gemma 2'
|
||||
,
|
||||
'87fa45af6cdc3d6a9e4dd34a0a6848eceaa73a35dcfe976bd2946a5822a38bf3':
|
||||
// gemma-2-2b-it
|
||||
'Gemma 2'
|
||||
,
|
||||
|
||||
// Cohere
|
||||
'3b54f5c219ae1caa5c0bb2cdc7c001863ca6807cf888e4240e8739fa7eb9e02e':
|
||||
// command-r-08-2024
|
||||
'Command R'
|
||||
,
|
||||
};
|
||||
|
||||
const substr_derivations = {
|
||||
'<|im_start|>': 'ChatML', // qwen2.5, ...
|
||||
};
|
||||
|
||||
const parse_derivation = derivation => (typeof derivation === 'string') ? {
|
||||
'context': derivation,
|
||||
'instruct': derivation,
|
||||
} : derivation;
|
||||
|
||||
export async function deriveTemplatesFromChatTemplate(chat_template, hash) {
|
||||
if (hash in hash_derivations) {
|
||||
return parse_derivation(hash_derivations[hash]);
|
||||
}
|
||||
|
||||
// heuristics
|
||||
for (const [substr, derivation] of Object.entries(substr_derivations) ) {
|
||||
if (chat_template.includes(substr)) {
|
||||
return parse_derivation(derivation);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Unknown chat template hash: ${hash} for [${chat_template}]`);
|
||||
return null;
|
||||
}
|
|
@ -61,6 +61,7 @@
|
|||
<option data-type="google" value="gemini-1.5-flash-8b-exp-0827">gemini-1.5-flash-8b-exp-0827</option>
|
||||
<option data-type="google" value="gemini-1.5-flash-8b-exp-0924">gemini-1.5-flash-8b-exp-0924</option>
|
||||
<option data-type="google" value="gemini-exp-1114">gemini-exp-1114</option>
|
||||
<option data-type="google" value="gemini-exp-1121">gemini-exp-1121</option>
|
||||
<option data-type="google" value="gemini-1.5-pro">gemini-1.5-pro</option>
|
||||
<option data-type="google" value="gemini-1.5-pro-latest">gemini-1.5-pro-latest</option>
|
||||
<option data-type="google" value="gemini-1.5-pro-001">gemini-1.5-pro-001</option>
|
||||
|
|
|
@ -121,12 +121,16 @@
|
|||
<input type="checkbox" name="run_on_edit" />
|
||||
<span data-i18n="Run On Edit">Run On Edit</span>
|
||||
</label>
|
||||
<label class="checkbox flex-container" data-i18n="[title]ext_regex_substitute_regex_desc" title="Substitute {{macros}} in Find Regex before running it">
|
||||
<input type="checkbox" name="substitute_regex" />
|
||||
<label class="checkbox flex-container flexNoGap marginBot5" data-i18n="[title]ext_regex_substitute_regex_desc" title="Substitute {{macros}} in Find Regex before running it">
|
||||
<span>
|
||||
<span data-i18n="Substitute Regex">Substitute Regex</span>
|
||||
<small data-i18n="Macro in Find Regex">Macros in Find Regex</small>
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</span>
|
||||
<select name="substitute_regex" class="text_pole textarea_compact margin0">
|
||||
<option value="0" data-i18n="Don't substitute">Don't substitute</option>
|
||||
<option value="1" data-i18n="Substitute (raw)">Substitute (raw)</option>
|
||||
<option value="2" data-i18n="Substitute (escaped)">Substitute (escaped)</option>
|
||||
</select>
|
||||
</label>
|
||||
<span>
|
||||
<small data-i18n="ext_regex_other_options" data-i18n="Ephemerality">Ephemerality</small>
|
||||
|
|
|
@ -22,6 +22,12 @@ const regex_placement = {
|
|||
WORLD_INFO: 5,
|
||||
};
|
||||
|
||||
export const substitute_find_regex = {
|
||||
NONE: 0,
|
||||
RAW: 1,
|
||||
ESCAPED: 2,
|
||||
};
|
||||
|
||||
function sanitizeRegexMacro(x) {
|
||||
return (x && typeof x === 'string') ?
|
||||
x.replaceAll(/[\n\r\t\v\f\0.^$*+?{}[\]\\/|()]/gs, function (s) {
|
||||
|
@ -131,9 +137,20 @@ function runRegexScript(regexScript, rawString, { characterOverride } = {}) {
|
|||
return newString;
|
||||
}
|
||||
|
||||
const regexString = regexScript.substituteRegex
|
||||
? substituteParamsExtended(regexScript.findRegex, {}, sanitizeRegexMacro)
|
||||
: regexScript.findRegex;
|
||||
const getRegexString = () => {
|
||||
switch(Number(regexScript.substituteRegex)) {
|
||||
case substitute_find_regex.NONE:
|
||||
return regexScript.findRegex;
|
||||
case substitute_find_regex.RAW:
|
||||
return substituteParamsExtended(regexScript.findRegex);
|
||||
case substitute_find_regex.ESCAPED:
|
||||
return substituteParamsExtended(regexScript.findRegex, {}, sanitizeRegexMacro);
|
||||
default:
|
||||
console.warn(`runRegexScript: Unknown substituteRegex value ${regexScript.substituteRegex}. Using raw regex.`);
|
||||
return regexScript.findRegex;
|
||||
}
|
||||
};
|
||||
const regexString = getRegexString();
|
||||
const findRegex = regexFromString(regexString);
|
||||
|
||||
// The user skill issued. Return with nothing.
|
||||
|
|
|
@ -8,7 +8,7 @@ import { enumIcons } from '../../slash-commands/SlashCommandCommonEnumsProvider.
|
|||
import { SlashCommandEnumValue, enumTypes } from '../../slash-commands/SlashCommandEnumValue.js';
|
||||
import { SlashCommandParser } from '../../slash-commands/SlashCommandParser.js';
|
||||
import { download, getFileText, getSortableDelay, uuidv4 } from '../../utils.js';
|
||||
import { regex_placement, runRegexScript } from './engine.js';
|
||||
import { regex_placement, runRegexScript, substitute_find_regex } from './engine.js';
|
||||
import { t } from '../../i18n.js';
|
||||
|
||||
/**
|
||||
|
@ -227,7 +227,7 @@ async function onRegexEditorOpenClick(existingId, isScoped) {
|
|||
editorHtml.find('input[name="only_format_display"]').prop('checked', existingScript.markdownOnly ?? false);
|
||||
editorHtml.find('input[name="only_format_prompt"]').prop('checked', existingScript.promptOnly ?? false);
|
||||
editorHtml.find('input[name="run_on_edit"]').prop('checked', existingScript.runOnEdit ?? false);
|
||||
editorHtml.find('input[name="substitute_regex"]').prop('checked', existingScript.substituteRegex ?? false);
|
||||
editorHtml.find('select[name="substitute_regex"]').val(existingScript.substituteRegex ?? substitute_find_regex.NONE);
|
||||
editorHtml.find('input[name="min_depth"]').val(existingScript.minDepth ?? '');
|
||||
editorHtml.find('input[name="max_depth"]').val(existingScript.maxDepth ?? '');
|
||||
|
||||
|
@ -267,7 +267,7 @@ async function onRegexEditorOpenClick(existingId, isScoped) {
|
|||
findRegex: editorHtml.find('.find_regex').val(),
|
||||
replaceString: editorHtml.find('.regex_replace_string').val(),
|
||||
trimStrings: String(editorHtml.find('.regex_trim_strings').val()).split('\n').filter((e) => e.length !== 0) || [],
|
||||
substituteRegex: editorHtml.find('input[name="substitute_regex"]').prop('checked'),
|
||||
substituteRegex: Number(editorHtml.find('select[name="substitute_regex"]').val()),
|
||||
};
|
||||
const rawTestString = String(editorHtml.find('#regex_test_input').val());
|
||||
const result = runRegexScript(testScript, rawTestString);
|
||||
|
@ -295,7 +295,7 @@ async function onRegexEditorOpenClick(existingId, isScoped) {
|
|||
markdownOnly: editorHtml.find('input[name="only_format_display"]').prop('checked'),
|
||||
promptOnly: editorHtml.find('input[name="only_format_prompt"]').prop('checked'),
|
||||
runOnEdit: editorHtml.find('input[name="run_on_edit"]').prop('checked'),
|
||||
substituteRegex: editorHtml.find('input[name="substitute_regex"]').prop('checked'),
|
||||
substituteRegex: Number(editorHtml.find('select[name="substitute_regex"]').val()),
|
||||
minDepth: parseInt(String(editorHtml.find('input[name="min_depth"]').val())),
|
||||
maxDepth: parseInt(String(editorHtml.find('input[name="max_depth"]').val())),
|
||||
};
|
||||
|
|
|
@ -1,37 +1,62 @@
|
|||
import { Popper } from '../../../lib.js';
|
||||
import {
|
||||
saveSettingsDebounced,
|
||||
systemUserName,
|
||||
getRequestHeaders,
|
||||
event_types,
|
||||
eventSource,
|
||||
generateQuietPrompt,
|
||||
this_chid,
|
||||
getCurrentChatId,
|
||||
animation_duration,
|
||||
appendMediaToMessage,
|
||||
getUserAvatar,
|
||||
user_avatar,
|
||||
getCharacterAvatar,
|
||||
event_types,
|
||||
eventSource,
|
||||
formatCharacterAvatar,
|
||||
generateQuietPrompt,
|
||||
getCharacterAvatar,
|
||||
getCurrentChatId,
|
||||
getRequestHeaders,
|
||||
getUserAvatar,
|
||||
saveSettingsDebounced,
|
||||
substituteParams,
|
||||
substituteParamsExtended,
|
||||
systemUserName,
|
||||
this_chid,
|
||||
user_avatar,
|
||||
} from '../../../script.js';
|
||||
import { getApiUrl, getContext, extension_settings, doExtrasFetch, modules, renderExtensionTemplateAsync, writeExtensionField } from '../../extensions.js';
|
||||
import {
|
||||
doExtrasFetch,
|
||||
extension_settings,
|
||||
getApiUrl,
|
||||
getContext,
|
||||
modules,
|
||||
renderExtensionTemplateAsync,
|
||||
writeExtensionField,
|
||||
} from '../../extensions.js';
|
||||
import { selected_group } from '../../group-chats.js';
|
||||
import { stringFormat, initScrollHeight, resetScrollHeight, getCharaFilename, saveBase64AsFile, getBase64Async, delay, isTrueBoolean, debounce, isFalseBoolean, deepMerge } from '../../utils.js';
|
||||
import {
|
||||
debounce,
|
||||
deepMerge,
|
||||
delay,
|
||||
getBase64Async,
|
||||
getCharaFilename,
|
||||
initScrollHeight,
|
||||
isFalseBoolean,
|
||||
isTrueBoolean,
|
||||
resetScrollHeight,
|
||||
saveBase64AsFile,
|
||||
stringFormat,
|
||||
} from '../../utils.js';
|
||||
import { getMessageTimeStamp, humanizedDateTime } from '../../RossAscends-mods.js';
|
||||
import { SECRET_KEYS, secret_state, writeSecret } from '../../secrets.js';
|
||||
import { getNovelUnlimitedImageGeneration, getNovelAnlas, loadNovelSubscriptionData } from '../../nai-settings.js';
|
||||
import { getNovelAnlas, getNovelUnlimitedImageGeneration, loadNovelSubscriptionData } from '../../nai-settings.js';
|
||||
import { getMultimodalCaption } from '../shared.js';
|
||||
import { SlashCommandParser } from '../../slash-commands/SlashCommandParser.js';
|
||||
import { SlashCommand } from '../../slash-commands/SlashCommand.js';
|
||||
import { ARGUMENT_TYPE, SlashCommandArgument, SlashCommandNamedArgument } from '../../slash-commands/SlashCommandArgument.js';
|
||||
import {
|
||||
ARGUMENT_TYPE,
|
||||
SlashCommandArgument,
|
||||
SlashCommandNamedArgument,
|
||||
} from '../../slash-commands/SlashCommandArgument.js';
|
||||
import { debounce_timeout } from '../../constants.js';
|
||||
import { SlashCommandEnumValue } from '../../slash-commands/SlashCommandEnumValue.js';
|
||||
import { POPUP_RESULT, POPUP_TYPE, Popup, callGenericPopup } from '../../popup.js';
|
||||
import { callGenericPopup, Popup, POPUP_RESULT, POPUP_TYPE } from '../../popup.js';
|
||||
import { commonEnumProviders } from '../../slash-commands/SlashCommandCommonEnumsProvider.js';
|
||||
import { ToolManager } from '../../tool-calling.js';
|
||||
|
||||
export { MODULE_NAME };
|
||||
|
||||
const MODULE_NAME = 'sd';
|
||||
|
@ -118,13 +143,13 @@ const triggerWords = {
|
|||
};
|
||||
|
||||
const messageTrigger = {
|
||||
activationRegex: /\b(send|mail|imagine|generate|make|create|draw|paint|render|show)\b.{0,10}\b(pic|picture|image|drawing|painting|photo|photograph)\b(?:\s+of)?(?:\s+(?:a|an|the|this|that|those|your)?)?(.+)/i,
|
||||
activationRegex: /\b(send|mail|imagine|generate|make|create|draw|paint|render|show)\b.{0,10}\b(pic|picture|image|drawing|painting|photo|photograph)\b(?:\s+of)?(?:\s+(?:a|an|the|this|that|those|your)?\s+)?(.+)/i,
|
||||
specialCases: {
|
||||
[generationMode.CHARACTER]: ['you', 'yourself'],
|
||||
[generationMode.USER]: ['me', 'myself'],
|
||||
[generationMode.SCENARIO]: ['story', 'scenario', 'whole story'],
|
||||
[generationMode.NOW]: ['last message'],
|
||||
[generationMode.FACE]: ['your face', 'your portrait', 'your selfie'],
|
||||
[generationMode.FACE]: ['face', 'portrait', 'selfie'],
|
||||
[generationMode.BACKGROUND]: ['background', 'scene background', 'scene', 'scenery', 'surroundings', 'environment'],
|
||||
},
|
||||
};
|
||||
|
@ -343,7 +368,7 @@ function processTriggers(chat, _, abort) {
|
|||
return;
|
||||
}
|
||||
|
||||
console.log(`SD: Triggered by "${message}", detected subject: ${subject}"`);
|
||||
console.log(`SD: Triggered by "${message}", detected subject: "${subject}"`);
|
||||
|
||||
outer: for (const [specialMode, triggers] of Object.entries(messageTrigger.specialCases)) {
|
||||
for (const trigger of triggers) {
|
||||
|
@ -359,7 +384,6 @@ function processTriggers(chat, _, abort) {
|
|||
setTimeout(() => generatePicture(initiators.interactive, {}, subject, message), 1);
|
||||
} catch {
|
||||
console.log('SD: Failed to process triggers.');
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -685,7 +709,7 @@ async function refinePrompt(prompt, isNegative) {
|
|||
return prompt;
|
||||
}
|
||||
|
||||
function onChatChanged() {
|
||||
async function onChatChanged() {
|
||||
if (this_chid === undefined || selected_group) {
|
||||
$('#sd_character_prompt_block').hide();
|
||||
return;
|
||||
|
@ -713,7 +737,7 @@ function onChatChanged() {
|
|||
$('#sd_character_prompt').val(characterPrompt);
|
||||
$('#sd_character_negative_prompt').val(negativePrompt);
|
||||
$('#sd_character_prompt_share').prop('checked', hasSharedData);
|
||||
adjustElementScrollHeight();
|
||||
await adjustElementScrollHeight();
|
||||
}
|
||||
|
||||
async function adjustElementScrollHeight() {
|
||||
|
@ -1285,8 +1309,7 @@ async function getAutoRemoteModel() {
|
|||
throw new Error('SD WebUI returned an error.');
|
||||
}
|
||||
|
||||
const data = await result.text();
|
||||
return data;
|
||||
return await result.text();
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
return null;
|
||||
|
@ -1305,9 +1328,7 @@ async function getDrawthingsRemoteModel() {
|
|||
throw new Error('SD DrawThings API returned an error.');
|
||||
}
|
||||
|
||||
const data = await result.text();
|
||||
|
||||
return data;
|
||||
return await result.text();
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
return null;
|
||||
|
@ -1330,8 +1351,7 @@ async function getAutoRemoteUpscalers() {
|
|||
throw new Error('SD WebUI returned an error.');
|
||||
}
|
||||
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
return [extension_settings.sd.hr_upscaler];
|
||||
|
@ -1350,8 +1370,7 @@ async function getAutoRemoteSchedulers() {
|
|||
throw new Error('SD WebUI returned an error.');
|
||||
}
|
||||
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
return ['N/A'];
|
||||
|
@ -1370,8 +1389,7 @@ async function getVladRemoteUpscalers() {
|
|||
throw new Error('SD.Next returned an error.');
|
||||
}
|
||||
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
return [extension_settings.sd.hr_upscaler];
|
||||
|
@ -1504,8 +1522,7 @@ async function loadHordeSamplers() {
|
|||
});
|
||||
|
||||
if (result.ok) {
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
}
|
||||
|
||||
return [];
|
||||
|
@ -1544,8 +1561,7 @@ async function loadAutoSamplers() {
|
|||
throw new Error('SD WebUI returned an error.');
|
||||
}
|
||||
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
} catch (error) {
|
||||
return [];
|
||||
}
|
||||
|
@ -1583,8 +1599,7 @@ async function loadVladSamplers() {
|
|||
throw new Error('SD.Next returned an error.');
|
||||
}
|
||||
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
} catch (error) {
|
||||
return [];
|
||||
}
|
||||
|
@ -1723,8 +1738,7 @@ async function loadPollinationsModels() {
|
|||
});
|
||||
|
||||
if (result.ok) {
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
}
|
||||
|
||||
return [];
|
||||
|
@ -1742,8 +1756,7 @@ async function loadTogetherAIModels() {
|
|||
});
|
||||
|
||||
if (result.ok) {
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
}
|
||||
|
||||
return [];
|
||||
|
@ -1781,8 +1794,7 @@ async function loadNanoGPTModels() {
|
|||
});
|
||||
|
||||
if (result.ok) {
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
}
|
||||
|
||||
return [];
|
||||
|
@ -1798,8 +1810,10 @@ async function loadHordeModels() {
|
|||
if (result.ok) {
|
||||
const data = await result.json();
|
||||
data.sort((a, b) => b.count - a.count);
|
||||
const models = data.map(x => ({ value: x.name, text: `${x.name} (ETA: ${x.eta}s, Queue: ${x.queued}, Workers: ${x.count})` }));
|
||||
return models;
|
||||
return data.map(x => ({
|
||||
value: x.name,
|
||||
text: `${x.name} (ETA: ${x.eta}s, Queue: ${x.queued}, Workers: ${x.count})`,
|
||||
}));
|
||||
}
|
||||
|
||||
return [];
|
||||
|
@ -1824,8 +1838,7 @@ async function loadExtrasModels() {
|
|||
|
||||
if (getModelsResult.ok) {
|
||||
const data = await getModelsResult.json();
|
||||
const view_models = data.models.map(x => ({ value: x, text: x }));
|
||||
return view_models;
|
||||
return data.models.map(x => ({ value: x, text: x }));
|
||||
}
|
||||
|
||||
return [];
|
||||
|
@ -1867,8 +1880,7 @@ async function loadAutoModels() {
|
|||
}
|
||||
}
|
||||
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
} catch (error) {
|
||||
return [];
|
||||
}
|
||||
|
@ -1953,8 +1965,7 @@ async function loadVladModels() {
|
|||
}
|
||||
}
|
||||
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
} catch (error) {
|
||||
return [];
|
||||
}
|
||||
|
@ -2242,7 +2253,7 @@ async function loadComfyWorkflows() {
|
|||
$('#sd_comfy_workflow').append(option);
|
||||
}
|
||||
} catch (error) {
|
||||
return;
|
||||
console.error(`Could not load ComfyUI workflows: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2361,7 +2372,7 @@ function ensureSelectionExists(setting, selector) {
|
|||
* @param {string} trigger Subject trigger word
|
||||
* @param {string} [message] Chat message
|
||||
* @param {function} [callback] Callback function
|
||||
* @returns {Promise<string>} Image path
|
||||
* @returns {Promise<string|undefined>} Image path
|
||||
* @throws {Error} If the prompt or image generation fails
|
||||
*/
|
||||
async function generatePicture(initiator, args, trigger, message, callback) {
|
||||
|
@ -2380,7 +2391,9 @@ async function generatePicture(initiator, args, trigger, message, callback) {
|
|||
|
||||
trigger = trigger.trim();
|
||||
const generationType = getGenerationType(trigger);
|
||||
console.log('Image generation mode', generationType, 'triggered with', trigger);
|
||||
const generationTypeKey = Object.keys(generationMode).find(key => generationMode[key] === generationType);
|
||||
console.log(`Image generation mode ${generationTypeKey} triggered with "${trigger}"`);
|
||||
|
||||
const quietPrompt = getQuietPrompt(generationType, trigger);
|
||||
const context = getContext();
|
||||
|
||||
|
@ -2392,7 +2405,7 @@ async function generatePicture(initiator, args, trigger, message, callback) {
|
|||
const callbackOriginal = callback;
|
||||
callback = async function (prompt, imagePath, generationType, _negativePromptPrefix, _initiator, prefixedPrompt) {
|
||||
const imgUrl = `url("${encodeURI(imagePath)}")`;
|
||||
eventSource.emit(event_types.FORCE_SET_BACKGROUND, { url: imgUrl, path: imagePath });
|
||||
await eventSource.emit(event_types.FORCE_SET_BACKGROUND, { url: imgUrl, path: imagePath });
|
||||
|
||||
if (typeof callbackOriginal === 'function') {
|
||||
await callbackOriginal(prompt, imagePath, generationType, negativePromptPrefix, initiator, prefixedPrompt);
|
||||
|
@ -2454,12 +2467,12 @@ function setTypeSpecificDimensions(generationType) {
|
|||
const aspectRatio = extension_settings.sd.width / extension_settings.sd.height;
|
||||
|
||||
// Face images are always portrait (pun intended)
|
||||
if ((generationType == generationMode.FACE || generationType == generationMode.FACE_MULTIMODAL) && aspectRatio >= 1) {
|
||||
if ((generationType === generationMode.FACE || generationType === generationMode.FACE_MULTIMODAL) && aspectRatio >= 1) {
|
||||
// Round to nearest multiple of 64
|
||||
extension_settings.sd.height = Math.round(extension_settings.sd.width * 1.5 / 64) * 64;
|
||||
}
|
||||
|
||||
if (generationType == generationMode.BACKGROUND) {
|
||||
if (generationType === generationMode.BACKGROUND) {
|
||||
// Background images are always landscape
|
||||
if (aspectRatio <= 1) {
|
||||
// Round to nearest multiple of 64
|
||||
|
@ -2545,7 +2558,7 @@ async function getPrompt(generationType, message, trigger, quietPrompt, combineN
|
|||
*/
|
||||
function generateFreeModePrompt(trigger, combineNegatives) {
|
||||
return trigger
|
||||
.replace(/(?:^char(\s|,)|\{\{charPrefix\}\})/gi, (_, suffix) => {
|
||||
.replace(/^char(\s|,)|{{charPrefix}}/gi, (_, suffix) => {
|
||||
const getLastCharacterKey = () => {
|
||||
if (typeof this_chid !== 'undefined') {
|
||||
return getCharaFilename(this_chid);
|
||||
|
@ -2553,9 +2566,7 @@ function generateFreeModePrompt(trigger, combineNegatives) {
|
|||
const context = getContext();
|
||||
for (let i = context.chat.length - 1; i >= 0; i--) {
|
||||
const message = context.chat[i];
|
||||
if (message.is_user || message.is_system) {
|
||||
continue;
|
||||
} else if (typeof message.original_avatar === 'string') {
|
||||
if (!message.is_user && !message.is_system && typeof message.original_avatar === 'string') {
|
||||
return message.original_avatar.replace(/\.[^/.]+$/, '');
|
||||
}
|
||||
}
|
||||
|
@ -2578,11 +2589,11 @@ function generateFreeModePrompt(trigger, combineNegatives) {
|
|||
async function generateMultimodalPrompt(generationType, quietPrompt) {
|
||||
let avatarUrl;
|
||||
|
||||
if (generationType == generationMode.USER_MULTIMODAL) {
|
||||
if (generationType === generationMode.USER_MULTIMODAL) {
|
||||
avatarUrl = getUserAvatarUrl();
|
||||
}
|
||||
|
||||
if (generationType == generationMode.CHARACTER_MULTIMODAL || generationType === generationMode.FACE_MULTIMODAL) {
|
||||
if (generationType === generationMode.CHARACTER_MULTIMODAL || generationType === generationMode.FACE_MULTIMODAL) {
|
||||
avatarUrl = getCharacterAvatarUrl();
|
||||
}
|
||||
|
||||
|
@ -3190,8 +3201,8 @@ function getNovelParams() {
|
|||
const ratio = Math.sqrt(MAX_PIXELS / (width * height));
|
||||
|
||||
// Calculate new width and height while maintaining aspect ratio.
|
||||
var newWidth = Math.round(width * ratio);
|
||||
var newHeight = Math.round(height * ratio);
|
||||
let newWidth = Math.round(width * ratio);
|
||||
let newHeight = Math.round(height * ratio);
|
||||
|
||||
// Ensure new dimensions are multiples of 64. If not, reduce accordingly.
|
||||
if (newWidth % 64 !== 0) {
|
||||
|
@ -3487,9 +3498,9 @@ async function onComfyOpenWorkflowEditorClick() {
|
|||
const popupResult = popup.show();
|
||||
const checkPlaceholders = () => {
|
||||
workflow = $('#sd_comfy_workflow_editor_workflow').val().toString();
|
||||
$('.sd_comfy_workflow_editor_placeholder_list > li[data-placeholder]').each(function (idx) {
|
||||
$('.sd_comfy_workflow_editor_placeholder_list > li[data-placeholder]').each(function () {
|
||||
const key = this.getAttribute('data-placeholder');
|
||||
const found = workflow.search(`"%${key}%"`) != -1;
|
||||
const found = workflow.search(`"%${key}%"`) !== -1;
|
||||
this.classList[found ? 'remove' : 'add']('sd_comfy_workflow_editor_not_found');
|
||||
});
|
||||
};
|
||||
|
@ -3849,7 +3860,7 @@ async function sdMessageButton(e) {
|
|||
swipes.push(image);
|
||||
|
||||
// If already contains an image and it's not inline - leave it as is
|
||||
message.extra.inline_image = message.extra.image && !message.extra.inline_image ? false : true;
|
||||
message.extra.inline_image = !(message.extra.image && !message.extra.inline_image);
|
||||
message.extra.image = image;
|
||||
message.extra.title = prompt;
|
||||
message.extra.generationType = generationType;
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
<div class="inline-drawer-content">
|
||||
<div id="tts_status">
|
||||
</div>
|
||||
<span>Select TTS Provider</span> </br>
|
||||
<span data-i18n="Select TTS Provider">Select TTS Provider</span> </br>
|
||||
<div class="tts_block">
|
||||
<select id="tts_provider" class="flex1">
|
||||
</select>
|
||||
|
@ -16,49 +16,49 @@
|
|||
<div>
|
||||
<label class="checkbox_label" for="tts_enabled">
|
||||
<input type="checkbox" id="tts_enabled" name="tts_enabled">
|
||||
<small>Enabled</small>
|
||||
<small data-i18n="tts_enabled">Enabled</small>
|
||||
</label>
|
||||
<label class="checkbox_label" for="tts_narrate_user">
|
||||
<input type="checkbox" id="tts_narrate_user">
|
||||
<small>Narrate user messages</small>
|
||||
<small data-i18n="Narrate user messages">Narrate user messages</small>
|
||||
</label>
|
||||
<label class="checkbox_label" for="tts_auto_generation">
|
||||
<input type="checkbox" id="tts_auto_generation">
|
||||
<small>Auto Generation</small>
|
||||
<small data-i18n="Auto Generation">Auto Generation</small>
|
||||
</label>
|
||||
<label class="checkbox_label" for="tts_periodic_auto_generation" title="Requires auto generation to be enabled.">
|
||||
<label class="checkbox_label" for="tts_periodic_auto_generation" data-i18n="[title]Requires auto generation to be enabled." title="Requires auto generation to be enabled.">
|
||||
<input type="checkbox" id="tts_periodic_auto_generation">
|
||||
<small>Narrate by paragraphs (when streaming)</small>
|
||||
<small data-i18n="Narrate by paragraphs (when streaming)">Narrate by paragraphs (when streaming)</small>
|
||||
</label>
|
||||
<label class="checkbox_label" for="tts_narrate_quoted">
|
||||
<input type="checkbox" id="tts_narrate_quoted">
|
||||
<small>Only narrate "quotes"</small>
|
||||
<small data-i18n="Only narrate quotes">Only narrate "quotes"</small>
|
||||
</label>
|
||||
<label class="checkbox_label" for="tts_narrate_dialogues">
|
||||
<input type="checkbox" id="tts_narrate_dialogues">
|
||||
<small>Ignore *text, even "quotes", inside asterisks*</small>
|
||||
<small data-i18n="Ignore text, even quotes, inside asterisk">Ignore *text, even "quotes", inside asterisks*</small>
|
||||
</label>
|
||||
<label class="checkbox_label" for="tts_narrate_translated_only">
|
||||
<input type="checkbox" id="tts_narrate_translated_only">
|
||||
<small>Narrate only the translated text</small>
|
||||
<small data-i18n="Narrate only the translated text">Narrate only the translated text</small>
|
||||
</label>
|
||||
<label class="checkbox_label" for="tts_skip_codeblocks">
|
||||
<input type="checkbox" id="tts_skip_codeblocks">
|
||||
<small>Skip codeblocks</small>
|
||||
<small data-i18n="Skip codeblocks">Skip codeblocks</small>
|
||||
</label>
|
||||
<label class="checkbox_label" for="tts_skip_tags">
|
||||
<input type="checkbox" id="tts_skip_tags">
|
||||
<small>Skip <tagged> blocks</small>
|
||||
<small data-i18n="Skip tagged blocks">Skip <tagged> blocks</small>
|
||||
</label>
|
||||
<label class="checkbox_label" for="tts_pass_asterisks">
|
||||
<input type="checkbox" id="tts_pass_asterisks">
|
||||
<small>Pass Asterisks to TTS Engine</small>
|
||||
<small data-i18n="Pass Asterisks to TTS Engine">Pass Asterisks to TTS Engine</small>
|
||||
</label>
|
||||
</div>
|
||||
<div id="playback_rate_block" class="range-block">
|
||||
<hr>
|
||||
<div class="range-block-title justifyLeft" data-i18n="Audio Playback Speed">
|
||||
<small>Audio Playback Speed</small>
|
||||
<small data-i18n="Audio Playback Speed">Audio Playback Speed</small>
|
||||
</div>
|
||||
<div class="range-block-range-and-counter">
|
||||
<div class="range-block-range">
|
||||
|
@ -80,4 +80,4 @@
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
|
@ -39,6 +39,7 @@ const controls = [
|
|||
{ id: 'instruct_first_input_sequence', property: 'first_input_sequence', isCheckbox: false },
|
||||
{ id: 'instruct_last_input_sequence', property: 'last_input_sequence', isCheckbox: false },
|
||||
{ id: 'instruct_activation_regex', property: 'activation_regex', isCheckbox: false },
|
||||
{ id: 'instruct_derived', property: 'derived', isCheckbox: true },
|
||||
{ id: 'instruct_bind_to_context', property: 'bind_to_context', isCheckbox: true },
|
||||
{ id: 'instruct_skip_examples', property: 'skip_examples', isCheckbox: true },
|
||||
{ id: 'instruct_names_behavior', property: 'names_behavior', isCheckbox: false },
|
||||
|
@ -100,6 +101,7 @@ export async function loadInstructMode(data) {
|
|||
|
||||
$('#instruct_enabled').parent().find('i').toggleClass('toggleEnabled', !!power_user.instruct.enabled);
|
||||
$('#instructSettingsBlock, #InstructSequencesColumn').toggleClass('disabled', !power_user.instruct.enabled);
|
||||
$('#instruct_derived').parent().find('i').toggleClass('toggleEnabled', !!power_user.instruct.derived);
|
||||
$('#instruct_bind_to_context').parent().find('i').toggleClass('toggleEnabled', !!power_user.instruct.bind_to_context);
|
||||
|
||||
controls.forEach(control => {
|
||||
|
@ -146,6 +148,12 @@ export async function loadInstructMode(data) {
|
|||
* @param {boolean} [options.isAuto=false] Is auto-select.
|
||||
*/
|
||||
export function selectContextPreset(preset, { quiet = false, isAuto = false } = {}) {
|
||||
const presetExists = context_presets.some(x => x.name === preset);
|
||||
if (!presetExists) {
|
||||
console.warn(`Context template "${preset}" not found`);
|
||||
return;
|
||||
}
|
||||
|
||||
// If context template is not already selected, select it
|
||||
if (preset !== power_user.context.preset) {
|
||||
$('#context_presets').val(preset).trigger('change');
|
||||
|
@ -163,6 +171,12 @@ export function selectContextPreset(preset, { quiet = false, isAuto = false } =
|
|||
* @param {boolean} [options.isAuto=false] Is auto-select.
|
||||
*/
|
||||
export function selectInstructPreset(preset, { quiet = false, isAuto = false } = {}) {
|
||||
const presetExists = instruct_presets.some(x => x.name === preset);
|
||||
if (!presetExists) {
|
||||
console.warn(`Instruct template "${preset}" not found`);
|
||||
return;
|
||||
}
|
||||
|
||||
// If instruct preset is not already selected, select it
|
||||
if (preset !== power_user.instruct.preset) {
|
||||
$('#instruct_presets').val(preset).trigger('change');
|
||||
|
@ -715,6 +729,10 @@ jQuery(() => {
|
|||
}
|
||||
});
|
||||
|
||||
$('#instruct_derived').on('change', function () {
|
||||
$('#instruct_derived').parent().find('i').toggleClass('toggleEnabled', !!power_user.instruct.derived);
|
||||
});
|
||||
|
||||
$('#instruct_bind_to_context').on('change', function () {
|
||||
$('#instruct_bind_to_context').parent().find('i').toggleClass('toggleEnabled', !!power_user.instruct.bind_to_context);
|
||||
});
|
||||
|
|
|
@ -519,14 +519,14 @@ function convertTokenIdLogprobsToText(input) {
|
|||
|
||||
const tokenizerId = getTokenizerBestMatch(api);
|
||||
|
||||
// Flatten unique token IDs across all logprobs
|
||||
/** @type {any[]} Flatten unique token IDs across all logprobs */
|
||||
const tokenIds = Array.from(new Set(input.flatMap(logprobs =>
|
||||
logprobs.topLogprobs.map(([token]) => token).concat(logprobs.token),
|
||||
)));
|
||||
|
||||
// Submit token IDs to tokenizer to get token text, then build ID->text map
|
||||
// noinspection JSCheckFunctionSignatures - mutates input in-place
|
||||
const { chunks } = decodeTextTokens(tokenizerId, tokenIds.map(parseInt));
|
||||
const { chunks } = decodeTextTokens(tokenizerId, tokenIds);
|
||||
const tokenIdText = new Map(tokenIds.map((id, i) => [id, chunks[i]]));
|
||||
|
||||
// Fixup logprobs data with token text
|
||||
|
|
|
@ -752,7 +752,8 @@ async function populateChatHistory(messages, prompts, chatCompletion, type = nul
|
|||
if (type === 'continue' && oai_settings.continue_prefill && chatPrompt === firstNonInjected) {
|
||||
// in case we are using continue_prefill and the latest message is an assistant message, we want to prepend the users assistant prefill on the message
|
||||
if (chatPrompt.role === 'assistant') {
|
||||
const continueMessage = await Message.createAsync(chatMessage.role, substituteParams(oai_settings.assistant_prefill + '\n\n') + chatMessage.content, chatMessage.identifier);
|
||||
const messageContent = [substituteParams(oai_settings.assistant_prefill), chatMessage.content].filter(x => x).join('\n\n');
|
||||
const continueMessage = await Message.createAsync(chatMessage.role, messageContent, chatMessage.identifier);
|
||||
const collection = new MessageCollection('continuePrefill', continueMessage);
|
||||
chatCompletion.add(collection, -1);
|
||||
continue;
|
||||
|
@ -4083,7 +4084,7 @@ async function onModelChange() {
|
|||
if (oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE) {
|
||||
if (oai_settings.max_context_unlocked) {
|
||||
$('#openai_max_context').attr('max', max_2mil);
|
||||
} else if (value.includes('gemini-exp-1114')){
|
||||
} else if (value.includes('gemini-exp-1114') || value.includes('gemini-exp-1121')) {
|
||||
$('#openai_max_context').attr('max', max_32k);
|
||||
} else if (value.includes('gemini-1.5-pro')) {
|
||||
$('#openai_max_context').attr('max', max_2mil);
|
||||
|
@ -4223,10 +4224,10 @@ async function onModelChange() {
|
|||
else if (['command-light-nightly', 'command-nightly'].includes(oai_settings.cohere_model)) {
|
||||
$('#openai_max_context').attr('max', max_8k);
|
||||
}
|
||||
else if (oai_settings.cohere_model.includes('command-r')) {
|
||||
else if (oai_settings.cohere_model.includes('command-r') || ['c4ai-aya-expanse-32b'].includes(oai_settings.cohere_model)) {
|
||||
$('#openai_max_context').attr('max', max_128k);
|
||||
}
|
||||
else if (['c4ai-aya-23'].includes(oai_settings.cohere_model)) {
|
||||
else if (['c4ai-aya-23', 'c4ai-aya-expanse-8b'].includes(oai_settings.cohere_model)) {
|
||||
$('#openai_max_context').attr('max', max_8k);
|
||||
}
|
||||
else {
|
||||
|
@ -4772,6 +4773,7 @@ export function isImageInliningSupported() {
|
|||
'gemini-1.5-flash-8b-exp-0827',
|
||||
'gemini-1.5-flash-8b-exp-0924',
|
||||
'gemini-exp-1114',
|
||||
'gemini-exp-1121',
|
||||
'gemini-1.0-pro-vision-latest',
|
||||
'gemini-1.5-pro',
|
||||
'gemini-1.5-pro-latest',
|
||||
|
|
|
@ -226,6 +226,7 @@ let power_user = {
|
|||
macro: true,
|
||||
names_behavior: names_behavior_types.FORCE,
|
||||
activation_regex: '',
|
||||
derived: false,
|
||||
bind_to_context: false,
|
||||
user_alignment_message: '',
|
||||
system_same_as_user: false,
|
||||
|
@ -243,6 +244,8 @@ let power_user = {
|
|||
names_as_stop_strings: true,
|
||||
},
|
||||
|
||||
context_derived: false,
|
||||
|
||||
sysprompt: {
|
||||
enabled: true,
|
||||
name: 'Neutral - Chat',
|
||||
|
@@ -1472,6 +1475,7 @@ async function loadPowerUserSettings(settings, data) {
$('#encode_tags').prop('checked', power_user.encode_tags);
$('#example_messages_behavior').val(getExampleMessagesBehavior());
$(`#example_messages_behavior option[value="${getExampleMessagesBehavior()}"]`).prop('selected', true);
$('#context_derived').parent().find('i').toggleClass('toggleEnabled', !!power_user.context_derived);

$('#console_log_prompts').prop('checked', power_user.console_log_prompts);
$('#request_token_probabilities').prop('checked', power_user.request_token_probabilities);

@@ -3057,6 +3061,16 @@ $(document).ready(() => {
    saveSettingsDebounced();
});

$('#context_derived').on('input', function () {
    const value = !!$(this).prop('checked');
    power_user.context_derived = value;
    saveSettingsDebounced();
});

$('#context_derived').on('change', function () {
    $('#context_derived').parent().find('i').toggleClass('toggleEnabled', !!power_user.context_derived);
});

$('#always-force-name2-checkbox').change(function () {
    power_user.always_force_name2 = !!$(this).prop('checked');
    saveSettingsDebounced();

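The two handlers above split the work for the new checkbox: 'input' persists the flag and schedules a debounced save, while 'change' refreshes the highlight on the neighbouring icon. A rough sketch of that pattern with a hypothetical #my_flag element and power_user.my_flag field (jQuery, as in the surrounding code):

// Sketch only: persist the value on 'input', refresh the indicator icon on 'change'.
$('#my_flag').on('input', function () {
    power_user.my_flag = !!$(this).prop('checked'); // hypothetical setting
    saveSettingsDebounced();
});
$('#my_flag').on('change', function () {
    $('#my_flag').parent().find('i').toggleClass('toggleEnabled', !!power_user.my_flag);
});
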
@@ -584,6 +584,7 @@ class PresetManager {
    'openrouter_providers',
    'openrouter_allow_fallbacks',
    'tabby_model',
    'derived',
];
const settings = Object.assign({}, getSettingsByApiId(this.apiId));

@@ -1827,6 +1827,30 @@ export function initDefaultSlashCommands() {
            </div>
        `,
    }));
    SlashCommandParser.addCommandObject(SlashCommand.fromProps({
        name: 'upper',
        aliases: ['uppercase', 'to-upper'],
        callback: (_, text) => typeof text === 'string' ? text.toUpperCase() : '',
        returns: 'uppercase string',
        unnamedArgumentList: [
            new SlashCommandArgument(
                'string', [ARGUMENT_TYPE.STRING], true, false,
            ),
        ],
        helpString: 'Converts the provided string to uppercase.',
    }));
    SlashCommandParser.addCommandObject(SlashCommand.fromProps({
        name: 'lower',
        aliases: ['lowercase', 'to-lower'],
        callback: (_, text) => typeof text === 'string' ? text.toLowerCase() : '',
        returns: 'lowercase string',
        unnamedArgumentList: [
            new SlashCommandArgument(
                'string', [ARGUMENT_TYPE.STRING], true, false,
            ),
        ],
        helpString: 'Converts the provided string to lowercase.',
    }));

    registerVariableCommands();
}

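Both new commands simply call String.prototype.toUpperCase / toLowerCase on their unnamed argument and return the result into the pipe. A short usage sketch in STscript (the /echo step is just for illustration):

/upper hello there | /echo {{pipe}}
/lower SOME SHOUTED TEXT | /echo {{pipe}}

The first line displays "HELLO THERE", the second "some shouted text".
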
@@ -280,7 +280,7 @@ async function sendMakerSuiteRequest(request, response) {
    delete generationConfig.stopSequences;
}

const should_use_system_prompt = (model.includes('gemini-1.5-flash') || model.includes('gemini-1.5-pro') || model.includes('gemini-exp-1114')) && request.body.use_makersuite_sysprompt;
const should_use_system_prompt = (model.includes('gemini-1.5-flash') || model.includes('gemini-1.5-pro') || model.includes('gemini-exp-1114') || model.includes('gemini-exp-1121')) && request.body.use_makersuite_sysprompt;
const prompt = convertGooglePrompt(request.body.messages, model, should_use_system_prompt, request.body.char_name, request.body.user_name);
let body = {
    contents: prompt.contents,

@@ -16,6 +16,7 @@ import {
} from '../../constants.js';
import { forwardFetchResponse, trimV1, getConfigValue } from '../../util.js';
import { setAdditionalHeaders } from '../../additional-headers.js';
import { createHash } from 'node:crypto';

export const router = express.Router();

@@ -227,6 +228,40 @@ router.post('/status', jsonParser, async function (request, response) {
    }
});

router.post('/props', jsonParser, async function (request, response) {
    if (!request.body.api_server) return response.sendStatus(400);

    try {
        const baseUrl = trimV1(request.body.api_server);
        const args = {
            headers: {},
        };

        setAdditionalHeaders(request, args, baseUrl);

        const apiType = request.body.api_type;
        const propsUrl = baseUrl + '/props';
        const propsReply = await fetch(propsUrl, args);

        if (!propsReply.ok) {
            return response.status(400);
        }

        /** @type {any} */
        const props = await propsReply.json();
        // TEMPORARY: llama.cpp's /props endpoint has a bug which replaces the last newline with a \0
        if (apiType === TEXTGEN_TYPES.LLAMACPP && props['chat_template'].endsWith('\u0000')) {
            props['chat_template'] = props['chat_template'].slice(0, -1) + '\n';
        }
        props['chat_template_hash'] = createHash('sha256').update(props['chat_template']).digest('hex');
        console.log(`Model properties: ${JSON.stringify(props)}`);
        return response.send(props);
    } catch (error) {
        console.error(error);
        return response.status(500);
    }
});

router.post('/generate', jsonParser, async function (request, response) {
    if (!request.body) return response.sendStatus(400);

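For orientation, here is a hedged sketch of how a client could call the new /props route; the /api/backends/text-completions prefix, the example backend URL, and the api_type value are assumptions, since the diff only shows the router itself:

// Sketch: ask the server to proxy the backend's /props endpoint (path, URL and api_type are assumptions).
const res = await fetch('/api/backends/text-completions/props', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
        api_server: 'http://127.0.0.1:8080', // e.g. a local llama.cpp server
        api_type: 'llamacpp',
    }),
});
const props = await res.json();
console.log(props.chat_template, props.chat_template_hash); // hash is SHA-256 of the (newline-fixed) template
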
@@ -347,6 +347,7 @@ export function convertGooglePrompt(messages, model, useSysPrompt = false, charN
'gemini-1.5-flash-8b-exp-0827',
'gemini-1.5-flash-8b-exp-0924',
'gemini-exp-1114',
'gemini-exp-1121',
'gemini-1.5-pro',
'gemini-1.5-pro-latest',
'gemini-1.5-pro-001',

@@ -628,11 +629,30 @@ export function convertMistralMessages(messages, charName = '', userName = '') {
export function mergeMessages(messages, charName, userName, strict) {
    let mergedMessages = [];

    /** @type {Map<string,object>} */
    const contentTokens = new Map();

    // Remove names from the messages
    messages.forEach((message) => {
        if (!message.content) {
            message.content = '';
        }
        // Flatten contents and replace image URLs with random tokens
        if (Array.isArray(message.content)) {
            const text = message.content.map((content) => {
                if (content.type === 'text') {
                    return content.text;
                }
                // Could be extended with other non-text types
                if (content.type === 'image_url') {
                    const token = crypto.randomBytes(32).toString('base64');
                    contentTokens.set(token, content);
                    return token;
                }
                return '';
            }).join('\n\n');
            message.content = text;
        }
        if (message.role === 'system' && message.name === 'example_assistant') {
            if (charName && !message.content.startsWith(`${charName}: `)) {
                message.content = `${charName}: ${message.content}`;

@@ -673,6 +693,32 @@ export function mergeMessages(messages, charName, userName, strict) {
    });
}

// Check for content tokens and replace them with the actual content objects
if (contentTokens.size > 0) {
    mergedMessages.forEach((message) => {
        const hasValidToken = Array.from(contentTokens.keys()).some(token => message.content.includes(token));

        if (hasValidToken) {
            const splitContent = message.content.split('\n\n');
            const mergedContent = [];

            splitContent.forEach((content) => {
                if (contentTokens.has(content)) {
                    mergedContent.push(contentTokens.get(content));
                } else {
                    if (mergedContent.length > 0 && mergedContent[mergedContent.length - 1].type === 'text') {
                        mergedContent[mergedContent.length - 1].text += `\n\n${content}`;
                    } else {
                        mergedContent.push({ type: 'text', text: content });
                    }
                }
            });

            message.content = mergedContent;
        }
    });
}

if (strict) {
    for (let i = 0; i < mergedMessages.length; i++) {
        // Force mid-prompt system messages to be user messages

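Taken together, the two hunks above flatten multimodal content arrays to plain text, with a random base64 token standing in for each image part, let the existing text-only merge logic run, and then swap the tokens back for the original image_url objects, coalescing the surrounding text into single parts. A hedged illustration of the shapes involved (the data URL and token are made up):

// One multimodal message before merging:
const exampleMessage = {
    role: 'user',
    content: [
        { type: 'text', text: 'What is in this picture?' },
        { type: 'image_url', image_url: { url: 'data:image/png;base64,...' } },
    ],
};
// 1) Flatten: content becomes 'What is in this picture?\n\n<random token>' and the
//    token -> image_url object mapping is remembered in contentTokens.
// 2) Merge: consecutive same-role messages are joined as plain strings with '\n\n'.
// 3) Restore: the merged string is split on '\n\n'; token chunks become the stored
//    image_url objects again, and neighbouring text chunks are merged into one text part.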