Merge branch 'staging' into if-command-default-arguments

This commit is contained in:
Wolfsblvt
2024-09-25 20:46:40 +02:00
151 changed files with 1655 additions and 905 deletions

View File

@@ -1,12 +1,13 @@
'use strict';
import { callPopup, event_types, eventSource, is_send_press, main_api, substituteParams } from '../script.js';
import { event_types, eventSource, is_send_press, main_api, substituteParams } from '../script.js';
import { is_group_generating } from './group-chats.js';
import { Message, TokenHandler } from './openai.js';
import { power_user } from './power-user.js';
import { debounce, waitUntilCondition, escapeHtml } from './utils.js';
import { debounce_timeout } from './constants.js';
import { renderTemplateAsync } from './templates.js';
import { Popup } from './popup.js';
function debouncePromise(func, delay) {
let timeoutId;
@@ -453,21 +454,24 @@ class PromptManager {
};
// Delete selected prompt from list form and close edit form
this.handleDeletePrompt = (event) => {
const promptID = document.getElementById(this.configuration.prefix + 'prompt_manager_footer_append_prompt').value;
const prompt = this.getPromptById(promptID);
this.handleDeletePrompt = async (event) => {
Popup.show.confirm('Are you sure you want to delete this prompt?', null).then((userChoice) => {
if (!userChoice) return;
const promptID = document.getElementById(this.configuration.prefix + 'prompt_manager_footer_append_prompt').value;
const prompt = this.getPromptById(promptID);
if (prompt && true === this.isPromptDeletionAllowed(prompt)) {
const promptIndex = this.getPromptIndexById(promptID);
this.serviceSettings.prompts.splice(Number(promptIndex), 1);
if (prompt && true === this.isPromptDeletionAllowed(prompt)) {
const promptIndex = this.getPromptIndexById(promptID);
this.serviceSettings.prompts.splice(Number(promptIndex), 1);
this.log('Deleted prompt: ' + prompt.identifier);
this.log('Deleted prompt: ' + prompt.identifier);
this.hidePopup();
this.clearEditForm();
this.render();
this.saveServiceSettings();
}
this.hidePopup();
this.clearEditForm();
this.render();
this.saveServiceSettings();
}
});
};
// Create new prompt, then save it to settings and close form.
@@ -527,9 +531,9 @@ class PromptManager {
// Import prompts for the selected character
this.handleImport = () => {
callPopup('Existing prompts with the same ID will be overridden. Do you want to proceed?', 'confirm')
Popup.show.confirm('Existing prompts with the same ID will be overridden. Do you want to proceed?', null)
.then(userChoice => {
if (false === userChoice) return;
if (!userChoice) return;
const fileOpener = document.createElement('input');
fileOpener.type = 'file';
@@ -563,9 +567,9 @@ class PromptManager {
// Restore default state of a characters prompt order
this.handleCharacterReset = () => {
callPopup('This will reset the prompt order for this character. You will not lose any prompts.', 'confirm')
Popup.show.confirm('This will reset the prompt order for this character. You will not lose any prompts.', null)
.then(userChoice => {
if (false === userChoice) return;
if (!userChoice) return;
this.removePromptOrderForCharacter(this.activeCharacter);
this.addPromptOrderForCharacter(this.activeCharacter, promptManagerDefaultPromptOrder);
@@ -1536,16 +1540,17 @@ class PromptManager {
}
const encodedName = escapeHtml(prompt.name);
const isMarkerPrompt = prompt.marker && prompt.injection_position !== INJECTION_POSITION.ABSOLUTE;
const isSystemPrompt = !prompt.marker && prompt.system_prompt && prompt.injection_position !== INJECTION_POSITION.ABSOLUTE && !prompt.forbid_overrides;
const isImportantPrompt = !prompt.marker && prompt.system_prompt && prompt.injection_position !== INJECTION_POSITION.ABSOLUTE && prompt.forbid_overrides;
const isImportantPrompt = !prompt.marker && prompt.system_prompt && prompt.injection_position !== INJECTION_POSITION.ABSOLUTE && prompt.forbid_overrides;
const isUserPrompt = !prompt.marker && !prompt.system_prompt && prompt.injection_position !== INJECTION_POSITION.ABSOLUTE;
const isInjectionPrompt = !prompt.marker && prompt.injection_position === INJECTION_POSITION.ABSOLUTE;
const isInjectionPrompt = prompt.injection_position === INJECTION_POSITION.ABSOLUTE;
const isOverriddenPrompt = Array.isArray(this.overriddenPrompts) && this.overriddenPrompts.includes(prompt.identifier);
const importantClass = isImportantPrompt ? `${prefix}prompt_manager_important` : '';
listItemHtml += `
<li class="${prefix}prompt_manager_prompt ${draggableClass} ${enabledClass} ${markerClass} ${importantClass}" data-pm-identifier="${prompt.identifier}">
<span class="${prefix}prompt_manager_prompt_name" data-pm-name="${encodedName}">
${prompt.marker ? '<span class="fa-fw fa-solid fa-thumb-tack" title="Marker"></span>' : ''}
${isMarkerPrompt ? '<span class="fa-fw fa-solid fa-thumb-tack" title="Marker"></span>' : ''}
${isSystemPrompt ? '<span class="fa-fw fa-solid fa-square-poll-horizontal" title="Global Prompt"></span>' : ''}
${isImportantPrompt ? '<span class="fa-fw fa-solid fa-star" title="Important Prompt"></span>' : ''}
${isUserPrompt ? '<span class="fa-fw fa-solid fa-user" title="User Prompt"></span>' : ''}
@@ -1745,7 +1750,7 @@ class PromptManager {
*/
showPopup(area = 'edit') {
const areaElement = document.getElementById(this.configuration.prefix + 'prompt_manager_popup_' + area);
areaElement.style.display = 'block';
areaElement.style.display = 'flex';
$('#' + this.configuration.prefix + 'prompt_manager_popup').first()
.slideDown(200, 'swing')

View File

@@ -313,7 +313,7 @@ async function onDeleteBackgroundClick(e) {
}
}
const autoBgPrompt = 'Pause your roleplay and choose a location ONLY from the provided list that is the most suitable for the current scene. Do not output any other text:\n{0}';
const autoBgPrompt = 'Ignore previous instructions and choose a location ONLY from the provided list that is the most suitable for the current scene. Do not output any other text:\n{0}';
async function autoBackgroundCommand() {
/** @type {HTMLElement[]} */

View File

@@ -49,11 +49,14 @@
<option data-type="google" value="gemini-1.5-flash">gemini-1.5-flash</option>
<option data-type="google" value="gemini-1.5-flash-latest">gemini-1.5-flash-latest</option>
<option data-type="google" value="gemini-1.5-flash-001">gemini-1.5-flash-001</option>
<option data-type="google" value="gemini-1.5-flash-002">gemini-1.5-flash-002</option>
<option data-type="google" value="gemini-1.5-flash-exp-0827">gemini-1.5-flash-exp-0827</option>
<option data-type="google" value="gemini-1.5-flash-8b-exp-0827">gemini-1.5-flash-8b-exp-0827</option>
<option data-type="google" value="gemini-1.5-flash-8b-exp-0924">gemini-1.5-flash-8b-exp-0924</option>
<option data-type="google" value="gemini-1.5-pro">gemini-1.5-pro</option>
<option data-type="google" value="gemini-1.5-pro-latest">gemini-1.5-pro-latest</option>
<option data-type="google" value="gemini-1.5-pro-001">gemini-1.5-pro-001</option>
<option data-type="google" value="gemini-1.5-pro-002">gemini-1.5-pro-002</option>
<option data-type="google" value="gemini-1.5-pro-exp-0801">gemini-1.5-pro-exp-0801</option>
<option data-type="google" value="gemini-1.5-pro-exp-0827">gemini-1.5-pro-exp-0827</option>
<option data-type="google" value="gemini-pro-vision">gemini-pro-vision</option>

View File

@@ -34,6 +34,8 @@ const TC_COMMANDS = [
'preset',
'api-url',
'model',
'sysprompt',
'sysprompt-state',
'instruct',
'context',
'instruct-state',
@@ -46,6 +48,8 @@ const FANCY_NAMES = {
'preset': 'Settings Preset',
'model': 'Model',
'proxy': 'Proxy Preset',
'sysprompt-state': 'Use System Prompt',
'sysprompt': 'System Prompt Name',
'instruct-state': 'Instruct Mode',
'instruct': 'Instruct Template',
'context': 'Context Template',
@@ -181,6 +185,11 @@ async function readProfileFromCommands(mode, profile, cleanUp = false) {
}
if (cleanUp) {
for (const command of commands) {
if (command.endsWith('-state') && profile[command] === 'false') {
delete profile[command.replace('-state', '')];
}
}
for (const command of opposingCommands) {
if (commands.includes(command)) {
continue;
@@ -326,7 +335,7 @@ function renderConnectionProfiles(profiles) {
noneOption.selected = !extension_settings.connectionManager.selectedProfile;
profiles.appendChild(noneOption);
for (const profile of extension_settings.connectionManager.profiles) {
for (const profile of extension_settings.connectionManager.profiles.sort((a, b) => a.name.localeCompare(b.name))) {
const option = document.createElement('option');
option.value = profile.id;
option.textContent = profile.name;
@@ -463,6 +472,31 @@ async function renderDetailsContent(detailsContent) {
await eventSource.emit(event_types.CONNECTION_PROFILE_LOADED, NONE);
});
const renameButton = document.getElementById('rename_connection_profile');
renameButton.addEventListener('click', async () => {
const selectedProfile = extension_settings.connectionManager.selectedProfile;
const profile = extension_settings.connectionManager.profiles.find(p => p.id === selectedProfile);
if (!profile) {
console.log('No profile selected');
return;
}
const newName = await Popup.show.input('Enter a new name', null, profile.name, { rows: 2 });
if (!newName) {
return;
}
if (extension_settings.connectionManager.profiles.some(p => p.name === newName)) {
toastr.error('A profile with the same name already exists.');
return;
}
profile.name = newName;
saveSettingsDebounced();
renderConnectionProfiles(profiles);
toastr.success('Connection profile renamed', '', { timeOut: 1500 });
});
/** @type {HTMLElement} */
const viewDetails = document.getElementById('view_connection_profile');
const detailsContent = document.getElementById('connection_profile_details_content');

View File

@@ -13,6 +13,7 @@
<i id="view_connection_profile" class="menu_button fa-solid fa-info-circle" title="View connection profile details" data-i18n="[title]View connection profile details"></i>
<i id="create_connection_profile" class="menu_button fa-solid fa-file-circle-plus" title="Create a new connection profile" data-i18n="[title]Create a new connection profile"></i>
<i id="update_connection_profile" class="menu_button fa-solid fa-save" title="Update a connection profile" data-i18n="[title]Update a connection profile"></i>
<i id="rename_connection_profile" class="menu_button fa-solid fa-pencil" title="Rename a connection profile" data-i18n="[title]Rename a connection profile"></i>
<i id="reload_connection_profile" class="menu_button fa-solid fa-recycle" title="Reload a connection profile" data-i18n="[title]Reload a connection profile"></i>
<i id="delete_connection_profile" class="menu_button fa-solid fa-trash-can" title="Delete a connection profile" data-i18n="[title]Delete a connection profile"></i>
</div>

View File

@@ -20,7 +20,7 @@ const STREAMING_UPDATE_INTERVAL = 10000;
const TALKINGCHECK_UPDATE_INTERVAL = 500;
const DEFAULT_FALLBACK_EXPRESSION = 'joy';
const FUNCTION_NAME = 'set_emotion';
const DEFAULT_LLM_PROMPT = 'Pause your roleplay. Classify the emotion of the last message. Output just one word, e.g. "joy" or "anger". Choose only one of the following labels: {{labels}}';
const DEFAULT_LLM_PROMPT = 'Ignore previous instructions. Classify the emotion of the last message. Output just one word, e.g. "joy" or "anger". Choose only one of the following labels: {{labels}}';
const DEFAULT_EXPRESSIONS = [
'talkinghead',
'admiration',
@@ -1161,7 +1161,7 @@ export async function getExpressionLabel(text, expressionsApi = extension_settin
}
const expressionsList = await getExpressionsList();
const prompt = substituteParamsExtended(String(customPrompt), { labels: expressionsList }) || await getLlmPrompt(expressionsList);
const prompt = substituteParamsExtended(customPrompt, { labels: expressionsList }) || await getLlmPrompt(expressionsList);
let functionResult = null;
eventSource.once(event_types.TEXT_COMPLETION_SETTINGS_READY, onTextGenSettingsReady);
eventSource.once(event_types.LLM_FUNCTION_TOOL_REGISTER, onFunctionToolRegister);

View File

@@ -102,7 +102,7 @@ const prompt_builders = {
RAW_NON_BLOCKING: 2,
};
const defaultPrompt = '[Pause your roleplay. Summarize the most important facts and events in the story so far. If a summary already exists in your memory, use that as a base and expand with new facts. Limit the summary to {{words}} words or less. Your response should include nothing but the summary.]';
const defaultPrompt = 'Ignore previous instructions. Summarize the most important facts and events in the story so far. If a summary already exists in your memory, use that as a base and expand with new facts. Limit the summary to {{words}} words or less. Your response should include nothing but the summary.';
const defaultTemplate = '[Summary: {{summary}}]';
const defaultSettings = {

View File

@@ -125,15 +125,14 @@ const messageTrigger = {
const promptTemplates = {
// Not really a prompt template, rather an outcome message template
[generationMode.MESSAGE]: '[{{char}} sends a picture that contains: {{prompt}}].',
/*OLD: [generationMode.CHARACTER]: "Pause your roleplay and provide comma-delimited list of phrases and keywords which describe {{char}}'s physical appearance and clothing. Ignore {{char}}'s personality traits, and chat history when crafting this description. End your response once the comma-delimited list is complete. Do not roleplay when writing this description, and do not attempt to continue the story.", */
[generationMode.CHARACTER]: 'In the next response I want you to provide only a detailed comma-delimited list of keywords and phrases which describe {{char}}. The list must include all of the following items in this order: name, species and race, gender, age, clothing, occupation, physical features and appearances. Do not include descriptions of non-visual qualities such as personality, movements, scents, mental traits, or anything which could not be seen in a still photograph. Do not write in full sentences. Prefix your description with the phrase \'full body portrait,\'',
//face-specific prompt
[generationMode.FACE]: 'In the next response I want you to provide only a detailed comma-delimited list of keywords and phrases which describe {{char}}. The list must include all of the following items in this order: name, species and race, gender, age, facial features and expressions, occupation, hair and hair accessories (if any), what they are wearing on their upper body (if anything). Do not describe anything below their neck. Do not include descriptions of non-visual qualities such as personality, movements, scents, mental traits, or anything which could not be seen in a still photograph. Do not write in full sentences. Prefix your description with the phrase \'close up facial portrait,\'',
//prompt for only the last message
[generationMode.USER]: 'Pause your roleplay and provide a detailed description of {{user}}\'s physical appearance from the perspective of {{char}} in the form of a comma-delimited list of keywords and phrases. The list must include all of the following items in this order: name, species and race, gender, age, clothing, occupation, physical features and appearances. Do not include descriptions of non-visual qualities such as personality, movements, scents, mental traits, or anything which could not be seen in a still photograph. Do not write in full sentences. Prefix your description with the phrase \'full body portrait,\'. Ignore the rest of the story when crafting this description. Do not roleplay as {{char}} when writing this description, and do not attempt to continue the story.',
[generationMode.SCENARIO]: 'Pause your roleplay and provide a detailed description for all of the following: a brief recap of recent events in the story, {{char}}\'s appearance, and {{char}}\'s surroundings. Do not roleplay while writing this description.',
[generationMode.USER]: 'Ignore previous instructions and provide a detailed description of {{user}}\'s physical appearance from the perspective of {{char}} in the form of a comma-delimited list of keywords and phrases. The list must include all of the following items in this order: name, species and race, gender, age, clothing, occupation, physical features and appearances. Do not include descriptions of non-visual qualities such as personality, movements, scents, mental traits, or anything which could not be seen in a still photograph. Do not write in full sentences. Prefix your description with the phrase \'full body portrait,\'. Ignore the rest of the story when crafting this description. Do not reply as {{char}} when writing this description, and do not attempt to continue the story.',
[generationMode.SCENARIO]: 'Ignore previous instructions and provide a detailed description for all of the following: a brief recap of recent events in the story, {{char}}\'s appearance, and {{char}}\'s surroundings. Do not reply as {{char}} while writing this description.',
[generationMode.NOW]: `Pause your roleplay. Your next response must be formatted as a single comma-delimited list of concise keywords. The list will describe of the visual details included in the last chat message.
[generationMode.NOW]: `Ignore previous instructions. Your next response must be formatted as a single comma-delimited list of concise keywords. The list will describe of the visual details included in the last chat message.
Only mention characters by using pronouns ('he','his','she','her','it','its') or neutral nouns ('male', 'the man', 'female', 'the woman').
@@ -157,12 +156,12 @@ const promptTemplates = {
A correctly formatted example response would be:
'(location),(character list by gender),(primary action), (relative character position) POV, (character 1's description and actions), (character 2's description and actions)'`,
[generationMode.RAW_LAST]: 'Pause your roleplay and provide ONLY the last chat message string back to me verbatim. Do not write anything after the string. Do not roleplay at all in your response. Do not continue the roleplay story.',
[generationMode.BACKGROUND]: 'Pause your roleplay and provide a detailed description of {{char}}\'s surroundings in the form of a comma-delimited list of keywords and phrases. The list must include all of the following items in this order: location, time of day, weather, lighting, and any other relevant details. Do not include descriptions of characters and non-visual qualities such as names, personality, movements, scents, mental traits, or anything which could not be seen in a still photograph. Do not write in full sentences. Prefix your description with the phrase \'background,\'. Ignore the rest of the story when crafting this description. Do not roleplay as {{user}} when writing this description, and do not attempt to continue the story.',
[generationMode.RAW_LAST]: 'Ignore previous instructions and provide ONLY the last chat message string back to me verbatim. Do not write anything after the string. Do not reply as {{char}} when writing this description, and do not attempt to continue the story.',
[generationMode.BACKGROUND]: 'Ignore previous instructions and provide a detailed description of {{char}}\'s surroundings in the form of a comma-delimited list of keywords and phrases. The list must include all of the following items in this order: location, time of day, weather, lighting, and any other relevant details. Do not include descriptions of characters and non-visual qualities such as names, personality, movements, scents, mental traits, or anything which could not be seen in a still photograph. Do not write in full sentences. Prefix your description with the phrase \'background,\'. Ignore the rest of the story when crafting this description. Do not reply as {{char}} when writing this description, and do not attempt to continue the story.',
[generationMode.FACE_MULTIMODAL]: 'Provide an exhaustive comma-separated list of tags describing the appearance of the character on this image in great detail. Start with "close-up portrait".',
[generationMode.CHARACTER_MULTIMODAL]: 'Provide an exhaustive comma-separated list of tags describing the appearance of the character on this image in great detail. Start with "full body portrait".',
[generationMode.USER_MULTIMODAL]: 'Provide an exhaustive comma-separated list of tags describing the appearance of the character on this image in great detail. Start with "full body portrait".',
[generationMode.FREE_EXTENDED]: 'Pause your roleplay and provide an exhaustive comma-separated list of tags describing the appearance of "{0}" in great detail. Start with {{charPrefix}} (sic) if the subject is associated with {{char}}.',
[generationMode.FREE_EXTENDED]: 'Ignore previous instructions and provide an exhaustive comma-separated list of tags describing the appearance of "{0}" in great detail. Start with {{charPrefix}} (sic) if the subject is associated with {{char}}.',
};
const defaultPrefix = 'best quality, absurdres, aesthetic,';
@@ -3813,7 +3812,10 @@ jQuery(async () => {
],
helpString: `
<div>
Requests to generate an image and posts it to chat (unless <code>quiet=true</code> argument is specified).</code>.
Requests to generate an image and posts it to chat (unless <code>quiet=true</code> argument is specified).
</div>
<div>
Supported arguments: <code>${Object.values(triggerWords).flat().join(', ')}</code>.
</div>
<div>
Anything else would trigger a "free mode" to make generate whatever you prompted. Example: <code>/imagine apple tree</code> would generate a picture of an apple tree. Returns a link to the generated image.

View File

@@ -56,7 +56,7 @@ const settings = {
summarize: false,
summarize_sent: false,
summary_source: 'main',
summary_prompt: 'Pause your roleplay. Summarize the most important parts of the message. Limit yourself to 250 words or less. Your response should include nothing but the summary.',
summary_prompt: 'Ignore previous instructions. Summarize the most important parts of the message. Limit yourself to 250 words or less. Your response should include nothing but the summary.',
force_chunk_delimiter: '',
// For chats
@@ -999,25 +999,6 @@ async function purgeAllVectorIndexes() {
}
}
async function isModelScopesEnabled() {
try {
const response = await fetch('/api/vector/scopes-enabled', {
method: 'GET',
headers: getVectorHeaders(),
});
if (!response.ok) {
return false;
}
const data = await response.json();
return data?.enabled ?? false;
} catch (error) {
console.error('Vectors: Failed to check model scopes', error);
return false;
}
}
function toggleSettings() {
$('#vectors_files_settings').toggle(!!settings.enabled_files);
$('#vectors_chats_settings').toggle(!!settings.enabled_chats);
@@ -1282,7 +1263,6 @@ jQuery(async () => {
}
Object.assign(settings, extension_settings.vectors);
const scopesEnabled = await isModelScopesEnabled();
// Migrate from TensorFlow to Transformers
settings.source = settings.source !== 'local' ? settings.source : 'transformers';
@@ -1294,7 +1274,6 @@ jQuery(async () => {
saveSettingsDebounced();
toggleSettings();
});
$('#vectors_modelWarning').hide();
$('#vectors_enabled_files').prop('checked', settings.enabled_files).on('input', () => {
settings.enabled_files = $('#vectors_enabled_files').prop('checked');
Object.assign(extension_settings.vectors, settings);
@@ -1334,31 +1313,26 @@ jQuery(async () => {
saveSettingsDebounced();
});
$('#vectors_togetherai_model').val(settings.togetherai_model).on('change', () => {
!scopesEnabled && $('#vectors_modelWarning').show();
settings.togetherai_model = String($('#vectors_togetherai_model').val());
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});
$('#vectors_openai_model').val(settings.openai_model).on('change', () => {
!scopesEnabled && $('#vectors_modelWarning').show();
settings.openai_model = String($('#vectors_openai_model').val());
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});
$('#vectors_cohere_model').val(settings.cohere_model).on('change', () => {
!scopesEnabled && $('#vectors_modelWarning').show();
settings.cohere_model = String($('#vectors_cohere_model').val());
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});
$('#vectors_ollama_model').val(settings.ollama_model).on('input', () => {
!scopesEnabled && $('#vectors_modelWarning').show();
settings.ollama_model = String($('#vectors_ollama_model').val());
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});
$('#vectors_vllm_model').val(settings.vllm_model).on('input', () => {
!scopesEnabled && $('#vectors_modelWarning').show();
settings.vllm_model = String($('#vectors_vllm_model').val());
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();

View File

@@ -96,14 +96,6 @@
</i>
</div>
<small id="vectors_modelWarning">
<i class="fa-solid fa-exclamation-triangle"></i>
<span>
Set <code>vectors.enableModelScopes</code> to true in config.yaml to switch between vectorization models without needing to purge existing vectors.
This option will soon be enabled by default.
</span>
</small>
<div class="flex-container alignItemsCenter" id="nomicai_apiKey">
<label for="api_key_nomicai" class="flex1">
<span data-i18n="NomicAI API Key">NomicAI API Key</span>

View File

@@ -23,7 +23,6 @@ export const names_behavior_types = {
const controls = [
{ id: 'instruct_enabled', property: 'enabled', isCheckbox: true },
{ id: 'instruct_wrap', property: 'wrap', isCheckbox: true },
{ id: 'instruct_system_prompt', property: 'system_prompt', isCheckbox: false },
{ id: 'instruct_system_sequence_prefix', property: 'system_sequence_prefix', isCheckbox: false },
{ id: 'instruct_system_sequence_suffix', property: 'system_sequence_suffix', isCheckbox: false },
{ id: 'instruct_input_sequence', property: 'input_sequence', isCheckbox: false },
@@ -35,7 +34,6 @@ const controls = [
{ id: 'instruct_last_system_sequence', property: 'last_system_sequence', isCheckbox: false },
{ id: 'instruct_user_alignment_message', property: 'user_alignment_message', isCheckbox: false },
{ id: 'instruct_stop_sequence', property: 'stop_sequence', isCheckbox: false },
{ id: 'instruct_names', property: 'names', isCheckbox: true },
{ id: 'instruct_first_output_sequence', property: 'first_output_sequence', isCheckbox: false },
{ id: 'instruct_last_output_sequence', property: 'last_output_sequence', isCheckbox: false },
{ id: 'instruct_first_input_sequence', property: 'first_input_sequence', isCheckbox: false },
@@ -43,7 +41,7 @@ const controls = [
{ id: 'instruct_activation_regex', property: 'activation_regex', isCheckbox: false },
{ id: 'instruct_bind_to_context', property: 'bind_to_context', isCheckbox: true },
{ id: 'instruct_skip_examples', property: 'skip_examples', isCheckbox: true },
{ id: 'instruct_names_behavior input[name="names_behavior"]', property: 'names_behavior', isCheckbox: false },
{ id: 'instruct_names_behavior', property: 'names_behavior', isCheckbox: false },
{ id: 'instruct_system_same_as_user', property: 'system_same_as_user', isCheckbox: true, trigger: true },
];
@@ -109,9 +107,10 @@ export async function loadInstructMode(data) {
if (control.isCheckbox) {
$element.prop('checked', power_user.instruct[control.property]);
} else if (control.property === 'names_behavior') {
const behavior = power_user.instruct[control.property];
$element.filter(`[value="${behavior}"]`).prop('checked', true);
} else if ($element.is('select')) {
const value = power_user.instruct[control.property];
$element.val(value);
$element.filter(`[value="${value}"]`).prop('checked', true);
} else {
$element.val(power_user.instruct[control.property]);
}
@@ -137,12 +136,6 @@ export async function loadInstructMode(data) {
option.selected = name === power_user.instruct.preset;
$('#instruct_presets').append(option);
});
highlightDefaultPreset();
}
function highlightDefaultPreset() {
$('#instruct_set_default').toggleClass('default', power_user.default_instruct === power_user.instruct.preset);
}
/**
@@ -159,13 +152,6 @@ export function selectContextPreset(preset, { quiet = false, isAuto = false } =
!quiet && toastr.info(`Context Template: "${preset}" ${isAuto ? 'auto-' : ''}selected`);
}
// If instruct mode is disabled, enable it, except for default context template
if (!power_user.instruct.enabled && preset !== power_user.default_context) {
power_user.instruct.enabled = true;
$('#instruct_enabled').prop('checked', true).trigger('change');
!quiet && toastr.info('Instruct Mode enabled');
}
saveSettingsDebounced();
}
@@ -235,13 +221,6 @@ export function autoSelectInstructPreset(modelId) {
}
}
}
if (power_user.instruct.bind_to_context && power_user.default_instruct && power_user.instruct.preset !== power_user.default_instruct) {
if (instruct_presets.some(p => p.name === power_user.default_instruct)) {
console.log(`Instruct mode: default preset "${power_user.default_instruct}" selected`);
$('#instruct_presets').val(power_user.default_instruct).trigger('change');
}
}
}
return false;
@@ -580,10 +559,6 @@ function selectMatchingContextTemplate(name) {
break;
}
}
if (!foundMatch) {
// If no match was found, select default context preset
selectContextPreset(power_user.default_context, { isAuto: true });
}
}
/**
@@ -597,9 +572,13 @@ export function replaceInstructMacros(input, env) {
if (!input) {
return '';
}
const syspromptMacros = {
'systemPrompt': (power_user.prefer_character_prompt && env.charPrompt ? env.charPrompt : power_user.sysprompt.content),
'defaultSystemPrompt|instructSystem|instructSystemPrompt': power_user.sysprompt.content,
};
const instructMacros = {
'systemPrompt': (power_user.prefer_character_prompt && env.charPrompt ? env.charPrompt : power_user.instruct.system_prompt),
'instructSystem|instructSystemPrompt': power_user.instruct.system_prompt,
'instructSystemPromptPrefix': power_user.instruct.system_sequence_prefix,
'instructSystemPromptSuffix': power_user.instruct.system_sequence_suffix,
'instructInput|instructUserPrefix': power_user.instruct.input_sequence,
@@ -622,6 +601,11 @@ export function replaceInstructMacros(input, env) {
input = input.replace(regex, power_user.instruct.enabled ? value : '');
}
for (const [placeholder, value] of Object.entries(syspromptMacros)) {
const regex = new RegExp(`{{(${placeholder})}}`, 'gi');
input = input.replace(regex, power_user.sysprompt.enabled ? value : '');
}
input = input.replace(/{{exampleSeparator}}/gi, power_user.context.example_separator);
input = input.replace(/{{chatStart}}/gi, power_user.context.chat_start);
@@ -629,20 +613,6 @@ export function replaceInstructMacros(input, env) {
}
jQuery(() => {
$('#instruct_set_default').on('click', function () {
if (power_user.instruct.preset === power_user.default_instruct) {
power_user.default_instruct = null;
$(this).removeClass('default');
toastr.info('Default instruct template cleared');
} else {
power_user.default_instruct = power_user.instruct.preset;
$(this).addClass('default');
toastr.info(`Default instruct template set to ${power_user.default_instruct}`);
}
saveSettingsDebounced();
});
$('#instruct_system_same_as_user').on('input', function () {
const state = !!$(this).prop('checked');
if (state) {
@@ -671,9 +641,6 @@ jQuery(() => {
// When instruct mode gets enabled, select context template matching selected instruct preset
if (power_user.instruct.enabled) {
selectMatchingContextTemplate(power_user.instruct.preset);
} else {
// When instruct mode gets disabled, select default context preset
selectContextPreset(power_user.default_context);
}
});
@@ -699,9 +666,10 @@ jQuery(() => {
if (control.isCheckbox) {
$element.prop('checked', power_user.instruct[control.property]).trigger('input');
} else if (control.property === 'names_behavior') {
const behavior = power_user.instruct[control.property];
$element.filter(`[value="${behavior}"]`).prop('checked', true).trigger('input');
} else if ($element.is('select')) {
const value = power_user.instruct[control.property];
$element.val(value);
$element.filter(`[value="${value}"]`).prop('checked', true).trigger('input');
} else {
$element.val(power_user.instruct[control.property]);
$element.trigger('input');
@@ -713,7 +681,13 @@ jQuery(() => {
// Select matching context template
selectMatchingContextTemplate(name);
}
highlightDefaultPreset();
});
if (!CSS.supports('field-sizing', 'content')) {
$('#InstructSequencesColumn details').on('toggle', function () {
if ($(this).prop('open')) {
resetScrollHeight($(this).find('textarea'));
}
});
}
});

View File

@@ -5,9 +5,7 @@ import {
novelai_setting_names,
saveSettingsDebounced,
setGenerationParamsFromPreset,
substituteParams,
} from '../script.js';
import { getCfgPrompt } from './cfg-scale.js';
import { MAX_CONTEXT_DEFAULT, MAX_RESPONSE_DEFAULT, power_user } from './power-user.js';
import { getTextTokens, tokenizers } from './tokenizers.js';
import { getEventSourceStream } from './sse-stream.js';
@@ -24,6 +22,7 @@ const maximum_output_length = 150;
const default_presets = {
'clio-v1': 'Talker-Chat-Clio',
'kayra-v1': 'Carefree-Kayra',
'llama-3-erato-v1': 'Erato-Dragonfruit',
};
export const nai_settings = {
@@ -48,7 +47,6 @@ export const nai_settings = {
streaming_novel: false,
preamble: default_preamble,
prefix: '',
cfg_uc: '',
banned_tokens: '',
order: default_order,
logit_bias: [],
@@ -61,9 +59,22 @@ const nai_tiers = {
3: 'Opus',
};
const samplers = {
temperature: 0,
top_k: 1,
top_p: 2,
tfs: 3,
top_a: 4,
typical_p: 5,
// removed samplers were here
mirostat: 8,
math1: 9,
min_p: 10,
};
let novel_data = null;
let badWordsCache = {};
const BIAS_KEY = '#novel_api-settings';
const BIAS_KEY = '#range_block_novel';
export function setNovelData(data) {
novel_data = data;
@@ -95,6 +106,37 @@ export function getKayraMaxResponseTokens() {
return maximum_output_length;
}
export function convertNovelPreset(data) {
if (!data || typeof data !== 'object' || data.presetVersion !== 3 || !data.parameters || typeof data.parameters !== 'object') {
return data;
}
return {
max_context: 8000,
temperature: data.parameters.temperature,
max_length: data.parameters.max_length,
min_length: data.parameters.min_length,
top_k: data.parameters.top_k,
top_p: data.parameters.top_p,
top_a: data.parameters.top_a,
typical_p: data.parameters.typical_p,
tail_free_sampling: data.parameters.tail_free_sampling,
repetition_penalty: data.parameters.repetition_penalty,
repetition_penalty_range: data.parameters.repetition_penalty_range,
repetition_penalty_slope: data.parameters.repetition_penalty_slope,
repetition_penalty_frequency: data.parameters.repetition_penalty_frequency,
repetition_penalty_presence: data.parameters.repetition_penalty_presence,
phrase_rep_pen: data.parameters.phrase_rep_pen,
mirostat_lr: data.parameters.mirostat_lr,
mirostat_tau: data.parameters.mirostat_tau,
math1_temp: data.parameters.math1_temp,
math1_quad: data.parameters.math1_quad,
math1_quad_entropy_scale: data.parameters.math1_quad_entropy_scale,
min_p: data.parameters.min_p,
order: Array.isArray(data.parameters.order) ? data.parameters.order.filter(s => s.enabled && Object.keys(samplers).includes(s.id)).map(s => samplers[s.id]) : default_order,
};
}
export function getNovelTier() {
return nai_tiers[novel_data?.tier] ?? 'no_connection';
}
@@ -145,12 +187,10 @@ export function loadNovelPreset(preset) {
nai_settings.top_a = preset.top_a;
nai_settings.typical_p = preset.typical_p;
nai_settings.min_length = preset.min_length;
nai_settings.cfg_scale = preset.cfg_scale;
nai_settings.phrase_rep_pen = preset.phrase_rep_pen;
nai_settings.mirostat_lr = preset.mirostat_lr;
nai_settings.mirostat_tau = preset.mirostat_tau;
nai_settings.prefix = preset.prefix;
nai_settings.cfg_uc = preset.cfg_uc || '';
nai_settings.banned_tokens = preset.banned_tokens || '';
nai_settings.order = preset.order || default_order;
nai_settings.logit_bias = preset.logit_bias || [];
@@ -186,13 +226,11 @@ export function loadNovelSettings(settings) {
nai_settings.typical_p = settings.typical_p;
nai_settings.min_length = settings.min_length;
nai_settings.phrase_rep_pen = settings.phrase_rep_pen;
nai_settings.cfg_scale = settings.cfg_scale;
nai_settings.mirostat_lr = settings.mirostat_lr;
nai_settings.mirostat_tau = settings.mirostat_tau;
nai_settings.streaming_novel = !!settings.streaming_novel;
nai_settings.preamble = settings.preamble || default_preamble;
nai_settings.prefix = settings.prefix;
nai_settings.cfg_uc = settings.cfg_uc || '';
nai_settings.banned_tokens = settings.banned_tokens || '';
nai_settings.order = settings.order || default_order;
nai_settings.logit_bias = settings.logit_bias || [];
@@ -226,8 +264,6 @@ function loadNovelSettingsUi(ui_settings) {
$('#top_a_counter_novel').val(Number(ui_settings.top_a).toFixed(3));
$('#typical_p_novel').val(ui_settings.typical_p);
$('#typical_p_counter_novel').val(Number(ui_settings.typical_p).toFixed(3));
$('#cfg_scale_novel').val(ui_settings.cfg_scale);
$('#cfg_scale_counter_novel').val(Number(ui_settings.cfg_scale).toFixed(2));
$('#phrase_rep_pen_novel').val(ui_settings.phrase_rep_pen || 'off');
$('#mirostat_lr_novel').val(ui_settings.mirostat_lr);
$('#mirostat_lr_counter_novel').val(Number(ui_settings.mirostat_lr).toFixed(2));
@@ -237,7 +273,6 @@ function loadNovelSettingsUi(ui_settings) {
$('#min_length_counter_novel').val(Number(ui_settings.min_length).toFixed(0));
$('#nai_preamble_textarea').val(ui_settings.preamble);
$('#nai_prefix').val(ui_settings.prefix || 'vanilla');
$('#nai_cfg_uc').val(ui_settings.cfg_uc || '');
$('#nai_banned_tokens').val(ui_settings.banned_tokens || '');
$('#min_p_novel').val(ui_settings.min_p);
$('#min_p_counter_novel').val(Number(ui_settings.min_p).toFixed(3));
@@ -332,24 +367,12 @@ const sliders = [
format: (val) => Number(val).toFixed(2),
setValue: (val) => { nai_settings.mirostat_lr = Number(val).toFixed(2); },
},
{
sliderId: '#cfg_scale_novel',
counterId: '#cfg_scale_counter_novel',
format: (val) => `${val}`,
setValue: (val) => { nai_settings.cfg_scale = Number(val).toFixed(2); },
},
{
sliderId: '#min_length_novel',
counterId: '#min_length_counter_novel',
format: (val) => `${val}`,
setValue: (val) => { nai_settings.min_length = Number(val).toFixed(0); },
},
{
sliderId: '#nai_cfg_uc',
counterId: '#nai_cfg_uc_counter',
format: (val) => val,
setValue: (val) => { nai_settings.cfg_uc = val; },
},
{
sliderId: '#nai_banned_tokens',
counterId: '#nai_banned_tokens_counter',
@@ -467,11 +490,8 @@ function getBadWordPermutations(text) {
return result.filter(onlyUnique);
}
export function getNovelGenerationData(finalPrompt, settings, maxLength, isImpersonate, isContinue, cfgValues, type) {
export function getNovelGenerationData(finalPrompt, settings, maxLength, isImpersonate, isContinue, _cfgValues, type) {
console.debug('NovelAI generation data for', type);
if (cfgValues && cfgValues.guidanceScale && cfgValues.guidanceScale?.value !== 1) {
cfgValues.negativePrompt = (getCfgPrompt(cfgValues.guidanceScale, true))?.value;
}
const tokenizerType = getTokenizerTypeForModel(nai_settings.model_novel);
const stopSequences = (tokenizerType !== tokenizers.NONE)
@@ -495,7 +515,14 @@ export function getNovelGenerationData(finalPrompt, settings, maxLength, isImper
console.log(finalPrompt);
}
const adjustedMaxLength = nai_settings.model_novel.includes('kayra') ? getKayraMaxResponseTokens() : maximum_output_length;
const isKayra = nai_settings.model_novel.includes('kayra');
const isErato = nai_settings.model_novel.includes('erato');
if (isErato) {
finalPrompt = '<|startoftext|>' + finalPrompt;
}
const adjustedMaxLength = (isKayra || isErato) ? getKayraMaxResponseTokens() : maximum_output_length;
return {
'input': finalPrompt,
@@ -520,8 +547,6 @@ export function getNovelGenerationData(finalPrompt, settings, maxLength, isImper
'typical_p': Number(nai_settings.typical_p),
'mirostat_lr': Number(nai_settings.mirostat_lr),
'mirostat_tau': Number(nai_settings.mirostat_tau),
'cfg_scale': cfgValues?.guidanceScale?.value ?? Number(nai_settings.cfg_scale),
'cfg_uc': cfgValues?.negativePrompt ?? substituteParams(nai_settings.cfg_uc) ?? '',
'phrase_rep_pen': nai_settings.phrase_rep_pen,
'stop_sequences': stopSequences,
'bad_words_ids': badWordIds,
@@ -540,7 +565,8 @@ function selectPrefix(selected_prefix, finalPrompt) {
let useInstruct = false;
const clio = nai_settings.model_novel.includes('clio');
const kayra = nai_settings.model_novel.includes('kayra');
const isNewModel = clio || kayra;
const erato = nai_settings.model_novel.includes('erato');
const isNewModel = clio || kayra || erato;
if (isNewModel) {
// NovelAI claims they scan backwards 1000 characters (not tokens!) to look for instruct brackets. That's really short.
@@ -559,6 +585,9 @@ function getTokenizerTypeForModel(model) {
if (model.includes('kayra')) {
return tokenizers.NERD2;
}
if (model.includes('erato')) {
return tokenizers.LLAMA3;
}
return tokenizers.NONE;
}

View File

@@ -61,12 +61,6 @@ import {
stringFormat,
} from './utils.js';
import { countTokensOpenAI, getTokenizerModel } from './tokenizers.js';
import {
formatInstructModeChat,
formatInstructModeExamples,
formatInstructModePrompt,
formatInstructModeSystemPrompt,
} from './instruct-mode.js';
import { isMobile } from './RossAscends-mods.js';
import { saveLogprobsForActiveMessage } from './logprobs.js';
import { SlashCommandParser } from './slash-commands/SlashCommandParser.js';
@@ -263,7 +257,6 @@ const default_settings = {
windowai_model: '',
openrouter_model: openrouter_website_model,
openrouter_use_fallback: false,
openrouter_force_instruct: false,
openrouter_group_models: false,
openrouter_sort_models: 'alphabetically',
openrouter_providers: [],
@@ -341,7 +334,6 @@ const oai_settings = {
windowai_model: '',
openrouter_model: openrouter_website_model,
openrouter_use_fallback: false,
openrouter_force_instruct: false,
openrouter_group_models: false,
openrouter_sort_models: 'alphabetically',
openrouter_providers: [],
@@ -416,108 +408,6 @@ async function validateReverseProxy() {
localStorage.setItem(rememberKey, String(true));
}
/**
* Converts the Chat Completion object to an Instruct Mode prompt string.
* @param {object[]} messages Array of messages
* @param {string} type Generation type
* @returns {string} Text completion prompt
*/
function convertChatCompletionToInstruct(messages, type) {
const newChatPrompts = [
substituteParams(oai_settings.new_chat_prompt),
substituteParams(oai_settings.new_example_chat_prompt),
substituteParams(oai_settings.new_group_chat_prompt),
];
messages = messages.filter(x => !newChatPrompts.includes(x.content));
let chatMessagesText = '';
let systemPromptText = '';
let examplesText = '';
function getPrefix(message) {
let prefix;
if (message.role === 'user' || message.name === 'example_user') {
if (selected_group) {
prefix = '';
} else if (message.name === 'example_user') {
prefix = name1;
} else {
prefix = message.name ?? name1;
}
}
if (message.role === 'assistant' || message.name === 'example_assistant') {
if (selected_group) {
prefix = '';
}
else if (message.name === 'example_assistant') {
prefix = name2;
} else {
prefix = message.name ?? name2;
}
}
return prefix;
}
function toString(message) {
if (message.role === 'system' && !message.name) {
return message.content;
}
const prefix = getPrefix(message);
return prefix ? `${prefix}: ${message.content}` : message.content;
}
const firstChatMessage = messages.findIndex(message => message.role === 'assistant' || message.role === 'user');
const systemPromptMessages = messages.slice(0, firstChatMessage).filter(message => message.role === 'system' && !message.name);
if (systemPromptMessages.length) {
systemPromptText = systemPromptMessages.map(message => message.content).join('\n');
systemPromptText = formatInstructModeSystemPrompt(systemPromptText);
}
const exampleMessages = messages.filter(x => x.role === 'system' && (x.name === 'example_user' || x.name === 'example_assistant'));
if (exampleMessages.length) {
const blockHeading = power_user.context.example_separator ? (substituteParams(power_user.context.example_separator) + '\n') : '';
const examplesArray = exampleMessages.map(m => '<START>\n' + toString(m));
examplesText = blockHeading + formatInstructModeExamples(examplesArray, name1, name2).join('');
}
const chatMessages = messages.slice(firstChatMessage);
if (chatMessages.length) {
chatMessagesText = substituteParams(power_user.context.chat_start) + '\n';
for (const message of chatMessages) {
const name = getPrefix(message);
const isUser = message.role === 'user';
const isNarrator = message.role === 'system';
chatMessagesText += formatInstructModeChat(name, message.content, isUser, isNarrator, '', name1, name2, false);
}
}
const isImpersonate = type === 'impersonate';
const isContinue = type === 'continue';
const isQuiet = type === 'quiet';
const isQuietToLoud = false; // Quiet to loud not implemented for Chat Completion
const promptName = isImpersonate ? name1 : name2;
const promptLine = isContinue ? '' : formatInstructModePrompt(promptName, isImpersonate, '', name1, name2, isQuiet, isQuietToLoud).trimStart();
let prompt = [systemPromptText, examplesText, chatMessagesText, promptLine]
.filter(x => x)
.map(x => x.endsWith('\n') ? x : `${x}\n`)
.join('');
if (isContinue) {
prompt = prompt.replace(/\n$/, '');
}
return prompt;
}
/**
* Formats chat messages into chat completion messages.
* @param {object[]} chat - Array containing all messages.
@@ -761,10 +651,6 @@ function populationInjectionPrompts(prompts, messages) {
return messages;
}
export function isOpenRouterWithInstruct() {
return oai_settings.chat_completion_source === chat_completion_sources.OPENROUTER && oai_settings.openrouter_force_instruct && power_user.instruct.enabled;
}
/**
* Populates the chat history of the conversation.
* @param {object[]} messages - Array containing all messages.
@@ -795,8 +681,7 @@ async function populateChatHistory(messages, prompts, chatCompletion, type = nul
// Reserve budget for continue nudge
let continueMessage = null;
const instruct = isOpenRouterWithInstruct();
if (type === 'continue' && cyclePrompt && !instruct && !oai_settings.continue_prefill) {
if (type === 'continue' && cyclePrompt && !oai_settings.continue_prefill) {
const promptObject = {
identifier: 'continueNudge',
role: 'system',
@@ -1811,7 +1696,7 @@ async function sendOpenAIRequest(type, messages, signal) {
const isPerplexity = oai_settings.chat_completion_source == chat_completion_sources.PERPLEXITY;
const isGroq = oai_settings.chat_completion_source == chat_completion_sources.GROQ;
const is01AI = oai_settings.chat_completion_source == chat_completion_sources.ZEROONEAI;
const isTextCompletion = (isOAI && textCompletionModels.includes(oai_settings.openai_model)) || (isOpenRouter && oai_settings.openrouter_force_instruct && power_user.instruct.enabled);
const isTextCompletion = isOAI && textCompletionModels.includes(oai_settings.openai_model);
const isQuiet = type === 'quiet';
const isImpersonate = type === 'impersonate';
const isContinue = type === 'continue';
@@ -1819,11 +1704,6 @@ async function sendOpenAIRequest(type, messages, signal) {
const useLogprobs = !!power_user.request_token_probabilities;
const canMultiSwipe = oai_settings.n > 1 && !isContinue && !isImpersonate && !isQuiet && (isOAI || isCustom);
if (isTextCompletion && isOpenRouter) {
messages = convertChatCompletionToInstruct(messages, type);
replaceItemizedPromptText(messageId, messages);
}
// If we're using the window.ai extension, use that instead
// Doesn't support logit bias yet
if (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI) {
@@ -1915,10 +1795,9 @@ async function sendOpenAIRequest(type, messages, signal) {
}
if (isGoogle) {
const nameStopString = isImpersonate ? `\n${name2}:` : `\n${name1}:`;
const stopStringsLimit = 3; // 5 - 2 (nameStopString and new_chat_prompt)
const stopStringsLimit = 5;
generate_data['top_k'] = Number(oai_settings.top_k_openai);
generate_data['stop'] = [nameStopString, substituteParams(oai_settings.new_chat_prompt), ...getCustomStoppingStrings(stopStringsLimit)];
generate_data['stop'] = getCustomStoppingStrings(stopStringsLimit).slice(0, stopStringsLimit).filter(x => x.length >= 1 && x.length <= 16);
generate_data['use_makersuite_sysprompt'] = oai_settings.use_makersuite_sysprompt;
}
@@ -3063,7 +2942,6 @@ function loadOpenAISettings(data, settings) {
oai_settings.openrouter_group_models = settings.openrouter_group_models ?? default_settings.openrouter_group_models;
oai_settings.openrouter_sort_models = settings.openrouter_sort_models ?? default_settings.openrouter_sort_models;
oai_settings.openrouter_use_fallback = settings.openrouter_use_fallback ?? default_settings.openrouter_use_fallback;
oai_settings.openrouter_force_instruct = settings.openrouter_force_instruct ?? default_settings.openrouter_force_instruct;
oai_settings.openrouter_allow_fallbacks = settings.openrouter_allow_fallbacks ?? default_settings.openrouter_allow_fallbacks;
oai_settings.ai21_model = settings.ai21_model ?? default_settings.ai21_model;
oai_settings.mistralai_model = settings.mistralai_model ?? default_settings.mistralai_model;
@@ -3170,7 +3048,6 @@ function loadOpenAISettings(data, settings) {
$('#use_makersuite_sysprompt').prop('checked', oai_settings.use_makersuite_sysprompt);
$('#scale-alt').prop('checked', oai_settings.use_alt_scale);
$('#openrouter_use_fallback').prop('checked', oai_settings.openrouter_use_fallback);
$('#openrouter_force_instruct').prop('checked', oai_settings.openrouter_force_instruct);
$('#openrouter_group_models').prop('checked', oai_settings.openrouter_group_models);
$('#openrouter_allow_fallbacks').prop('checked', oai_settings.openrouter_allow_fallbacks);
$('#openrouter_providers_chat').val(oai_settings.openrouter_providers).trigger('change');
@@ -3398,7 +3275,6 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
windowai_model: settings.windowai_model,
openrouter_model: settings.openrouter_model,
openrouter_use_fallback: settings.openrouter_use_fallback,
openrouter_force_instruct: settings.openrouter_force_instruct,
openrouter_group_models: settings.openrouter_group_models,
openrouter_sort_models: settings.openrouter_sort_models,
openrouter_providers: settings.openrouter_providers,
@@ -3835,7 +3711,6 @@ function onSettingsPresetChange() {
windowai_model: ['#model_windowai_select', 'windowai_model', false],
openrouter_model: ['#model_openrouter_select', 'openrouter_model', false],
openrouter_use_fallback: ['#openrouter_use_fallback', 'openrouter_use_fallback', true],
openrouter_force_instruct: ['#openrouter_force_instruct', 'openrouter_force_instruct', true],
openrouter_group_models: ['#openrouter_group_models', 'openrouter_group_models', false],
openrouter_sort_models: ['#openrouter_sort_models', 'openrouter_sort_models', false],
openrouter_providers: ['#openrouter_providers_chat', 'openrouter_providers', false],
@@ -4128,6 +4003,8 @@ async function onModelChange() {
$('#openai_max_context').attr('max', max_2mil);
} else if (value.includes('gemini-1.5-pro')) {
$('#openai_max_context').attr('max', max_2mil);
} else if (value.match('gemini-1.5-flash-002')) {
$('#openai_max_context').attr('max', max_2mil);
} else if (value.includes('gemini-1.5-flash')) {
$('#openai_max_context').attr('max', max_1mil);
} else if (value.includes('gemini-1.0-pro-vision') || value === 'gemini-pro-vision') {
@@ -4774,12 +4651,15 @@ export function isImageInliningSupported() {
'gemini-1.5-flash',
'gemini-1.5-flash-latest',
'gemini-1.5-flash-001',
'gemini-1.5-flash-002',
'gemini-1.5-flash-exp-0827',
'gemini-1.5-flash-8b-exp-0827',
'gemini-1.5-flash-8b-exp-0924',
'gemini-1.0-pro-vision-latest',
'gemini-1.5-pro',
'gemini-1.5-pro-latest',
'gemini-1.5-pro-001',
'gemini-1.5-pro-002',
'gemini-1.5-pro-exp-0801',
'gemini-1.5-pro-exp-0827',
'gemini-pro-vision',
@@ -4802,7 +4682,7 @@ export function isImageInliningSupported() {
case chat_completion_sources.CLAUDE:
return visionSupportedModels.some(model => oai_settings.claude_model.includes(model));
case chat_completion_sources.OPENROUTER:
return !oai_settings.openrouter_force_instruct;
return true;
case chat_completion_sources.CUSTOM:
return true;
case chat_completion_sources.ZEROONEAI:
@@ -4937,7 +4817,7 @@ function runProxyCallback(_, value) {
return foundName;
}
export function initOpenai() {
export function initOpenAI() {
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'proxy',
callback: runProxyCallback,
@@ -4953,9 +4833,7 @@ export function initOpenai() {
],
helpString: 'Sets a proxy preset by name.',
}));
}
$(document).ready(async function () {
$('#test_api_button').on('click', testApiConnection);
$('#scale-alt').on('change', function () {
@@ -5226,11 +5104,6 @@ $(document).ready(async function () {
saveSettingsDebounced();
});
$('#openrouter_force_instruct').on('input', function () {
oai_settings.openrouter_force_instruct = !!$(this).prop('checked');
saveSettingsDebounced();
});
$('#openrouter_group_models').on('input', function () {
oai_settings.openrouter_group_models = !!$(this).prop('checked');
saveSettingsDebounced();
@@ -5419,4 +5292,4 @@ $(document).ready(async function () {
$('#openai_proxy_password_show').on('click', onProxyPasswordShowClick);
$('#customize_additional_parameters').on('click', onCustomizeParametersClick);
$('#openai_proxy_preset').on('change', onProxyPresetChange);
});
}

View File

@@ -50,6 +50,7 @@ import { AUTOCOMPLETE_SELECT_KEY, AUTOCOMPLETE_WIDTH } from './autocomplete/Auto
import { SlashCommandEnumValue, enumTypes } from './slash-commands/SlashCommandEnumValue.js';
import { commonEnumProviders, enumIcons } from './slash-commands/SlashCommandCommonEnumsProvider.js';
import { POPUP_TYPE, callGenericPopup } from './popup.js';
import { loadSystemPrompts } from './sysprompt.js';
export {
loadPowerUserSettings,
@@ -114,7 +115,6 @@ let power_user = {
pin_examples: false,
strip_examples: false,
trim_sentences: false,
include_newline: false,
always_force_name2: false,
user_prompt_bias: '',
show_user_prompt_bias: true,
@@ -206,11 +206,9 @@ let power_user = {
disable_group_trimming: false,
single_line: false,
default_instruct: '',
instruct: {
enabled: false,
preset: 'Alpaca',
system_prompt: 'Below is an instruction that describes a task. Write a response that appropriately completes the request.\n\nWrite {{char}}\'s next reply in a fictional roleplay chat between {{user}} and {{char}}.\n',
input_sequence: '### Instruction:',
input_suffix: '',
output_sequence: '### Response:',
@@ -234,7 +232,6 @@ let power_user = {
separator_sequence: '',
},
default_context: 'Default',
context: {
preset: 'Default',
story_string: defaultStoryString,
@@ -245,6 +242,12 @@ let power_user = {
names_as_stop_strings: true,
},
sysprompt: {
enabled: true,
name: 'Neutral - Chat',
content: 'Write {{char}}\'s next reply in a fictional chat between {{char}} and {{user}}.',
},
personas: {},
default_persona: null,
persona_descriptions: {},
@@ -317,7 +320,6 @@ const contextControls = [
// Existing power user settings
{ id: 'always-force-name2-checkbox', property: 'always_force_name2', isCheckbox: true, isGlobalSetting: true, defaultValue: true },
{ id: 'trim_sentences_checkbox', property: 'trim_sentences', isCheckbox: true, isGlobalSetting: true, defaultValue: false },
{ id: 'include_newline_checkbox', property: 'include_newline', isCheckbox: true, isGlobalSetting: true, defaultValue: false },
{ id: 'single_line', property: 'single_line', isCheckbox: true, isGlobalSetting: true, defaultValue: false },
];
@@ -1487,7 +1489,6 @@ async function loadPowerUserSettings(settings, data) {
$('#collapse-newlines-checkbox').prop('checked', power_user.collapse_newlines);
$('#always-force-name2-checkbox').prop('checked', power_user.always_force_name2);
$('#trim_sentences_checkbox').prop('checked', power_user.trim_sentences);
$('#include_newline_checkbox').prop('checked', power_user.include_newline);
$('#render_formulas').prop('checked', power_user.render_formulas);
$('#disable_group_trimming').prop('checked', power_user.disable_group_trimming);
$('#markdown_escape_strings').val(power_user.markdown_escape_strings);
@@ -1598,6 +1599,7 @@ async function loadPowerUserSettings(settings, data) {
reloadMarkdownProcessor(power_user.render_formulas);
await loadInstructMode(data);
await loadContextSettings();
await loadSystemPrompts(data);
loadMaxContextUnlocked();
switchWaifuMode();
switchSpoilerMode();
@@ -1817,29 +1819,8 @@ async function loadContextSettings() {
}
}
highlightDefaultContext();
saveSettingsDebounced();
});
$('#context_set_default').on('click', function () {
if (power_user.context.preset !== power_user.default_context) {
power_user.default_context = power_user.context.preset;
$(this).addClass('default');
toastr.info(`Default context template set to ${power_user.default_context}`);
highlightDefaultContext();
saveSettingsDebounced();
}
});
highlightDefaultContext();
}
function highlightDefaultContext() {
$('#context_set_default').toggleClass('default', power_user.default_context === power_user.context.preset);
$('#context_set_default').toggleClass('disabled', power_user.default_context === power_user.context.preset);
$('#context_delete_preset').toggleClass('disabled', power_user.default_context === power_user.context.preset);
}
/**
@@ -3069,19 +3050,6 @@ $(document).ready(() => {
// if trim sentences is unchecked, include newline must be unchecked
$('#trim_sentences_checkbox').change(function () {
power_user.trim_sentences = !!$(this).prop('checked');
if (!$(this).prop('checked')) {
$('#include_newline_checkbox').prop('checked', false);
power_user.include_newline = false;
}
saveSettingsDebounced();
});
$('#include_newline_checkbox').change(function () {
power_user.include_newline = !!$(this).prop('checked');
if ($(this).prop('checked')) {
$('#trim_sentences_checkbox').prop('checked', true);
power_user.trim_sentences = true;
}
saveSettingsDebounced();
});

View File

@@ -18,13 +18,16 @@ import {
import { groups, selected_group } from './group-chats.js';
import { instruct_presets } from './instruct-mode.js';
import { kai_settings } from './kai-settings.js';
import { Popup } from './popup.js';
import { convertNovelPreset } from './nai-settings.js';
import { Popup, POPUP_RESULT, POPUP_TYPE } from './popup.js';
import { context_presets, getContextSettings, power_user } from './power-user.js';
import { SlashCommand } from './slash-commands/SlashCommand.js';
import { ARGUMENT_TYPE, SlashCommandArgument } from './slash-commands/SlashCommandArgument.js';
import { enumIcons } from './slash-commands/SlashCommandCommonEnumsProvider.js';
import { SlashCommandEnumValue, enumTypes } from './slash-commands/SlashCommandEnumValue.js';
import { SlashCommandParser } from './slash-commands/SlashCommandParser.js';
import { checkForSystemPromptInInstructTemplate, system_prompts } from './sysprompt.js';
import { renderTemplateAsync } from './templates.js';
import {
textgenerationwebui_preset_names,
textgenerationwebui_presets,
@@ -71,7 +74,7 @@ function autoSelectPreset() {
* @param {string} apiId API id
* @returns {PresetManager} Preset manager
*/
function getPresetManager(apiId = '') {
export function getPresetManager(apiId = '') {
if (!apiId) {
apiId = main_api == 'koboldhorde' ? 'kobold' : main_api;
}
@@ -102,6 +105,215 @@ class PresetManager {
this.apiId = apiId;
}
static masterSections = {
'instruct': {
name: 'Instruct Template',
getData: () => {
const manager = getPresetManager('instruct');
const name = manager.getSelectedPresetName();
return manager.getPresetSettings(name);
},
setData: (data) => {
const manager = getPresetManager('instruct');
const name = data.name;
return manager.savePreset(name, data);
},
isValid: (data) => PresetManager.isPossiblyInstructData(data),
},
'context': {
name: 'Context Template',
getData: () => {
const manager = getPresetManager('context');
const name = manager.getSelectedPresetName();
return manager.getPresetSettings(name);
},
setData: (data) => {
const manager = getPresetManager('context');
const name = data.name;
return manager.savePreset(name, data);
},
isValid: (data) => PresetManager.isPossiblyContextData(data),
},
'sysprompt': {
name: 'System Prompt',
getData: () => {
const manager = getPresetManager('sysprompt');
const name = manager.getSelectedPresetName();
return manager.getPresetSettings(name);
},
setData: (data) => {
const manager = getPresetManager('sysprompt');
const name = data.name;
return manager.savePreset(name, data);
},
isValid: (data) => PresetManager.isPossiblySystemPromptData(data),
},
'preset': {
name: 'Text Completion Preset',
getData: () => {
const manager = getPresetManager('textgenerationwebui');
const name = manager.getSelectedPresetName();
const data = manager.getPresetSettings(name);
data['name'] = name;
return data;
},
setData: (data) => {
const manager = getPresetManager('textgenerationwebui');
const name = data.name;
return manager.savePreset(name, data);
},
isValid: (data) => PresetManager.isPossiblyTextCompletionData(data),
},
};
static isPossiblyInstructData(data) {
const instructProps = ['name', 'input_sequence', 'output_sequence'];
return data && instructProps.every(prop => Object.keys(data).includes(prop));
}
static isPossiblyContextData(data) {
const contextProps = ['name', 'story_string'];
return data && contextProps.every(prop => Object.keys(data).includes(prop));
}
static isPossiblySystemPromptData(data) {
const sysPromptProps = ['name', 'content'];
return data && sysPromptProps.every(prop => Object.keys(data).includes(prop));
}
static isPossiblyTextCompletionData(data) {
const textCompletionProps = ['temp', 'top_k', 'top_p', 'rep_pen'];
return data && textCompletionProps.every(prop => Object.keys(data).includes(prop));
}
/**
* Imports master settings from JSON data.
* @param {object} data Data to import
* @param {string} fileName File name
* @returns {Promise<void>}
*/
static async performMasterImport(data, fileName) {
if (!data || typeof data !== 'object') {
toastr.error('Invalid data provided for master import');
return;
}
// Check for legacy file imports
// 1. Instruct Template
if (this.isPossiblyInstructData(data)) {
toastr.info('Importing instruct template...', 'Instruct template detected');
return await getPresetManager('instruct').savePreset(data.name, data);
}
// 2. Context Template
if (this.isPossiblyContextData(data)) {
toastr.info('Importing as context template...', 'Context template detected');
return await getPresetManager('context').savePreset(data.name, data);
}
// 3. System Prompt
if (this.isPossiblySystemPromptData(data)) {
toastr.info('Importing as system prompt...', 'System prompt detected');
return await getPresetManager('sysprompt').savePreset(data.name, data);
}
// 4. Text Completion settings
if (this.isPossiblyTextCompletionData(data)) {
toastr.info('Importing as settings preset...', 'Text Completion settings detected');
return await getPresetManager('textgenerationwebui').savePreset(fileName, data);
}
const validSections = [];
for (const [key, section] of Object.entries(this.masterSections)) {
if (key in data && section.isValid(data[key])) {
validSections.push(key);
}
}
if (validSections.length === 0) {
toastr.error('No valid sections found in imported data');
return;
}
const sectionNames = validSections.reduce((acc, key) => {
acc[key] = { key: key, name: this.masterSections[key].name, preset: data[key]?.name || '' };
return acc;
}, {});
const html = $(await renderTemplateAsync('masterImport', { sections: sectionNames }));
const popup = new Popup(html, POPUP_TYPE.CONFIRM, '', {
okButton: 'Import',
cancelButton: 'Cancel',
});
const result = await popup.show();
// Import cancelled
if (result !== POPUP_RESULT.AFFIRMATIVE) {
return;
}
const importedSections = [];
const confirmedSections = html.find('input:checked').map((_, el) => el instanceof HTMLInputElement && el.value).get();
if (confirmedSections.length === 0) {
toastr.info('No sections selected for import');
return;
}
for (const section of confirmedSections) {
const sectionData = data[section];
const masterSection = this.masterSections[section];
if (sectionData && masterSection) {
await masterSection.setData(sectionData);
importedSections.push(masterSection.name);
}
}
toastr.success(`Imported ${importedSections.length} settings: ${importedSections.join(', ')}`);
}
/**
* Exports master settings to JSON data.
* @returns {Promise<string>} JSON data
*/
static async performMasterExport() {
const sectionNames = Object.entries(this.masterSections).reduce((acc, [key, section]) => {
acc[key] = { key: key, name: section.name, checked: key !== 'preset' };
return acc;
}, {});
const html = $(await renderTemplateAsync('masterExport', { sections: sectionNames }));
const popup = new Popup(html, POPUP_TYPE.CONFIRM, '', {
okButton: 'Export',
cancelButton: 'Cancel',
});
const result = await popup.show();
// Export cancelled
if (result !== POPUP_RESULT.AFFIRMATIVE) {
return;
}
const confirmedSections = html.find('input:checked').map((_, el) => el instanceof HTMLInputElement && el.value).get();
const data = {};
if (confirmedSections.length === 0) {
toastr.info('No sections selected for export');
return;
}
for (const section of confirmedSections) {
const masterSection = this.masterSections[section];
if (masterSection) {
data[section] = masterSection.getData();
}
}
return JSON.stringify(data, null, 4);
}
/**
* Gets all preset names.
* @returns {string[]} List of preset names
@@ -182,6 +394,14 @@ class PresetManager {
}
async savePreset(name, settings) {
if (this.apiId === 'instruct' && settings) {
await checkForSystemPromptInInstructTemplate(name, settings);
}
if (this.apiId === 'novel' && settings) {
settings = convertNovelPreset(settings);
}
const preset = settings ?? this.getPresetSettings(name);
const response = await fetch('/api/presets/save', {
@@ -228,6 +448,10 @@ class PresetManager {
presets = instruct_presets;
preset_names = instruct_presets.map(x => x.name);
break;
case 'sysprompt':
presets = system_prompts;
preset_names = system_prompts.map(x => x.name);
break;
default:
console.warn(`Unknown API ID ${this.apiId}`);
}
@@ -236,11 +460,11 @@ class PresetManager {
}
isKeyedApi() {
return this.apiId == 'textgenerationwebui' || this.apiId == 'context' || this.apiId == 'instruct';
return this.apiId == 'textgenerationwebui' || this.isAdvancedFormatting();
}
isAdvancedFormatting() {
return this.apiId == 'context' || this.apiId == 'instruct';
return this.apiId == 'context' || this.apiId == 'instruct' || this.apiId == 'sysprompt';
}
updateList(name, preset) {
@@ -298,6 +522,11 @@ class PresetManager {
instruct_preset['name'] = name || power_user.instruct.preset;
return instruct_preset;
}
case 'sysprompt': {
const sysprompt_preset = structuredClone(power_user.sysprompt);
sysprompt_preset['name'] = name || power_user.sysprompt.preset;
return sysprompt_preset;
}
default:
console.warn(`Unknown API ID ${apiId}`);
return {};
@@ -601,11 +830,6 @@ export async function initPresetManager() {
return;
}
// default context preset cannot be deleted
if (apiId == 'context' && power_user.default_context === power_user.context.preset) {
return;
}
const headerText = !presetManager.isAdvancedFormatting() ? 'Delete this preset?' : 'Delete this template?';
const confirm = await Popup.show.confirm(headerText, 'This action is irreversible and your current settings will be overwritten.');
if (!confirm) {
@@ -682,4 +906,35 @@ export async function initPresetManager() {
toastr.success(successToast);
}
});
$('#af_master_import').on('click', () => {
$('#af_master_import_file').trigger('click');
});
$('#af_master_import_file').on('change', async function (e) {
if (!(e.target instanceof HTMLInputElement)) {
return;
}
const file = e.target.files[0];
if (!file) {
return;
}
const data = await parseJsonFile(file);
const fileName = file.name.replace('.json', '');
await PresetManager.performMasterImport(data, fileName);
e.target.value = null;
});
$('#af_master_export').on('click', async () => {
const data = await PresetManager.performMasterExport();
if (!data) {
return;
}
const shortDate = new Date().toISOString().split('T')[0];
download(data, `ST-formatting-${shortDate}.json`, 'application/json');
});
}

250
public/scripts/sysprompt.js Normal file
View File

@@ -0,0 +1,250 @@
import { saveSettingsDebounced } from '../script.js';
import { callGenericPopup, POPUP_TYPE } from './popup.js';
import { power_user } from './power-user.js';
import { getPresetManager } from './preset-manager.js';
import { SlashCommand } from './slash-commands/SlashCommand.js';
import { ARGUMENT_TYPE, SlashCommandArgument, SlashCommandNamedArgument } from './slash-commands/SlashCommandArgument.js';
import { commonEnumProviders, enumIcons } from './slash-commands/SlashCommandCommonEnumsProvider.js';
import { enumTypes, SlashCommandEnumValue } from './slash-commands/SlashCommandEnumValue.js';
import { SlashCommandParser } from './slash-commands/SlashCommandParser.js';
import { renderTemplateAsync } from './templates.js';
import { isTrueBoolean, resetScrollHeight } from './utils.js';
export let system_prompts = [];
const $enabled = $('#sysprompt_enabled');
const $select = $('#sysprompt_select');
const $content = $('#sysprompt_content');
const $contentBlock = $('#SystemPromptBlock');
/**
 * One-time migration: moves the legacy `system_prompt` field out of the Instruct Mode
 * settings into the dedicated sysprompt settings, saving it as a preset when no
 * identical prompt already exists.
 */
async function migrateSystemPromptFromInstructMode() {
    if (!('system_prompt' in power_user.instruct)) {
        return;
    }

    const prompt = String(power_user.instruct.system_prompt);
    delete power_user.instruct.system_prompt;

    // Carry the enabled flag and prompt text over from Instruct Mode.
    power_user.sysprompt.enabled = power_user.instruct.enabled;
    power_user.sysprompt.content = prompt;

    const existingPromptName = system_prompts.find(x => x.content === prompt)?.name;
    if (existingPromptName) {
        // An identical prompt is already saved; just select it.
        power_user.sysprompt.name = existingPromptName;
    } else {
        const data = { name: `[Migrated] ${power_user.instruct.preset}`, content: prompt };
        await getPresetManager('sysprompt')?.savePreset(data.name, data);
        power_user.sysprompt.name = data.name;
    }

    saveSettingsDebounced();
    toastr.info('System prompt settings have been moved from the Instruct Mode.', 'Migration notice', { timeOut: 5000 });
}
/**
 * Loads sysprompt settings from the given data object.
 * @param {object} data Settings data object.
 */
export async function loadSystemPrompts(data) {
    if (data.sysprompt !== undefined) {
        system_prompts = data.sysprompt;
    }

    // Migration runs after the prompt list is assigned so it can match existing content.
    await migrateSystemPromptFromInstructMode();
    toggleSystemPromptDisabledControls();

    // Populate the dropdown with all known prompts.
    system_prompts.forEach((prompt) => {
        $('<option>').val(prompt.name).text(prompt.name).appendTo($select);
    });

    // Reflect the persisted state in the UI controls.
    $enabled.prop('checked', power_user.sysprompt.enabled);
    $select.val(power_user.sysprompt.name);
    $content.val(power_user.sysprompt.content);

    // Manual textarea sizing is only needed where CSS field-sizing is unsupported.
    if (!CSS.supports('field-sizing', 'content')) {
        await resetScrollHeight($content);
    }
}
/**
 * Checks if the instruct template has a system prompt and prompts the user to save it as a system prompt.
 * @param {string} name Name of the instruct template
 * @param {object} template Instruct template object
 */
export async function checkForSystemPromptInInstructTemplate(name, template) {
    const argsValid = template && name && typeof name === 'string' && typeof template === 'object';
    if (!argsValid) {
        return;
    }
    if (!('system_prompt' in template) || !template.system_prompt) {
        return;
    }

    const existingName = system_prompts.find(x => x.content === template.system_prompt)?.name;
    const html = await renderTemplateAsync('migrateInstructPrompt', { prompt: template.system_prompt, existing: existingName });

    if (await callGenericPopup(html, POPUP_TYPE.CONFIRM)) {
        const migratedName = `[Migrated] ${name}`;
        const prompt = { name: migratedName, content: template.system_prompt };
        await getPresetManager('sysprompt').savePreset(migratedName, prompt);
        toastr.success(`System prompt "${migratedName}" has been saved.`);
    } else {
        toastr.info('System prompt has been discarded.');
    }

    // Remove the prompt from the template either way so it is not carried along.
    delete template.system_prompt;
}
/** Syncs the enabled-toggle icon and the content block's disabled styling with the current sysprompt state. */
function toggleSystemPromptDisabledControls() {
    const enabled = !!power_user.sysprompt.enabled;
    $enabled.parent().find('i').toggleClass('toggleEnabled', enabled);
    $contentBlock.toggleClass('disabled', !enabled);
}
/**
 * Sets the system prompt state.
 * @param {boolean} state System prompt state
 * @returns {string} Empty string
 */
function setSystemPromptStateCallback(state) {
    // State must be written before the controls are refreshed, as they read it back.
    power_user.sysprompt.enabled = state;
    $enabled.prop('checked', state);
    toggleSystemPromptDisabledControls();
    saveSettingsDebounced();
    // Slash command callbacks return a pipe value; nothing meaningful here.
    return '';
}
/**
 * Slash command callback: returns the system prompt state, optionally setting it first.
 * @param {object} _args Named arguments (unused)
 * @param {string} state Desired state as a string; empty/absent to only query
 * @returns {string} Current system prompt state as a string
 */
function toggleSystemPromptCallback(_args, state) {
    const hasArgument = typeof state === 'string' && state.length > 0;
    if (hasArgument) {
        setSystemPromptStateCallback(isTrueBoolean(state));
    }
    return String(power_user.sysprompt.enabled);
}
/**
 * Slash command callback: selects a system prompt by name (exact match first, then fuzzy),
 * or returns the current prompt name when no name is given.
 * @param {object} args Named arguments ({ quiet, forceGet })
 * @param {string} name System prompt name to select
 * @returns {string} Selected prompt name, or empty string
 */
function selectSystemPromptCallback(args, name) {
    // Fix: guard `args` consistently — `args.forceGet` threw when args was undefined,
    // while `args?.quiet` below was already null-safe.
    if (!power_user.sysprompt.enabled && !isTrueBoolean(args?.forceGet)) {
        return '';
    }
    if (!name) {
        return power_user.sysprompt.name ?? '';
    }
    const quiet = isTrueBoolean(args?.quiet);
    const systemPromptNames = system_prompts.map(preset => preset.name);
    // Prefer a case-insensitive exact match; fall back to fuzzy search.
    let foundName = systemPromptNames.find(x => x.toLowerCase() === name.toLowerCase());
    if (!foundName) {
        const fuse = new Fuse(systemPromptNames);
        const result = fuse.search(name);
        if (result.length === 0) {
            !quiet && toastr.warning(`System prompt "${name}" not found`);
            return '';
        }
        foundName = result[0].item;
    }
    // Triggering 'change' applies the prompt content and persists the selection.
    $select.val(foundName).trigger('change');
    !quiet && toastr.success(`System prompt "${foundName}" selected`);
    return foundName;
}
/**
 * Wires up the system prompt UI controls and registers the /sysprompt family
 * of slash commands. Intended to be called once during application init.
 */
export function initSystemPrompts() {
    // Master on/off checkbox: persist state and refresh dependent controls.
    $enabled.on('input', function () {
        power_user.sysprompt.enabled = !!$(this).prop('checked');
        toggleSystemPromptDisabledControls();
        saveSettingsDebounced();
    });
    // Prompt dropdown: selecting a prompt applies its content and implicitly enables the feature.
    $select.on('change', async function () {
        if (!power_user.sysprompt.enabled) {
            $enabled.prop('checked', true).trigger('input');
        }
        const name = String($(this).val());
        const prompt = system_prompts.find(p => p.name === name);
        if (prompt) {
            $content.val(prompt.content);
            // Manual resize only where CSS field-sizing is unsupported.
            if (!CSS.supports('field-sizing', 'content')) {
                await resetScrollHeight($content);
            }
            power_user.sysprompt.name = name;
            power_user.sysprompt.content = prompt.content;
        }
        saveSettingsDebounced();
    });
    // Free-form edits to the prompt content are saved as the user types (debounced).
    $content.on('input', function () {
        power_user.sysprompt.content = String($(this).val());
        saveSettingsDebounced();
    });
    // /sysprompt [name] — select a prompt by name, or get the current one.
    SlashCommandParser.addCommandObject(SlashCommand.fromProps({
        name: 'sysprompt',
        aliases: ['system-prompt'],
        callback: selectSystemPromptCallback,
        returns: 'current prompt name',
        namedArgumentList: [
            SlashCommandNamedArgument.fromProps({
                name: 'quiet',
                description: 'Suppress the toast message on prompt change',
                typeList: [ARGUMENT_TYPE.BOOLEAN],
                defaultValue: 'false',
                enumList: commonEnumProviders.boolean('trueFalse')(),
            }),
            SlashCommandNamedArgument.fromProps({
                name: 'forceGet',
                description: 'Force getting a name even if system prompt is disabled',
                typeList: [ARGUMENT_TYPE.BOOLEAN],
                defaultValue: 'false',
                enumList: commonEnumProviders.boolean('trueFalse')(),
            }),
        ],
        unnamedArgumentList: [
            SlashCommandArgument.fromProps({
                description: 'system prompt name',
                typeList: [ARGUMENT_TYPE.STRING],
                // Enum options are built lazily so newly saved prompts show up.
                enumProvider: () => system_prompts.map(x => new SlashCommandEnumValue(x.name, null, enumTypes.enum, enumIcons.preset)),
            }),
        ],
        helpString: `
        <div>
            Selects a system prompt by name, using fuzzy search to find the closest match.
            Gets the current system prompt if no name is provided and sysprompt is enabled or <code>forceGet=true</code> is passed.
        </div>
        <div>
            <strong>Example:</strong>
            <ul>
                <li>
                    <pre><code class="language-stscript">/sysprompt </code></pre>
                </li>
            </ul>
        </div>
    `,
    }));
    // /sysprompt-on — convenience wrapper around setSystemPromptStateCallback(true).
    SlashCommandParser.addCommandObject(SlashCommand.fromProps({
        name: 'sysprompt-on',
        aliases: ['sysprompt-enable'],
        callback: () => setSystemPromptStateCallback(true),
        helpString: 'Enables system prompt.',
    }));
    // /sysprompt-off — convenience wrapper around setSystemPromptStateCallback(false).
    SlashCommandParser.addCommandObject(SlashCommand.fromProps({
        name: 'sysprompt-off',
        aliases: ['sysprompt-disable'],
        callback: () => setSystemPromptStateCallback(false),
        helpString: 'Disables system prompt',
    }));
    // /sysprompt-state [bool] — get or set the enabled state.
    SlashCommandParser.addCommandObject(SlashCommand.fromProps({
        name: 'sysprompt-state',
        aliases: ['sysprompt-toggle'],
        helpString: 'Gets the current system prompt state. If an argument is provided, it will set the system prompt state.',
        unnamedArgumentList: [
            SlashCommandArgument.fromProps({
                description: 'system prompt state',
                typeList: [ARGUMENT_TYPE.BOOLEAN],
                enumList: commonEnumProviders.boolean('trueFalse')(),
            }),
        ],
        callback: toggleSystemPromptCallback,
    }));
}

View File

@@ -57,8 +57,8 @@
<li><tt>&lcub;&lcub;maxPrompt&rcub;&rcub;</tt> <span data-i18n="help_macros_41">max allowed prompt length in tokens = (context size - response length)</span></li>
<li><tt>&lcub;&lcub;exampleSeparator&rcub;&rcub;</tt> <span data-i18n="help_macros_42">context template example dialogues separator</span></li>
<li><tt>&lcub;&lcub;chatStart&rcub;&rcub;</tt> <span data-i18n="help_macros_43">context template chat start line</span></li>
<li><tt>&lcub;&lcub;systemPrompt&rcub;&rcub;</tt> <span data-i18n="help_macros_44">main system prompt (either character prompt override if chosen, or instructSystemPrompt)</span></li>
<li><tt>&lcub;&lcub;instructSystemPrompt&rcub;&rcub;</tt> <span data-i18n="help_macros_45">instruct system prompt</span></li>
<li><tt>&lcub;&lcub;systemPrompt&rcub;&rcub;</tt> <span data-i18n="help_macros_44">system prompt content if enabled (either character prompt override if allowed, or defaultSystemPrompt)</span></li>
<li><tt>&lcub;&lcub;defaultSystemPrompt&rcub;&rcub;</tt> <span data-i18n="help_macros_45">system prompt content</span></li>
<li><tt>&lcub;&lcub;instructSystemPromptPrefix&rcub;&rcub;</tt> <span data-i18n="help_macros_46">instruct system prompt prefix sequence</span></li>
<li><tt>&lcub;&lcub;instructSystemPromptSuffix&rcub;&rcub;</tt> <span data-i18n="help_macros_47">instruct system prompt suffix sequence</span></li>
<li><tt>&lcub;&lcub;instructUserPrefix&rcub;&rcub;</tt> <span data-i18n="help_macros_48">instruct user prefix sequence</span></li>

View File

@@ -0,0 +1,17 @@
<h3>
Choose what to export
</h3>
<div class="flex-container flexFlowColumn justifyLeft">
{{#each sections}}
{{#with this}}
<label class="checkbox_label">
{{#if checked}}
<input type="checkbox" value="{{key}}" checked />
{{else}}
<input type="checkbox" value="{{key}}" />
{{/if}}
<span data-i18n="{{name}}">{{name}}</span>
</label>
{{/with}}
{{/each}}
</div>

View File

@@ -0,0 +1,17 @@
<h3>
Choose what to import
</h3>
<div class="flex-container flexFlowColumn justifyLeft">
{{#each sections}}
{{#with this}}
<label class="checkbox_label">
<input type="checkbox" value="{{key}}" checked>
<span data-i18n="{{name}}">{{name}}</span>
{{#if preset}}
<span>&ndash;</span>
<small>{{preset}}</small>
{{/if}}
</label>
{{/with}}
{{/each}}
</div>

View File

@@ -0,0 +1,27 @@
<h3>
This instruct template also contains a system prompt.
</h3>
<div>
Would you like to migrate the system prompt from the template?
</div>
{{#if existing}}
<div class="marginTopBot5">
<b>Note:</b>
<span>you already have this prompt saved as:</span>
<span>{{existing}}</span>
</div>
{{/if}}
<div class="justifyLeft marginTop5">
<div>
<small>
<b>"Yes"</b> &ndash; The prompt will be imported and selected as the current system prompt.
</small>
</div>
<div>
<small>
<b>"No"</b> &ndash; The prompt will be ignored, no changes to the current system prompt.
</small>
</div>
</div>
<textarea class="wide100p textarea_compact" rows="10">{{prompt}}</textarea>

View File

@@ -10,7 +10,8 @@ import { getCurrentDreamGenModelTokenizer, getCurrentOpenRouterModelTokenizer, o
const { OOBA, TABBY, KOBOLDCPP, VLLM, APHRODITE, LLAMACPP, OPENROUTER, DREAMGEN } = textgen_types;
export const CHARACTERS_PER_TOKEN_RATIO = 3.35;
const TOKENIZER_WARNING_KEY = 'tokenizationWarningShown';
export const TOKENIZER_WARNING_KEY = 'tokenizationWarningShown';
export const TOKENIZER_SUPPORTED_KEY = 'tokenizationSupported';
export const tokenizers = {
NONE: 0,
@@ -273,6 +274,9 @@ export function getTokenizerBestMatch(forApi) {
if (nai_settings.model_novel.includes('kayra')) {
return tokenizers.NERD2;
}
if (nai_settings.model_novel.includes('erato')) {
return tokenizers.LLAMA3;
}
}
if (forApi === 'kobold' || forApi === 'textgenerationwebui' || forApi === 'koboldhorde') {
// Try to use the API tokenizer if possible:
@@ -280,8 +284,9 @@ export function getTokenizerBestMatch(forApi) {
// - Kobold must pass a version check
// - Tokenizer haven't reported an error previously
const hasTokenizerError = sessionStorage.getItem(TOKENIZER_WARNING_KEY);
const hasValidEndpoint = sessionStorage.getItem(TOKENIZER_SUPPORTED_KEY);
const isConnected = online_status !== 'no_connection';
const isTokenizerSupported = TEXTGEN_TOKENIZERS.includes(textgen_settings.type);
const isTokenizerSupported = TEXTGEN_TOKENIZERS.includes(textgen_settings.type) && (textgen_settings.type !== OOBA || hasValidEndpoint);
if (!hasTokenizerError && isConnected) {
if (forApi === 'kobold' && kai_flags.can_use_tokenization) {
@@ -923,15 +928,20 @@ function countTokensFromTextgenAPI(str, resolve) {
function apiFailureTokenCount(str) {
console.error('Error counting tokens');
let shouldTryAgain = false;
if (!sessionStorage.getItem(TOKENIZER_WARNING_KEY)) {
toastr.warning(
'Your selected API doesn\'t support the tokenization endpoint. Using estimated counts.',
'Error counting tokens',
{ timeOut: 10000, preventDuplicates: true },
);
const bestMatchBefore = getTokenizerBestMatch(main_api);
sessionStorage.setItem(TOKENIZER_WARNING_KEY, String(true));
const bestMatchAfter = getTokenizerBestMatch(main_api);
if ([tokenizers.API_TEXTGENERATIONWEBUI, tokenizers.API_KOBOLD].includes(bestMatchBefore) && bestMatchBefore !== bestMatchAfter) {
shouldTryAgain = true;
}
}
// Only try again if we guarantee not to be looped by the same error
if (shouldTryAgain && power_user.tokenizer === tokenizers.BEST_MATCH) {
return getTokenCount(str);
}
return guesstimate(str);

View File

@@ -607,12 +607,11 @@ export function sortByCssOrder(a, b) {
/**
* Trims a string to the end of a nearest sentence.
* @param {string} input The string to trim.
* @param {boolean} include_newline Whether to include a newline character in the trimmed string.
* @returns {string} The trimmed string.
* @example
* trimToEndSentence('Hello, world! I am from'); // 'Hello, world!'
*/
export function trimToEndSentence(input, include_newline = false) {
export function trimToEndSentence(input) {
if (!input) {
return '';
}
@@ -633,11 +632,6 @@ export function trimToEndSentence(input, include_newline = false) {
}
break;
}
if (include_newline && char === '\n') {
last = i;
break;
}
}
if (last === -1) {

View File

@@ -1,5 +1,6 @@
import { chat_metadata, getCurrentChatId, saveSettingsDebounced, sendSystemMessage, system_message_types } from '../script.js';
import { extension_settings, saveMetadataDebounced } from './extensions.js';
import { callGenericPopup, POPUP_TYPE } from './popup.js';
import { executeSlashCommandsWithOptions } from './slash-commands.js';
import { SlashCommand } from './slash-commands/SlashCommand.js';
import { SlashCommandAbortController } from './slash-commands/SlashCommandAbortController.js';
@@ -303,24 +304,48 @@ export function replaceVariableMacros(input) {
return lines.join('\n');
}
function listVariablesCallback() {
async function listVariablesCallback(args) {
const type = String(args?.format || '').toLowerCase().trim() || 'popup';
const scope = String(args?.scope || '').toLowerCase().trim() || 'all';
if (!chat_metadata.variables) {
chat_metadata.variables = {};
}
const localVariables = Object.entries(chat_metadata.variables).map(([name, value]) => `${name}: ${value}`);
const globalVariables = Object.entries(extension_settings.variables.global).map(([name, value]) => `${name}: ${value}`);
const includeLocalVariables = scope === 'all' || scope === 'local';
const includeGlobalVariables = scope === 'all' || scope === 'global';
const localVariables = includeLocalVariables ? Object.entries(chat_metadata.variables).map(([name, value]) => `${name}: ${value}`) : [];
const globalVariables = includeGlobalVariables ? Object.entries(extension_settings.variables.global).map(([name, value]) => `${name}: ${value}`) : [];
const jsonVariables = [
...Object.entries(chat_metadata.variables).map(x => ({ key: x[0], value: x[1], scope: 'local' })),
...Object.entries(extension_settings.variables.global).map(x => ({ key: x[0], value: x[1], scope: 'global' })),
];
const localVariablesString = localVariables.length > 0 ? localVariables.join('\n\n') : 'No local variables';
const globalVariablesString = globalVariables.length > 0 ? globalVariables.join('\n\n') : 'No global variables';
const chatName = getCurrentChatId();
const converter = new showdown.Converter();
const message = `### Local variables (${chatName}):\n${localVariablesString}\n\n### Global variables:\n${globalVariablesString}`;
const message = [
includeLocalVariables ? `### Local variables (${chatName}):\n${localVariablesString}` : '',
includeGlobalVariables ? `### Global variables:\n${globalVariablesString}` : '',
].filter(x => x).join('\n\n');
const htmlMessage = DOMPurify.sanitize(converter.makeHtml(message));
sendSystemMessage(system_message_types.GENERIC, htmlMessage);
return '';
switch (type) {
case 'none':
break;
case 'chat':
sendSystemMessage(system_message_types.GENERIC, htmlMessage);
break;
case 'popup':
default:
await callGenericPopup(htmlMessage, POPUP_TYPE.TEXT);
break;
}
return JSON.stringify(jsonVariables);
}
/**
@@ -881,7 +906,35 @@ export function registerVariableCommands() {
name: 'listvar',
callback: listVariablesCallback,
aliases: ['listchatvar'],
helpString: 'List registered chat variables.',
helpString: 'List registered chat variables. Displays variables in a popup by default. Use the <code>format</code> argument to change the output format.',
returns: 'JSON list of local variables',
namedArgumentList: [
SlashCommandNamedArgument.fromProps({
name: 'scope',
description: 'filter variables by scope',
typeList: [ARGUMENT_TYPE.STRING],
defaultValue: 'all',
isRequired: false,
forceEnum: true,
enumList: [
new SlashCommandEnumValue('all', 'All variables', enumTypes.enum, enumIcons.variable),
new SlashCommandEnumValue('local', 'Local variables', enumTypes.enum, enumIcons.localVariable),
new SlashCommandEnumValue('global', 'Global variables', enumTypes.enum, enumIcons.globalVariable),
],
}),
SlashCommandNamedArgument.fromProps({
name: 'format',
description: 'output format',
typeList: [ARGUMENT_TYPE.STRING],
isRequired: true,
forceEnum: true,
enumList: [
new SlashCommandEnumValue('popup', 'Show variables in a popup.', enumTypes.enum, enumIcons.default),
new SlashCommandEnumValue('chat', 'Post a system message to the chat.', enumTypes.enum, enumIcons.message),
new SlashCommandEnumValue('none', 'Just return the variables as a JSON list.', enumTypes.enum, enumIcons.array),
],
}),
],
}));
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'setvar',
@@ -1430,8 +1483,8 @@ export function registerVariableCommands() {
<li>
<pre><code class="language-stscript">/while left={{getvar::currentword}} {: /setvar key=currentword {: /do-something-and-return :}() | /echo The current work is "{{getvar::currentword}}" :}</code></pre>
executes the defined subcommand as long as the "currentword" variable is truthy (has any content that is not false/empty)
</li>
</ul>
</ul>
</li>
</div>
<div>
Loops are limited to 100 iterations by default, pass <code>guard=off</code> to disable.
@@ -1546,7 +1599,7 @@ export function registerVariableCommands() {
typeList: [ARGUMENT_TYPE.NUMBER, ARGUMENT_TYPE.VARIABLE_NAME],
isRequired: true,
acceptsMultiple: true,
enumProvider: (executor, scope)=>{
enumProvider: (executor, scope) => {
const vars = commonEnumProviders.variables('all')(executor, scope);
vars.push(
new SlashCommandEnumValue(
@@ -1554,16 +1607,16 @@ export function registerVariableCommands() {
null,
enumTypes.variable,
enumIcons.variable,
(input)=>/^\w*$/.test(input),
(input)=>input,
(input) => /^\w*$/.test(input),
(input) => input,
),
new SlashCommandEnumValue(
'any number',
null,
enumTypes.number,
enumIcons.number,
(input)=>input == '' || !Number.isNaN(Number(input)),
(input)=>input,
(input) => input == '' || !Number.isNaN(Number(input)),
(input) => input,
),
);
return vars;