Merge remote-tracking branch 'alpin/staging' into sampler_order

This commit is contained in:
AlpinDale 2024-12-03 01:27:08 +00:00
commit 86b4247767
11 changed files with 154 additions and 66 deletions

4
package-lock.json generated
View File

@ -1,12 +1,12 @@
{
"name": "sillytavern",
"version": "1.12.7",
"version": "1.12.8",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "sillytavern",
"version": "1.12.7",
"version": "1.12.8",
"hasInstallScript": true,
"license": "AGPL-3.0",
"dependencies": {

View File

@ -84,7 +84,7 @@
"type": "git",
"url": "https://github.com/SillyTavern/SillyTavern.git"
},
"version": "1.12.7",
"version": "1.12.8",
"scripts": {
"start": "node server.js",
"start:deno": "deno run --allow-run --allow-net --allow-read --allow-write --allow-sys --allow-env server.js",

View File

@ -820,8 +820,8 @@
</div>
</div>
<div class="range-block m-t-1">
<div class="range-block-title openai_restorable" data-i18n="World Info Format Template">
<span>World Info format template</span>
<div class="range-block-title openai_restorable">
<span data-i18n="World Info Format Template">World Info format template</span>
<div id="wi_format_restore" data-i18n="[title]Restore default format" title="Restore default format" class="right_menu_button">
<div class="fa-solid fa-clock-rotate-left"></div>
</div>
@ -1524,15 +1524,15 @@
<div class="">
<label class="checkbox_label" for="early_stopping_textgenerationwebui">
<input type="checkbox" id="early_stopping_textgenerationwebui" />
<small data-i18n="Early Stopping">Early Stopping
<small data-i18n="Early Stopping">Early Stopping</small>
<div class="fa-solid fa-circle-info opacity50p" data-i18n="[title]Controls the stopping condition for beam search. If checked, the generation stops as soon as there are '# of Beams' sequences. If not checked, a heuristic is applied and the generation is stopped when it's very unlikely to find better candidates." title="Controls the stopping condition for beam search. If checked, the generation stops as soon as there are '# of Beams' sequences. If not checked, a heuristic is applied and the generation is stopped when it's very unlikely to find better candidates."></div>
</small>
</label>
</div>
</div>
</div>
<div data-tg-type="ooba" id="contrastiveSearchBlock" name="contrastiveSearchBlock" class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<h4 class="textAlignCenter" data-i18n="Contrastive search">Contrastive Search
<h4 class="textAlignCenter">
<span data-i18n="Contrastive search">Contrastive Search</span>
<div class=" fa-solid fa-circle-info opacity50p " data-i18n="Contrastive_search_txt" title="A sampler that encourages diversity while maintaining coherence, by exploiting the isotropicity of the representation space of most LLMs. For details, see the paper A Contrastive Framework for Neural Text Generation by Su et al. (2022)."></div>
</h4>
<div class="alignitemscenter flex-container flexFlowColumn wide100p gap0">
@ -1566,9 +1566,8 @@
</label>
<label data-tg-type="vllm, aphrodite, infermaticai" class="checkbox_label" for="ignore_eos_token_textgenerationwebui">
<input type="checkbox" id="ignore_eos_token_textgenerationwebui" />
<small data-i18n="Ignore EOS Token">Ignore EOS Token
<small data-i18n="Ignore EOS Token">Ignore EOS Token</small>
<div class="fa-solid fa-circle-info opacity50p " data-i18n="[title]Ignore the EOS Token even if it generates." title="Ignore the EOS Token even if it generates."></div>
</small>
</label>
<label class="checkbox_label flexGrow flexShrink" for="skip_special_tokens_textgenerationwebui">
<input type="checkbox" id="skip_special_tokens_textgenerationwebui" />
@ -1629,7 +1628,8 @@
</div>
<div id="cfg_block_ooba" data-tg-type="ooba, tabby" class="wide100p">
<hr class="width100p">
<h4 data-i18n="CFG" class="textAlignCenter">CFG
<h4 class="textAlignCenter">
<span data-i18n="CFG">CFG</span>
<div class="margin5 fa-solid fa-circle-info opacity50p " data-i18n="[title]Classifier Free Guidance. More helpful tip coming soon" title="Classifier Free Guidance. More helpful tip coming soon."></div>
</h4>
<div class="alignitemscenter flex-container flexFlowColumn flexShrink gap0">
@ -2032,8 +2032,10 @@
<div id="logit_bias_openai" class="range-block-title openai_restorable" data-i18n="Logit Bias">
Logit Bias
</div>
<div class="toggle-description justifyLeft" data-i18n="Helps to ban or reenforce the usage of certain words">
Helps to ban or reinforce the usage of certain tokens. Confirm token parsing with <a target="_blank" href="https://platform.openai.com/tokenizer/">Tokenizer</a>.
<div class="toggle-description justifyLeft">
<span data-i18n="Helps to ban or reenforce the usage of certain words">Helps to ban or reinforce the usage of certain tokens.</span>
<span data-i18n="Confirm token parsing with">Confirm token parsing with</span>
<a target="_blank" href="https://platform.openai.com/tokenizer/" data-i18n="Tokenizer">Tokenizer</a>.
</div>
<div class="openai_logit_bias_preset_form">
<select id="openai_logit_bias_preset">
@ -2334,8 +2336,8 @@
<div data-tg-type="mancer" class="flex-container flexFlowColumn">
<div class="flex-container flexFlowColumn">
</div>
<h4 data-i18n="Mancer API key">
Mancer API key
<h4>
<span data-i18n="Mancer API key">Mancer API key</span>
<a href="https://mancer.tech/" class="notes-link" target="_blank">
<span class="fa-solid fa-circle-question note-link-span"></span>
</a>
@ -2631,15 +2633,21 @@
<input id="koboldcpp_api_url_text" class="text_pole wide100p" value="" autocomplete="off" data-server-history="koboldcpp">
</div>
</div>
<div class="flex-container flexFlowColumn marginTopBot5">
<label data-tg-type="ooba" class="checkbox_label" for="bypass_status_check_textgenerationwebui">
<input type="checkbox" id="bypass_status_check_textgenerationwebui" />
<span data-i18n="Bypass status check">Bypass status check</span>
</label>
<label data-tg-type="koboldcpp, llamacpp" class="checkbox_label" for="context_size_derived">
<input type="checkbox" id="context_size_derived" />
<span data-i18n="Derive context size from backend">Derive context size from backend</span>
</label>
</div>
<div class="flex-container">
<div id="api_button_textgenerationwebui" class="api_button menu_button menu_button_icon" type="submit" data-i18n="Connect" data-server-connect="ooba_blocking,vllm,aphrodite,tabby,koboldcpp,ollama,llamacpp,huggingface">Connect</div>
<div data-tg-type="openrouter" class="menu_button menu_button_icon openrouter_authorize" title="Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai" data-i18n="Authorize;[title]Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai">Authorize</div>
<div class="api_loading menu_button menu_button_icon" data-i18n="Cancel">Cancel</div>
</div>
<label data-tg-type="ooba" class="checkbox_label margin-bot-10px" for="bypass_status_check_textgenerationwebui">
<input type="checkbox" id="bypass_status_check_textgenerationwebui" />
<span data-i18n="Bypass status check">Bypass status check</span>
</label>
</form>
<div class="online_status">
<div class="online_status_indicator"></div>
@ -2682,8 +2690,9 @@
</div>
<div class="toggle-description justifyLeft">
<span data-i18n="Saved addresses and passwords.">
Saved addresses and passwords.<br>
Saved addresses and passwords.
</span>
<br>
</div>
<div class="openai_logit_bias_preset_form">
<select id="openai_proxy_preset">
@ -2696,8 +2705,9 @@
</div>
<div class="toggle-description justifyLeft">
<span data-i18n="This will show up as your saved preset.">
This will show up as your saved preset.<br>
This will show up as your saved preset.
</span>
<br>
</div>
<div class="wide100p">
<input id="openai_reverse_proxy_name" type="text" class="text_pole" placeholder="..." />
@ -2707,8 +2717,9 @@
</div>
<div class="toggle-description justifyLeft wide100p">
<span data-i18n="Alternative server URL (leave empty to use the default value).">
Alternative server URL (leave empty to use the default value).<br>
Alternative server URL (leave empty to use the default value).
</span>
<br>
</div>
<div class="wide100p">
<input id="openai_reverse_proxy" type="text" class="text_pole" placeholder="https://api.openai.com/v1" />
@ -2721,8 +2732,9 @@
</div>
<div class="toggle-description justifyLeft">
<span data-i18n="Will be used as a password for the proxy instead of API key.">
Will be used as a password for the proxy instead of API key.<br>
Will be used as a password for the proxy instead of API key.
</span>
<br>
</div>
<div class="flex-container width100p">
<input id="openai_proxy_password" type="password" class="text_pole flex1" placeholder="" form="openai_form" autocomplete="off" />
@ -3810,7 +3822,11 @@
<div id="wiTopBlock" class="flex-container">
<div id="WIMultiSelector" class="flex1 flex alignSelfStart range-block">
<div class="range-block-title justifyLeft">
<span data-i18n="Active World(s) for all chats"><small>Active World(s) for all chats</small></span>
<span>
<small data-i18n="Active World(s) for all chats">
Active World(s) for all chats
</small>
</span>
</div>
<div class="range-block-range">
<select id="world_info" class="select2_multi_sameline" multiple>
@ -4135,7 +4151,10 @@
<div class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small>
<span data-i18n="Chat Width">Chat Width <i class="fa-solid fa-desktop"></i></span>
<span>
<span data-i18n="Chat Width">Chat Width</span>
<i class="fa-solid fa-desktop"></i>
</span>
<div class="fa-solid fa-circle-info opacity50p" data-i18n="[title]Width of the main chat window in % of screen width" title="Width of the main chat window in % of screen width"></div>
</small>
<input class="neo-range-slider" type="range" id="chat_width_slider" name="chat_width_slider" min="25" max="100" step="1">
@ -4307,7 +4326,7 @@
<div id="reload_chat" class="menu_button whitespacenowrap" data-i18n="[title]Reload and redraw the currently open chat" title="Reload and redraw the currently open chat.">
<small data-i18n="Reload Chat">Reload Chat</small>
</div>
<div id="debug_menu" class="menu_button whitespacenowrap" data-i18n="Debug Menu">
<div id="debug_menu" class="menu_button whitespacenowrap">
<small data-i18n="Debug Menu">Debug Menu</small>
</div>
</div>
@ -4426,8 +4445,10 @@
</div>
</div>
<div id="examples-behavior-block">
<label data-i18n="Example Messages Behavior">
<small>Example Messages Behavior:</small>
<label>
<small data-i18n="Example Messages Behavior">
Example Messages Behavior:
</small>
</label>
<select id="example_messages_behavior">
<option value="normal" data-i18n="Gradual push-out">Gradual push-out</option>
@ -4591,7 +4612,9 @@
</label>
<div class="flex-container">
<div class="flex1" title="Determines how entries are found for autocomplete." data-i18n="[title]Determines how entries are found for autocomplete.">
<label for="stscript_matching" data-i18n="Autocomplete Matching"><small>Matching</small></label>
<label for="stscript_matching">
<small data-i18n="Autocomplete Matching">Matching</small>
</label>
<select id="stscript_matching">
<option data-i18n="Starts with" value="strict">Starts with</option>
<option data-i18n="Includes" value="includes">Includes</option>
@ -4599,7 +4622,9 @@
</select>
</div>
<div class="flex1" title="Sets the style of the autocomplete." data-i18n="[title]Sets the style of the autocomplete.">
<label for="stscript_autocomplete_style" data-i18n="Autocomplete Style"><small>Style</small></label>
<label for="stscript_autocomplete_style">
<small data-i18n="Autocomplete Style">Style</small>
</label>
<div class="flex-container flexFlowRow alignItemsBaseline">
<select id="stscript_autocomplete_style">
<option data-i18n="Follow Theme" value="theme">Follow Theme</option>
@ -4611,8 +4636,8 @@
</div>
</div>
<div title="Determines which keys select an item from the AutoComplete suggestions">
<label data-i18n="Keyboard">
<small>Keyboard:</small>
<label>
<small data-i18n="Keyboard">Keyboard:</small>
</label>
<select id="stscript_autocomplete_select">
<option value="3" data-i18n="Select with Tab or Enter">Select with Tab or Enter</option>
@ -4626,7 +4651,9 @@
<input class="neo-range-input" type="number" min="0.5" max="2" step="0.01" data-for="stscript_autocomplete_font_scale" id="stscript_autocomplete_font_scale_counter">
</div>
<div title="Sets the width of the autocomplete." data-i18n="[title]Sets the width of the autocomplete.">
<label for="stscript_autocomplete_width" data-i18n="Autocomplete Width"><small>Width</small></label>
<label for="stscript_autocomplete_width">
<small data-i18n="Autocomplete Width">Width</small>
</label>
<div class="doubleRangeContainer">
<div class="doubleRangeInputContainer">
<input type="range" id="stscript_autocomplete_width_left" min="0" max="2" step="1">
@ -4654,14 +4681,18 @@
<label><small data-i18n="Parser Flags">Parser Flags</small></label>
<label class="checkbox_label" title="Switch to stricter escaping, allowing all delimiting characters to be escaped with a backslash, and backslashes to be escaped as well." data-i18n="[title]Switch to stricter escaping, allowing all delimiting characters to be escaped with a backslash, and backslashes to be escaped as well.">
<input id="stscript_parser_flag_strict_escaping" type="checkbox" />
<span data-i18n="STRICT_ESCAPING"><small>STRICT_ESCAPING</small></span>
<span>
<small data-i18n="STRICT_ESCAPING">STRICT_ESCAPING</small>
</span>
<a href="https://docs.sillytavern.app/usage/st-script/#strict-escaping" target="_blank" class="notes-link">
<span class="fa-solid fa-circle-question note-link-span"></span>
</a>
</label>
<label class="checkbox_label" title="Prevents {{getvar::}} {{getglobalvar::}} macros from having literal macro-like values auto-evaluated.&NewLine;e.g. &quot;{{newline}}&quot; remains as literal string &quot;{{newline}}&quot;&NewLine;&NewLine;(This is done by internally replacing {{getvar::}} {{getglobalvar::}} macros with scoped variables.)" data-i18n="[title]stscript_parser_flag_replace_getvar_label">
<input id="stscript_parser_flag_replace_getvar" type="checkbox" />
<span data-i18n="REPLACE_GETVAR"><small>REPLACE_GETVAR</small></span>
<span>
<small data-i18n="REPLACE_GETVAR">REPLACE_GETVAR</small>
</span>
<a href="https://docs.sillytavern.app/usage/st-script/#replace-variable-macros" target="_blank" class="notes-link">
<span class="fa-solid fa-circle-question note-link-span"></span>
</a>
@ -4792,10 +4823,12 @@
<div class="flex-container wide100p alignitemscenter spaceBetween flexNoGap">
<div class="flex-container alignItemsBaseline wide100p">
<div class="flex1 flex-container alignItemsBaseline">
<h3 class="margin0" data-i18n="Persona Management">Persona Management</h3>
<a href="https://docs.sillytavern.app/usage/core-concepts/personas/" target="_blank" data-i18n="How do I use this?">
<h3 class="margin0" >
<span data-i18n="Persona Management">Persona Management</span>
<a href="https://docs.sillytavern.app/usage/core-concepts/personas/" target="_blank">
<span class="fa-solid fa-circle-question note-link-span"></span>
</a>
</h3>
</div>
<div class="flex-container">
<div class="menu_button menu_button_icon user_stats_button" data-i18n="[title]Click for stats!" title="Click for stats!">
@ -5516,7 +5549,12 @@
<span data-i18n="Examples of dialogue" class="mdhotkey_location">Examples of dialogue</span>
<i class="editor_maximize fa-solid fa-maximize right_menu_button" data-for="mes_example_textarea" title="Expand the editor" data-i18n="[title]Expand the editor"></i>
</h4>
<h5 data-i18n="Important to set the character's writing style.">Important to set the character's writing style. <a href="https://docs.sillytavern.app/usage/core-concepts/characterdesign/#examples-of-dialogue" class="notes-link" target="_blank"><span class="fa-solid fa-circle-question note-link-span"></span></a></h5>
<h5>
<span data-i18n="Important to set the character's writing style.">Important to set the character's writing style.</span>
<a href="https://docs.sillytavern.app/usage/core-concepts/characterdesign/#examples-of-dialogue" class="notes-link" target="_blank">
<span class="fa-solid fa-circle-question note-link-span"></span>
</a>
</h5>
</div>
<textarea id="mes_example_textarea" class="flexGrow mdHotkeys" name="mes_example" data-i18n="[placeholder](Examples of chat dialog. Begin each example with START on a new line.)" placeholder="(Examples of chat dialog. Begin each example with &lt;START&gt; on a new line.)" form="form_create" rows="6"></textarea>
<div class="extension_token_counter">
@ -5592,7 +5630,8 @@
<div class="character_world range-block flexFlowColumn flex-container">
<div class="range-block-title">
<h3>
<span data-i18n="Select a World Info file for"> Select a World Info file for <span class="character_name"></span></span>:
<span data-i18n="Select a World Info file for">Select a World Info file for</span>
<span class="character_name"></span>:
</h3>
</div>
<h4 data-i18n="Primary Lorebook">Primary Lorebook</h4>
@ -5893,7 +5932,7 @@
</div>
<div class="flex2 flex-container flexFlowColumn flexNoGap" data-i18n="[title]Sticky entries will stay active for N messages after being triggered." title="Sticky entries will stay active for N messages after being triggered.">
<div class="flex-container justifySpaceBetween marginBot5">
<small class="flex-container alignItemsBaseline" for="sticky" data-i18n="Sticky">
<small class="flex-container alignItemsBaseline" for="sticky">
<span data-i18n="Sticky">
Sticky
</span>
@ -5906,7 +5945,7 @@
</div>
<div class="flex2 flex-container flexFlowColumn flexNoGap" data-i18n="[title]Entries with a cooldown can't be activated N messages after being triggered." title="Entries with a cooldown can't be activated N messages after being triggered.">
<div class="flex-container justifySpaceBetween marginBot5">
<small class="flex-container alignItemsBaseline" for="cooldown" data-i18n="Cooldown">
<small class="flex-container alignItemsBaseline" for="cooldown">
<span data-i18n="Cooldown">
Cooldown
</span>
@ -5919,7 +5958,7 @@
</div>
<div class="flex2 flex-container flexFlowColumn flexNoGap" data-i18n="[title]Entries with a delay can't be activated until there are N messages present in the chat." title="Entries with a delay can't be activated until there are N messages present in the chat.">
<div class="flex-container justifySpaceBetween marginBot5">
<small class="flex-container alignItemsBaseline" for="delay" data-i18n="Delay">
<small class="flex-container alignItemsBaseline" for="delay">
<span data-i18n="Delay">
Delay
</span>

View File

@ -242,7 +242,7 @@ import { BulkEditOverlay, CharacterContextMenu } from './scripts/BulkEditOverlay
import { loadFeatherlessModels, loadMancerModels, loadOllamaModels, loadTogetherAIModels, loadInfermaticAIModels, loadOpenRouterModels, loadVllmModels, loadAphroditeModels, loadDreamGenModels, initTextGenModels, loadTabbyModels } from './scripts/textgen-models.js';
import { appendFileContent, hasPendingFileAttachment, populateFileAttachment, decodeStyleTags, encodeStyleTags, isExternalMediaAllowed, getCurrentEntityId, preserveNeutralChat, restoreNeutralChat } from './scripts/chats.js';
import { initPresetManager } from './scripts/preset-manager.js';
import { MacrosParser, evaluateMacros, getLastMessageId } from './scripts/macros.js';
import { MacrosParser, evaluateMacros, getLastMessageId, initMacros } from './scripts/macros.js';
import { currentUser, setUserControls } from './scripts/user.js';
import { POPUP_RESULT, POPUP_TYPE, Popup, callGenericPopup, fixToastrForDialogs } from './scripts/popup.js';
import { renderTemplate, renderTemplateAsync } from './scripts/templates.js';
@ -957,6 +957,7 @@ async function firstLoadInit() {
initDynamicStyles();
initTags();
initBookmarks();
initMacros();
await getUserAvatars(true, user_avatar);
await getCharacters();
await getBackgrounds();
@ -1238,8 +1239,9 @@ async function getStatusTextgen() {
const wantsInstructDerivation = (power_user.instruct.enabled && power_user.instruct.derived);
const wantsContextDerivation = power_user.context_derived;
const wantsContextSize = power_user.context_size_derived;
const supportsChatTemplate = [textgen_types.KOBOLDCPP, textgen_types.LLAMACPP].includes(textgen_settings.type);
if (supportsChatTemplate && (wantsInstructDerivation || wantsContextDerivation)) {
if (supportsChatTemplate && (wantsInstructDerivation || wantsContextDerivation || wantsContextSize)) {
const response = await fetch('/api/backends/text-completions/props', {
method: 'POST',
headers: getRequestHeaders(),
@ -1253,6 +1255,17 @@ async function getStatusTextgen() {
const data = await response.json();
if (data) {
const { chat_template, chat_template_hash } = data;
if (wantsContextSize && 'default_generation_settings' in data) {
const backend_max_context = data['default_generation_settings']['n_ctx'];
const old_value = max_context;
if (max_context !== backend_max_context) {
setGenerationParamsFromPreset({ max_length: backend_max_context });
}
if (old_value !== max_context) {
console.log(`Auto-switched max context from ${old_value} to ${max_context}`);
toastr.info(`${old_value}${max_context}`, 'Context Size Changed');
}
}
console.log(`We have chat template ${chat_template.split('\n')[0]}...`);
const templates = await deriveTemplatesFromChatTemplate(chat_template, chat_template_hash);
if (templates) {
@ -6822,6 +6835,10 @@ export async function saveSettings(type) {
});
}
/**
* Sets the generation parameters from a preset object.
* @param {{ genamt?: number, max_length?: number }} preset Preset object
*/
export function setGenerationParamsFromPreset(preset) {
const needsUnlock = (preset.max_length ?? max_context) > MAX_CONTEXT_DEFAULT || (preset.genamt ?? amount_gen) > MAX_RESPONSE_DEFAULT;
$('#max_context_unlocked').prop('checked', needsUnlock).trigger('change');

View File

@ -40,7 +40,7 @@
<div class="expression_fallback_block m-b-1 m-t-1">
<label for="expression_fallback" data-i18n="Default / Fallback Expression">Default / Fallback Expression</label>
<small data-i18n="Set the default and fallback expression being used when no matching expression is found.">Set the default and fallback expression being used when no matching expression is found.</small>
<select id="expression_fallback" class="flex1 margin0" data-i18n="Fallback Expression" placeholder="Fallback Expression"></select>
<select id="expression_fallback" class="flex1 margin0"></select>
</div>
<div class="expression_custom_block m-b-1 m-t-1">
<label for="expression_custom" data-i18n="Custom Expressions">Custom Expressions</label>

View File

@ -57,7 +57,7 @@
</div>
<div id="playback_rate_block" class="range-block">
<hr>
<div class="range-block-title justifyLeft" data-i18n="Audio Playback Speed">
<div class="range-block-title justifyLeft">
<small data-i18n="Audio Playback Speed">Audio Playback Speed</small>
</div>
<div class="range-block-range-and-counter">

View File

@ -1,5 +1,5 @@
import { Handlebars, moment, seedrandom, droll } from '../lib.js';
import { chat, chat_metadata, main_api, getMaxContextSize, getCurrentChatId, substituteParams } from '../script.js';
import { chat, chat_metadata, main_api, getMaxContextSize, getCurrentChatId, substituteParams, eventSource, event_types } from '../script.js';
import { timestampToMoment, isDigitsOnly, getStringHash, escapeRegex, uuidv4 } from './utils.js';
import { textgenerationwebui_banned_in_macros } from './textgen-settings.js';
import { getInstructMacros } from './instruct-mode.js';
@ -521,3 +521,22 @@ export function evaluateMacros(content, env, postProcessFn) {
return content;
}
export function initMacros() {
    // Backing state for the {{lastGenerationType}} macro: the type of the
    // most recently queued (non-dry-run) generation request. Starts empty
    // and is cleared again whenever the active chat changes.
    let lastGenerationType = '';

    MacrosParser.registerMacro('lastGenerationType', () => lastGenerationType);

    // Record the generation type when a real generation kicks off;
    // dry runs (prompt inspection) must not disturb the remembered value.
    eventSource.on(event_types.GENERATION_STARTED, (type, _params, isDryRun) => {
        if (isDryRun) {
            return;
        }
        lastGenerationType = type || 'normal';
    });

    // Switching chats invalidates the remembered type.
    eventSource.on(event_types.CHAT_CHANGED, () => {
        lastGenerationType = '';
    });
}

View File

@ -246,6 +246,7 @@ let power_user = {
},
context_derived: false,
context_size_derived: false,
sysprompt: {
enabled: true,
@ -1482,6 +1483,7 @@ async function loadPowerUserSettings(settings, data) {
$('#example_messages_behavior').val(getExampleMessagesBehavior());
$(`#example_messages_behavior option[value="${getExampleMessagesBehavior()}"]`).prop('selected', true);
$('#context_derived').parent().find('i').toggleClass('toggleEnabled', !!power_user.context_derived);
$('#context_size_derived').prop('checked', !!power_user.context_size_derived);
$('#console_log_prompts').prop('checked', power_user.console_log_prompts);
$('#request_token_probabilities').prop('checked', power_user.request_token_probabilities);
@ -3079,6 +3081,16 @@ $(document).ready(() => {
$('#context_derived').parent().find('i').toggleClass('toggleEnabled', !!power_user.context_derived);
});
$('#context_size_derived').on('input', function () {
const value = !!$(this).prop('checked');
power_user.context_size_derived = value;
saveSettingsDebounced();
});
$('#context_size_derived').on('change', function () {
$('#context_size_derived').prop('checked', !!power_user.context_size_derived);
});
$('#always-force-name2-checkbox').change(function () {
power_user.always_force_name2 = !!$(this).prop('checked');
saveSettingsDebounced();

View File

@ -8,6 +8,7 @@
<li><tt>&lcub;&lcub;noop&rcub;&rcub;</tt> <span data-i18n="help_macros_4">no operation, just an empty string.</span></li>
<li><tt>&lcub;&lcub;original&rcub;&rcub;</tt> <span data-i18n="help_macros_5">global prompts defined in API settings. Only valid in Advanced Definitions prompt overrides.</span></li>
<li><tt>&lcub;&lcub;input&rcub;&rcub;</tt> <span data-i18n="help_macros_6">the user input</span></li>
<li><tt>&lcub;&lcub;lastGenerationType&rcub;&rcub;</tt> - <span>the type of the last queued generation request. Empty if no generations were performed yet or the active chat was switched. Possible values: "normal", "impersonate", "regenerate", "quiet", "swipe", "continue".</span></li>
<li><tt>&lcub;&lcub;charPrompt&rcub;&rcub;</tt> <span data-i18n="help_macros_7">the Character's Main Prompt override</span></li>
<li><tt>&lcub;&lcub;charInstruction&rcub;&rcub;</tt> <span data-i18n="help_macros_8">the Character's Post-History Instructions override</span></li>
<li><tt>&lcub;&lcub;description&rcub;&rcub;</tt> <span data-i18n="help_macros_9">the Character's Description</span></li>

View File

@ -152,7 +152,7 @@ router.post('/status', jsonParser, async function (request, response) {
if (!modelsReply.ok) {
console.log('Models endpoint is offline.');
return response.status(400);
return response.sendStatus(400);
}
/** @type {any} */
@ -173,7 +173,7 @@ router.post('/status', jsonParser, async function (request, response) {
if (!Array.isArray(data.data)) {
console.log('Models response is not an array.');
return response.status(400);
return response.sendStatus(400);
}
const modelIds = data.data.map(x => x.id);
@ -224,7 +224,7 @@ router.post('/status', jsonParser, async function (request, response) {
return response.send({ result, data: data.data });
} catch (error) {
console.error(error);
return response.status(500);
return response.sendStatus(500);
}
});
@ -244,7 +244,7 @@ router.post('/props', jsonParser, async function (request, response) {
const propsReply = await fetch(propsUrl, args);
if (!propsReply.ok) {
return response.status(400);
return response.sendStatus(400);
}
/** @type {any} */
@ -258,7 +258,7 @@ router.post('/props', jsonParser, async function (request, response) {
return response.send(props);
} catch (error) {
console.error(error);
return response.status(500);
return response.sendStatus(500);
}
});
@ -450,7 +450,7 @@ ollama.post('/download', jsonParser, async function (request, response) {
return response.send({ ok: true });
} catch (error) {
console.error(error);
return response.status(500);
return response.sendStatus(500);
}
});
@ -493,7 +493,7 @@ ollama.post('/caption-image', jsonParser, async function (request, response) {
return response.send({ caption });
} catch (error) {
console.error(error);
return response.status(500);
return response.sendStatus(500);
}
});
@ -540,7 +540,7 @@ llamacpp.post('/caption-image', jsonParser, async function (request, response) {
} catch (error) {
console.error(error);
return response.status(500);
return response.sendStatus(500);
}
});
@ -569,7 +569,7 @@ llamacpp.post('/props', jsonParser, async function (request, response) {
} catch (error) {
console.error(error);
return response.status(500);
return response.sendStatus(500);
}
});
@ -619,7 +619,7 @@ llamacpp.post('/slots', jsonParser, async function (request, response) {
} catch (error) {
console.error(error);
return response.status(500);
return response.sendStatus(500);
}
});
@ -665,7 +665,7 @@ tabby.post('/download', jsonParser, async function (request, response) {
return response.send({ ok: true });
} catch (error) {
console.error(error);
return response.status(500);
return response.sendStatus(500);
}
});

View File

@ -2214,9 +2214,9 @@
}
},
"node_modules/cross-spawn": {
"version": "7.0.3",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
"integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
"integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
"license": "MIT",
"dependencies": {
"path-key": "^3.1.0",