From cdbca6d9fde09bc3ad429a99130cfe1cf887b3ec Mon Sep 17 00:00:00 2001 From: kingbri Date: Thu, 17 Aug 2023 23:51:17 -0400 Subject: [PATCH 01/32] CFG: Include the entire prompt with negative prompt CFG with LLMs works differently than stable diffusion. The main principle is prompt mixing and utilizing the differences between the two prompts rather than a full "negative prompt" of what the user doesn't want. SillyTavern its own way of formatting a prompt sent to an LLM backend. Therefore, take that prompt and add negatives to it. Signed-off-by: kingbri --- public/script.js | 6 +++++- public/scripts/extensions/cfg/index.js | 1 + public/scripts/extensions/cfg/util.js | 27 +++++++++++++++++++++----- public/scripts/textgen-settings.js | 4 +--- 4 files changed, 29 insertions(+), 9 deletions(-) diff --git a/public/script.js b/public/script.js index 97b22af2c..430deda8f 100644 --- a/public/script.js +++ b/public/script.js @@ -164,6 +164,7 @@ import { deviceInfo } from "./scripts/RossAscends-mods.js"; import { registerPromptManagerMigration } from "./scripts/PromptManager.js"; import { getRegexedString, regex_placement } from "./scripts/extensions/regex/engine.js"; import { FILTER_TYPES, FilterHelper } from "./scripts/filters.js"; +import { getCfg, getNegativePrompt } from "./scripts/extensions/cfg/util.js"; //exporting functions and vars for mods export { @@ -2905,6 +2906,8 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, let this_amount_gen = parseInt(amount_gen); // how many tokens the AI will be requested to generate let this_settings = koboldai_settings[koboldai_setting_names[preset_settings]]; + const cfgValues = getCfg(finalPromt); + if (isMultigenEnabled() && type !== 'quiet') { // if nothing has been generated yet.. 
this_amount_gen = getMultigenAmount(); @@ -2912,6 +2915,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, let thisPromptBits = []; + // TODO: Make this a switch if (main_api == 'koboldhorde' && horde_settings.auto_adjust_response_length) { this_amount_gen = Math.min(this_amount_gen, adjustedParams.maxLength); this_amount_gen = Math.max(this_amount_gen, MIN_AMOUNT_GEN); // prevent validation errors @@ -2934,7 +2938,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, } } else if (main_api == 'textgenerationwebui') { - generate_data = getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate); + generate_data = getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate, cfgValues); generate_data.use_mancer = api_use_mancer_webui; } else if (main_api == 'novel') { diff --git a/public/scripts/extensions/cfg/index.js b/public/scripts/extensions/cfg/index.js index 31f2af7dc..a5b1f5773 100644 --- a/public/scripts/extensions/cfg/index.js +++ b/public/scripts/extensions/cfg/index.js @@ -273,6 +273,7 @@ jQuery(async () => { saveSettingsDebounced(); }); + // TODO: Add negative insertion depth windowHtml.find('#global_cfg_negative_prompt').on('input', function() { extension_settings.cfg.global.negative_prompt = $(this).val(); saveSettingsDebounced(); diff --git a/public/scripts/extensions/cfg/util.js b/public/scripts/extensions/cfg/util.js index 8637ced3a..0f6cdfb75 100644 --- a/public/scripts/extensions/cfg/util.js +++ b/public/scripts/extensions/cfg/util.js @@ -12,18 +12,23 @@ export const metadataKeys = { guidance_scale: "cfg_guidance_scale", negative_prompt: "cfg_negative_prompt", negative_combine: "cfg_negative_combine", - groupchat_individual_chars: "cfg_groupchat_individual_chars" + groupchat_individual_chars: "cfg_groupchat_individual_chars", + negative_insertion_depth: "cfg_negative_insertion_depth" } // Gets the CFG value from hierarchy of chat -> character -> global // Returns 
undefined values which should be handled in the respective backend APIs -export function getCfg() { +// TODO: Include a custom negative separator +// TODO: Maybe use existing prompt building/substitution? +export function getCfg(prompt) { + const splitPrompt = prompt?.split("\n") ?? []; let splitNegativePrompt = []; const charaCfg = extension_settings.cfg.chara?.find((e) => e.name === getCharaFilename(this_chid)); const guidanceScale = getGuidanceScale(charaCfg); const chatNegativeCombine = chat_metadata[metadataKeys.negative_combine] ?? []; // If there's a guidance scale, continue. Otherwise assume undefined + // TODO: Run substitute params if (guidanceScale?.value && guidanceScale?.value !== 1) { if (guidanceScale.type === cfgType.chat || chatNegativeCombine.includes(cfgType.chat)) { splitNegativePrompt.push(chat_metadata[metadataKeys.negative_prompt]?.trim()); @@ -37,12 +42,15 @@ export function getCfg() { splitNegativePrompt.push(extension_settings.cfg.global.negative_prompt?.trim()); } - const combinedNegatives = splitNegativePrompt.filter((e) => e.length > 0).join(", "); - console.debug(`Setting CFG with guidance scale: ${guidanceScale.value}, negatives: ${combinedNegatives}`) + // TODO: use a custom separator for join + const combinedNegatives = splitNegativePrompt.filter((e) => e.length > 0).join("\n"); + const insertionDepth = chat_metadata[metadataKeys.negative_insertion_depth] ?? 
1; + splitPrompt.splice(splitPrompt.length - insertionDepth, 0, combinedNegatives); + console.log(`Setting CFG with guidance scale: ${guidanceScale.value}, negatives: ${combinedNegatives}`); return { guidanceScale: guidanceScale.value, - negativePrompt: combinedNegatives + negativePrompt: splitPrompt.join("\n") } } } @@ -70,3 +78,12 @@ function getGuidanceScale(charaCfg) { value: extension_settings.cfg.global.guidance_scale }; } + +export function getNegativePrompt(prompt) { + const splitPrompt = prompt.split("\n"); + const insertionDepth = chat_metadata[metadataKeys.negative_insertion_depth] ?? 1; + splitPrompt.splice(splitPrompt.length - insertionDepth, 0, "Test negative list"); + console.log(splitPrompt); + const negativePrompt = splitPrompt.join("\n"); + //console.log(negativePrompt); +} diff --git a/public/scripts/textgen-settings.js b/public/scripts/textgen-settings.js index 9ac736306..48a7c162e 100644 --- a/public/scripts/textgen-settings.js +++ b/public/scripts/textgen-settings.js @@ -235,9 +235,7 @@ async function generateTextGenWithStreaming(generate_data, signal) { } } -export function getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate) { - const cfgValues = getCfg(); - +export function getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate, cfgValues) { return { 'prompt': finalPromt, 'max_new_tokens': this_amount_gen, From 7191f7a8ad8d1378ebf5da1e1636079a8905f93c Mon Sep 17 00:00:00 2001 From: kingbri Date: Sat, 19 Aug 2023 01:19:22 -0400 Subject: [PATCH 02/32] CFG: Add insertion depth and custom separator Insertion depth allows for CFG to variably inject itself into the negative prompt. This is similar to how Author's note works. However, this method of insertion depth conflicts with AN and world info where negatives can be meshed between two lines of those specific insertions. A custom separator must be wrapped in quotes, otherwise the default separator is a newline for negative cascading. 
Signed-off-by: kingbri --- public/script.js | 5 +-- public/scripts/extensions/cfg/index.js | 29 ++++++++++++- public/scripts/extensions/cfg/util.js | 29 +++++-------- public/scripts/extensions/cfg/window.html | 52 ++++++++++++++--------- public/scripts/nai-settings.js | 2 +- public/scripts/textgen-settings.js | 7 ++- 6 files changed, 79 insertions(+), 45 deletions(-) diff --git a/public/script.js b/public/script.js index 430deda8f..273843ff7 100644 --- a/public/script.js +++ b/public/script.js @@ -164,7 +164,6 @@ import { deviceInfo } from "./scripts/RossAscends-mods.js"; import { registerPromptManagerMigration } from "./scripts/PromptManager.js"; import { getRegexedString, regex_placement } from "./scripts/extensions/regex/engine.js"; import { FILTER_TYPES, FilterHelper } from "./scripts/filters.js"; -import { getCfg, getNegativePrompt } from "./scripts/extensions/cfg/util.js"; //exporting functions and vars for mods export { @@ -2906,8 +2905,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, let this_amount_gen = parseInt(amount_gen); // how many tokens the AI will be requested to generate let this_settings = koboldai_settings[koboldai_setting_names[preset_settings]]; - const cfgValues = getCfg(finalPromt); - if (isMultigenEnabled() && type !== 'quiet') { // if nothing has been generated yet.. 
this_amount_gen = getMultigenAmount(); @@ -2938,7 +2935,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, } } else if (main_api == 'textgenerationwebui') { - generate_data = getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate, cfgValues); + generate_data = getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate); generate_data.use_mancer = api_use_mancer_webui; } else if (main_api == 'novel') { diff --git a/public/scripts/extensions/cfg/index.js b/public/scripts/extensions/cfg/index.js index a5b1f5773..e38a255e0 100644 --- a/public/scripts/extensions/cfg/index.js +++ b/public/scripts/extensions/cfg/index.js @@ -182,6 +182,24 @@ function loadSettings() { }); } + // Display the negative separator in quotes if not quoted already + let negativeSeparatorDisplay = []; + const negativeSeparator = chat_metadata[metadataKeys.negative_separator]; + if (negativeSeparator) { + negativeSeparatorDisplay.push(negativeSeparator); + if (!negativeSeparator.startsWith(`"`)) { + negativeSeparatorDisplay.unshift(`"`); + } + + if (!negativeSeparator.endsWith(`"`)) { + negativeSeparatorDisplay.push(`"`); + } + } + + $('#cfg_negative_separator').val(negativeSeparatorDisplay.length === 0 ? '' : negativeSeparatorDisplay.join('')); + + $('#cfg_negative_insertion_depth').val(chat_metadata[metadataKeys.negative_insertion_depth] ?? 
1); + // Set character CFG if it exists if (!selected_group) { const charaCfg = extension_settings.cfg.chara.find((e) => e.name === getCharaFilename()); @@ -273,7 +291,6 @@ jQuery(async () => { saveSettingsDebounced(); }); - // TODO: Add negative insertion depth windowHtml.find('#global_cfg_negative_prompt').on('input', function() { extension_settings.cfg.global.negative_prompt = $(this).val(); saveSettingsDebounced(); @@ -290,6 +307,16 @@ jQuery(async () => { saveMetadataDebounced(); }); + windowHtml.find(`#cfg_negative_insertion_depth`).on('input', function() { + chat_metadata[metadataKeys.negative_insertion_depth] = Number($(this).val()); + saveMetadataDebounced(); + }); + + windowHtml.find(`#cfg_negative_separator`).on('input', function() { + chat_metadata[metadataKeys.negative_separator] = $(this).val(); + saveMetadataDebounced(); + }); + windowHtml.find('#groupchat_cfg_use_chara').on('input', function() { const checked = !!$(this).prop('checked'); chat_metadata[metadataKeys.groupchat_individual_chars] = checked diff --git a/public/scripts/extensions/cfg/util.js b/public/scripts/extensions/cfg/util.js index 0f6cdfb75..07439d7c3 100644 --- a/public/scripts/extensions/cfg/util.js +++ b/public/scripts/extensions/cfg/util.js @@ -1,4 +1,4 @@ -import { chat_metadata, this_chid } from "../../../script.js"; +import { chat_metadata, substituteParams, this_chid } from "../../../script.js"; import { extension_settings, getContext } from "../../extensions.js" import { selected_group } from "../../group-chats.js"; import { getCharaFilename } from "../../utils.js"; @@ -13,13 +13,14 @@ export const metadataKeys = { negative_prompt: "cfg_negative_prompt", negative_combine: "cfg_negative_combine", groupchat_individual_chars: "cfg_groupchat_individual_chars", - negative_insertion_depth: "cfg_negative_insertion_depth" + negative_insertion_depth: "cfg_negative_insertion_depth", + negative_separator: "cfg_negative_separator" } // Gets the CFG value from hierarchy of chat -> 
character -> global // Returns undefined values which should be handled in the respective backend APIs -// TODO: Include a custom negative separator // TODO: Maybe use existing prompt building/substitution? +// TODO: Insertion depth conflicts with author's note. Shouldn't matter though since CFG is prompt mixing. export function getCfg(prompt) { const splitPrompt = prompt?.split("\n") ?? []; let splitNegativePrompt = []; @@ -28,23 +29,24 @@ export function getCfg(prompt) { const chatNegativeCombine = chat_metadata[metadataKeys.negative_combine] ?? []; // If there's a guidance scale, continue. Otherwise assume undefined - // TODO: Run substitute params if (guidanceScale?.value && guidanceScale?.value !== 1) { if (guidanceScale.type === cfgType.chat || chatNegativeCombine.includes(cfgType.chat)) { - splitNegativePrompt.push(chat_metadata[metadataKeys.negative_prompt]?.trim()); + splitNegativePrompt.unshift(substituteParams(chat_metadata[metadataKeys.negative_prompt])?.trim()); } if (guidanceScale.type === cfgType.chara || chatNegativeCombine.includes(cfgType.chara)) { - splitNegativePrompt.push(charaCfg.negative_prompt?.trim()) + splitNegativePrompt.unshift(substituteParams(charaCfg.negative_prompt)?.trim()) } if (guidanceScale.type === cfgType.global || chatNegativeCombine.includes(cfgType.global)) { - splitNegativePrompt.push(extension_settings.cfg.global.negative_prompt?.trim()); + splitNegativePrompt.unshift(substituteParams(extension_settings.cfg.global.negative_prompt)?.trim()); } - // TODO: use a custom separator for join - const combinedNegatives = splitNegativePrompt.filter((e) => e.length > 0).join("\n"); + // This line is a bit hacky with a JSON.stringify and JSON.parse. Fix this if possible. + const negativeSeparator = JSON.parse(chat_metadata[metadataKeys.negative_separator] || JSON.stringify("\n")) ?? 
"\n"; + const combinedNegatives = splitNegativePrompt.filter((e) => e.length > 0).join(negativeSeparator); const insertionDepth = chat_metadata[metadataKeys.negative_insertion_depth] ?? 1; + console.log(insertionDepth) splitPrompt.splice(splitPrompt.length - insertionDepth, 0, combinedNegatives); console.log(`Setting CFG with guidance scale: ${guidanceScale.value}, negatives: ${combinedNegatives}`); @@ -78,12 +80,3 @@ function getGuidanceScale(charaCfg) { value: extension_settings.cfg.global.guidance_scale }; } - -export function getNegativePrompt(prompt) { - const splitPrompt = prompt.split("\n"); - const insertionDepth = chat_metadata[metadataKeys.negative_insertion_depth] ?? 1; - splitPrompt.splice(splitPrompt.length - insertionDepth, 0, "Test negative list"); - console.log(splitPrompt); - const negativePrompt = splitPrompt.join("\n"); - //console.log(negativePrompt); -} diff --git a/public/scripts/extensions/cfg/window.html b/public/scripts/extensions/cfg/window.html index 3db65c9ad..5e0d21f7c 100644 --- a/public/scripts/extensions/cfg/window.html +++ b/public/scripts/extensions/cfg/window.html @@ -117,27 +117,39 @@
- - Combine negative prompts from other boxes. -
- For example, ticking the chat, global, and character boxes combine all negative prompts into a comma-separated string. -
+
+ + Combine negative prompts from other boxes. +
+ For example, ticking the chat, global, and character boxes combine all negative prompts into a comma-separated string. +
+

- - - - +
+ + + + +
+
+ + +
diff --git a/public/scripts/nai-settings.js b/public/scripts/nai-settings.js index 0dff75b3c..6fcf5baca 100644 --- a/public/scripts/nai-settings.js +++ b/public/scripts/nai-settings.js @@ -410,7 +410,7 @@ export function getNovelGenerationData(finalPrompt, this_settings, this_amount_g : undefined; const prefix = selectPrefix(nai_settings.prefix, finalPrompt); - const cfgSettings = getCfg(); + const cfgSettings = getCfg(finalPrompt); let logitBias = []; if (tokenizerType !== tokenizers.NONE && Array.isArray(nai_settings.logit_bias) && nai_settings.logit_bias.length) { diff --git a/public/scripts/textgen-settings.js b/public/scripts/textgen-settings.js index 48a7c162e..c2a29d749 100644 --- a/public/scripts/textgen-settings.js +++ b/public/scripts/textgen-settings.js @@ -235,7 +235,12 @@ async function generateTextGenWithStreaming(generate_data, signal) { } } -export function getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate, cfgValues) { +export function getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate) { + let cfgValues = {}; + if (!isImpersonate) { + cfgValues = getCfg(finalPromt); + } + return { 'prompt': finalPromt, 'max_new_tokens': this_amount_gen, From 92e6c6a998180a69872162ec076cbc2100ad151a Mon Sep 17 00:00:00 2001 From: kingbri Date: Sun, 20 Aug 2023 00:44:39 -0400 Subject: [PATCH 03/32] CFG: Use ST prompt builder for negatives Make the generate function build a negative prompt in addition to the normal one. This allows for nonconflicting insertion with other extension prompts and World Info. 
Signed-off-by: kingbri --- public/script.js | 111 ++++++++++++++++---------- public/scripts/extensions/cfg/util.js | 75 ++++++++--------- public/scripts/nai-settings.js | 8 +- public/scripts/textgen-settings.js | 13 +-- 4 files changed, 107 insertions(+), 100 deletions(-) diff --git a/public/script.js b/public/script.js index 273843ff7..c22628ff5 100644 --- a/public/script.js +++ b/public/script.js @@ -164,6 +164,7 @@ import { deviceInfo } from "./scripts/RossAscends-mods.js"; import { registerPromptManagerMigration } from "./scripts/PromptManager.js"; import { getRegexedString, regex_placement } from "./scripts/extensions/regex/engine.js"; import { FILTER_TYPES, FilterHelper } from "./scripts/filters.js"; +import { getCfgPrompt, getGuidanceScale } from "./scripts/extensions/cfg/util.js"; //exporting functions and vars for mods export { @@ -2762,6 +2763,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, } const anchorDepth = Math.abs(i - arrMes.length + 1); + // NOTE: Depth injected here! const extensionAnchor = getExtensionPrompt(extension_prompt_types.IN_CHAT, anchorDepth); if (anchorDepth > 0 && extensionAnchor && extensionAnchor.length) { @@ -2773,7 +2775,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, } let mesExmString = ''; - let mesSendString = ''; function setPromtString() { if (main_api == 'openai') { @@ -2782,65 +2783,57 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, console.debug('--setting Prompt string'); mesExmString = pinExmString ?? 
mesExamplesArray.slice(0, count_exm_add).join(''); - mesSendString = ''; - for (let j = 0; j < mesSend.length; j++) { - const isBottom = j === mesSend.length - 1; - mesSendString += mesSend[j]; - - if (isBottom) { - mesSendString = modifyLastPromptLine(mesSendString); - } - } + mesSend[mesSend.length - 1] = modifyLastPromptLine(mesSend[mesSend.length - 1]); } - function modifyLastPromptLine(mesSendString) { + function modifyLastPromptLine(lastMesString) { // Add quiet generation prompt at depth 0 if (quiet_prompt && quiet_prompt.length) { const name = is_pygmalion ? 'You' : name1; const quietAppend = isInstruct ? formatInstructModeChat(name, quiet_prompt, false, true, false, name1, name2) : `\n${name}: ${quiet_prompt}`; - mesSendString += quietAppend; + lastMesString += quietAppend; // Bail out early - return mesSendString; + return lastMesString; } // Get instruct mode line if (isInstruct && tokens_already_generated === 0) { const name = isImpersonate ? (is_pygmalion ? 'You' : name1) : name2; - mesSendString += formatInstructModePrompt(name, isImpersonate, promptBias, name1, name2); + lastMesString += formatInstructModePrompt(name, isImpersonate, promptBias, name1, name2); } // Get non-instruct impersonation line if (!isInstruct && isImpersonate && tokens_already_generated === 0) { const name = is_pygmalion ? 
'You' : name1; - if (!mesSendString.endsWith('\n')) { - mesSendString += '\n'; + if (!lastMesString.endsWith('\n')) { + lastMesString += '\n'; } - mesSendString += name + ':'; + lastMesString += name + ':'; } // Add character's name if (!isInstruct && force_name2 && tokens_already_generated === 0) { - if (!mesSendString.endsWith('\n')) { - mesSendString += '\n'; + if (!lastMesString.endsWith('\n')) { + lastMesString += '\n'; } // Add a leading space to the prompt bias if applicable if (!promptBias || promptBias.length === 0) { console.debug("No prompt bias was found."); - mesSendString += `${name2}:`; + lastMesString += `${name2}:`; } else if (promptBias.startsWith(' ')) { console.debug(`A prompt bias with a leading space was found: ${promptBias}`); - mesSendString += `${name2}:${promptBias}` + lastMesString += `${name2}:${promptBias}` } else { console.debug(`A prompt bias was found: ${promptBias}`); - mesSendString += `${name2}: ${promptBias}`; + lastMesString += `${name2}: ${promptBias}`; } } else if (power_user.user_prompt_bias && !isImpersonate && !isInstruct) { console.debug(`A prompt bias was found without character's name appended: ${promptBias}`); - mesSendString += substituteParams(power_user.user_prompt_bias); + lastMesString += substituteParams(power_user.user_prompt_bias); } - return mesSendString; + return lastMesString; } function checkPromtSize() { @@ -2849,7 +2842,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, const prompt = [ storyString, mesExmString, - mesSendString, + mesSend.join(''), generatedPromtCache, allAnchors, quiet_prompt, @@ -2878,30 +2871,60 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, setPromtString(); } + const cfgGuidanceScale = getGuidanceScale(); + function getCombinedPrompt(isNegative) { + if (isNegative && cfgGuidanceScale !== 1) { + const negativePrompt = getCfgPrompt(cfgGuidanceScale); + if (negativePrompt && negativePrompt?.value) { + // TODO: 
kingbri: use the insertion depth method instead of splicing + mesSend.splice(mesSend.length - negativePrompt.depth, 0, `${negativePrompt.value}\n`); + } + } + + let mesSendString = mesSend.join(''); + + // add chat preamble + mesSendString = addChatsPreamble(mesSendString); + + // add a custom dingus (if defined) + mesSendString = addChatsSeparator(mesSendString); + + if (zeroDepthAnchor && zeroDepthAnchor.length) { + if (!isMultigenEnabled() || tokens_already_generated == 0) { + combinedPrompt = appendZeroDepthAnchor(force_name2, zeroDepthAnchor, combinedPrompt); + } + } + + let combinedPrompt = + storyString + + afterScenarioAnchor + + mesExmString + + mesSendString + + generatedPromtCache; + + combinedPrompt = combinedPrompt.replace(/\r/gm, ''); + + if (power_user.collapse_newlines) { + combinedPrompt = collapseNewlines(combinedPrompt); + } + + return combinedPrompt; + } + + let mesSendString = mesSend.join(''); // add chat preamble mesSendString = addChatsPreamble(mesSendString); // add a custom dingus (if defined) mesSendString = addChatsSeparator(mesSendString); - let finalPromt = - storyString + - afterScenarioAnchor + - mesExmString + - mesSendString + - generatedPromtCache; + let finalPromt = getCombinedPrompt(false); + let negativePrompt = getCombinedPrompt(true); + const cfgValues = { + guidanceScale: cfgGuidanceScale?.value, + negativePrompt: negativePrompt + }; - if (zeroDepthAnchor && zeroDepthAnchor.length) { - if (!isMultigenEnabled() || tokens_already_generated == 0) { - finalPromt = appendZeroDepthAnchor(force_name2, zeroDepthAnchor, finalPromt); - } - } - - finalPromt = finalPromt.replace(/\r/gm, ''); - - if (power_user.collapse_newlines) { - finalPromt = collapseNewlines(finalPromt); - } let this_amount_gen = parseInt(amount_gen); // how many tokens the AI will be requested to generate let this_settings = koboldai_settings[koboldai_setting_names[preset_settings]]; @@ -2935,12 +2958,12 @@ async function Generate(type, { automatic_trigger, 
force_name2, resolve, reject, } } else if (main_api == 'textgenerationwebui') { - generate_data = getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate); + generate_data = getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate, cfgValues); generate_data.use_mancer = api_use_mancer_webui; } else if (main_api == 'novel') { const this_settings = novelai_settings[novelai_setting_names[nai_settings.preset_settings_novel]]; - generate_data = getNovelGenerationData(finalPromt, this_settings, this_amount_gen, isImpersonate); + generate_data = getNovelGenerationData(finalPromt, this_settings, this_amount_gen, isImpersonate, cfgValues); } else if (main_api == 'openai') { let [prompt, counts] = prepareOpenAIMessages({ diff --git a/public/scripts/extensions/cfg/util.js b/public/scripts/extensions/cfg/util.js index 07439d7c3..d37d54725 100644 --- a/public/scripts/extensions/cfg/util.js +++ b/public/scripts/extensions/cfg/util.js @@ -17,50 +17,13 @@ export const metadataKeys = { negative_separator: "cfg_negative_separator" } -// Gets the CFG value from hierarchy of chat -> character -> global -// Returns undefined values which should be handled in the respective backend APIs -// TODO: Maybe use existing prompt building/substitution? -// TODO: Insertion depth conflicts with author's note. Shouldn't matter though since CFG is prompt mixing. -export function getCfg(prompt) { - const splitPrompt = prompt?.split("\n") ?? []; - let splitNegativePrompt = []; - const charaCfg = extension_settings.cfg.chara?.find((e) => e.name === getCharaFilename(this_chid)); - const guidanceScale = getGuidanceScale(charaCfg); - const chatNegativeCombine = chat_metadata[metadataKeys.negative_combine] ?? []; - - // If there's a guidance scale, continue. 
Otherwise assume undefined - if (guidanceScale?.value && guidanceScale?.value !== 1) { - if (guidanceScale.type === cfgType.chat || chatNegativeCombine.includes(cfgType.chat)) { - splitNegativePrompt.unshift(substituteParams(chat_metadata[metadataKeys.negative_prompt])?.trim()); - } - - if (guidanceScale.type === cfgType.chara || chatNegativeCombine.includes(cfgType.chara)) { - splitNegativePrompt.unshift(substituteParams(charaCfg.negative_prompt)?.trim()) - } - - if (guidanceScale.type === cfgType.global || chatNegativeCombine.includes(cfgType.global)) { - splitNegativePrompt.unshift(substituteParams(extension_settings.cfg.global.negative_prompt)?.trim()); - } - - // This line is a bit hacky with a JSON.stringify and JSON.parse. Fix this if possible. - const negativeSeparator = JSON.parse(chat_metadata[metadataKeys.negative_separator] || JSON.stringify("\n")) ?? "\n"; - const combinedNegatives = splitNegativePrompt.filter((e) => e.length > 0).join(negativeSeparator); - const insertionDepth = chat_metadata[metadataKeys.negative_insertion_depth] ?? 1; - console.log(insertionDepth) - splitPrompt.splice(splitPrompt.length - insertionDepth, 0, combinedNegatives); - console.log(`Setting CFG with guidance scale: ${guidanceScale.value}, negatives: ${combinedNegatives}`); - - return { - guidanceScale: guidanceScale.value, - negativePrompt: splitPrompt.join("\n") - } - } -} - +// Gets the CFG guidance scale // If the guidance scale is 1, ignore the CFG negative prompt since it won't be used anyways -function getGuidanceScale(charaCfg) { +export function getGuidanceScale() { + const charaCfg = extension_settings.cfg.chara?.find((e) => e.name === getCharaFilename(this_chid)); const chatGuidanceScale = chat_metadata[metadataKeys.guidance_scale]; const groupchatCharOverride = chat_metadata[metadataKeys.groupchat_individual_chars] ?? 
false; + if (chatGuidanceScale && chatGuidanceScale !== 1 && !groupchatCharOverride) { return { type: cfgType.chat, @@ -80,3 +43,33 @@ function getGuidanceScale(charaCfg) { value: extension_settings.cfg.global.guidance_scale }; } + +// Gets the CFG prompt. Currently only gets the negative prompt +export function getCfgPrompt(guidanceScale) { + let splitNegativePrompt = []; + + const chatNegativeCombine = chat_metadata[metadataKeys.negative_combine] ?? []; + if (guidanceScale.type === cfgType.chat || chatNegativeCombine.includes(cfgType.chat)) { + splitNegativePrompt.unshift(substituteParams(chat_metadata[metadataKeys.negative_prompt])?.trim()); + } + + const charaCfg = extension_settings.cfg.chara?.find((e) => e.name === getCharaFilename(this_chid)); + if (guidanceScale.type === cfgType.chara || chatNegativeCombine.includes(cfgType.chara)) { + splitNegativePrompt.unshift(substituteParams(charaCfg.negative_prompt)?.trim()) + } + + if (guidanceScale.type === cfgType.global || chatNegativeCombine.includes(cfgType.global)) { + splitNegativePrompt.unshift(substituteParams(extension_settings.cfg.global.negative_prompt)?.trim()); + } + + // This line is a bit hacky with a JSON.stringify and JSON.parse. Fix this if possible. + const negativeSeparator = JSON.parse(chat_metadata[metadataKeys.negative_separator] || JSON.stringify("\n")) ?? "\n"; + const combinedNegatives = splitNegativePrompt.filter((e) => e.length > 0).join(negativeSeparator); + const insertionDepth = chat_metadata[metadataKeys.negative_insertion_depth] ?? 
1; + console.log(`Setting CFG with guidance scale: ${guidanceScale.value}, negatives: ${combinedNegatives}`); + + return { + value: combinedNegatives, + depth: insertionDepth + }; +} diff --git a/public/scripts/nai-settings.js b/public/scripts/nai-settings.js index 6fcf5baca..b18306922 100644 --- a/public/scripts/nai-settings.js +++ b/public/scripts/nai-settings.js @@ -7,7 +7,6 @@ import { saveSettingsDebounced, setGenerationParamsFromPreset } from "../script.js"; -import { getCfg } from "./extensions/cfg/util.js"; import { MAX_CONTEXT_DEFAULT, tokenizers } from "./power-user.js"; import { getSortableDelay, @@ -395,7 +394,7 @@ function getBadWordPermutations(text) { return result; } -export function getNovelGenerationData(finalPrompt, this_settings, this_amount_gen, isImpersonate) { +export function getNovelGenerationData(finalPrompt, this_settings, this_amount_gen, isImpersonate, cfgValues) { const clio = nai_settings.model_novel.includes('clio'); const kayra = nai_settings.model_novel.includes('kayra'); @@ -410,7 +409,6 @@ export function getNovelGenerationData(finalPrompt, this_settings, this_amount_g : undefined; const prefix = selectPrefix(nai_settings.prefix, finalPrompt); - const cfgSettings = getCfg(finalPrompt); let logitBias = []; if (tokenizerType !== tokenizers.NONE && Array.isArray(nai_settings.logit_bias) && nai_settings.logit_bias.length) { @@ -437,8 +435,8 @@ export function getNovelGenerationData(finalPrompt, this_settings, this_amount_g "typical_p": parseFloat(nai_settings.typical_p), "mirostat_lr": parseFloat(nai_settings.mirostat_lr), "mirostat_tau": parseFloat(nai_settings.mirostat_tau), - "cfg_scale": cfgSettings?.guidanceScale ?? parseFloat(nai_settings.cfg_scale), - "cfg_uc": cfgSettings?.negativePrompt ?? nai_settings.cfg_uc ?? "", + "cfg_scale": cfgValues?.guidanceScale ?? parseFloat(nai_settings.cfg_scale), + "cfg_uc": cfgValues?.negativePrompt ?? nai_settings.cfg_uc ?? 
"", "phrase_rep_pen": nai_settings.phrase_rep_pen, "stop_sequences": stopSequences, "bad_words_ids": badWordIds, diff --git a/public/scripts/textgen-settings.js b/public/scripts/textgen-settings.js index c2a29d749..d61d0dd25 100644 --- a/public/scripts/textgen-settings.js +++ b/public/scripts/textgen-settings.js @@ -6,8 +6,6 @@ import { setGenerationParamsFromPreset, } from "../script.js"; -import { getCfg } from "./extensions/cfg/util.js"; - import { power_user, } from "./power-user.js"; @@ -235,12 +233,7 @@ async function generateTextGenWithStreaming(generate_data, signal) { } } -export function getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate) { - let cfgValues = {}; - if (!isImpersonate) { - cfgValues = getCfg(finalPromt); - } - +export function getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate, cfgValues) { return { 'prompt': finalPromt, 'max_new_tokens': this_amount_gen, @@ -258,8 +251,8 @@ export function getTextGenGenerationData(finalPromt, this_amount_gen, isImperson 'penalty_alpha': textgenerationwebui_settings.penalty_alpha, 'length_penalty': textgenerationwebui_settings.length_penalty, 'early_stopping': textgenerationwebui_settings.early_stopping, - 'guidance_scale': cfgValues?.guidanceScale ?? textgenerationwebui_settings.guidance_scale ?? 1, - 'negative_prompt': cfgValues?.negativePrompt ?? textgenerationwebui_settings.negative_prompt ?? '', + 'guidance_scale': isImpersonate ? 1 : cfgValues?.guidanceScale ?? textgenerationwebui_settings.guidance_scale ?? 1, + 'negative_prompt': isImpersonate ? '' : cfgValues?.negativePrompt ?? textgenerationwebui_settings.negative_prompt ?? 
'', 'seed': textgenerationwebui_settings.seed, 'add_bos_token': textgenerationwebui_settings.add_bos_token, 'stopping_strings': getStoppingStrings(isImpersonate, false), From ac319dbd30882a8d6e3212bfc8b8a6b4dcd965ef Mon Sep 17 00:00:00 2001 From: kingbri Date: Sun, 20 Aug 2023 02:19:25 -0400 Subject: [PATCH 04/32] CFG: Add positive prompt support and fixes Positive prompts are the opposite of negative prompts. This helps make the mixing process more accurate by keeping the negative differences as close as possible to the positive ones by including this prompt. In addition, fix prompt insertion order at a depth of 0 by hijacking the same function used for Author's Note as a zero depth anchor. Signed-off-by: kingbri --- public/script.js | 55 ++++++---- public/scripts/extensions/cfg/index.js | 117 +++++++++++++++------- public/scripts/extensions/cfg/util.js | 56 +++++++---- public/scripts/extensions/cfg/window.html | 39 +++++--- 4 files changed, 180 insertions(+), 87 deletions(-) diff --git a/public/script.js b/public/script.js index c22628ff5..7567a7c02 100644 --- a/public/script.js +++ b/public/script.js @@ -2720,6 +2720,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, is_send_press = true; } + console.log(cycleGenerationPromt) generatedPromtCache += cycleGenerationPromt; if (generatedPromtCache.length == 0 || type === 'continue') { if (main_api === 'openai') { @@ -2871,17 +2872,28 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, setPromtString(); } + // Fetches the combined prompt for both negative and positive prompts const cfgGuidanceScale = getGuidanceScale(); function getCombinedPrompt(isNegative) { - if (isNegative && cfgGuidanceScale !== 1) { - const negativePrompt = getCfgPrompt(cfgGuidanceScale); - if (negativePrompt && negativePrompt?.value) { - // TODO: kingbri: use the insertion depth method instead of splicing - mesSend.splice(mesSend.length - negativePrompt.depth, 0, 
`${negativePrompt.value}\n`); + // Use a negative mesSend if present + let negativeMesSend = []; + let cfgPrompt = {}; + if (cfgGuidanceScale && cfgGuidanceScale?.value !== 1) { + cfgPrompt = getCfgPrompt(cfgGuidanceScale, isNegative); + } + + if (cfgPrompt && cfgPrompt?.value && cfgPrompt?.depth !== 0) { + const cfgPromptValue = `${cfgPrompt.value}\n` + // TODO: kingbri: use the insertion depth method instead of splicing + if (isNegative) { + negativeMesSend = [...mesSend]; + negativeMesSend.splice(mesSend.length - cfgPrompt.depth, 0, cfgPromptValue); + } else { + mesSend.splice(mesSend.length - cfgPrompt.depth, 0, cfgPromptValue); } } - let mesSendString = mesSend.join(''); + let mesSendString = isNegative ? negativeMesSend.join('') : mesSend.join(''); // add chat preamble mesSendString = addChatsPreamble(mesSendString); @@ -2889,12 +2901,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, // add a custom dingus (if defined) mesSendString = addChatsSeparator(mesSendString); - if (zeroDepthAnchor && zeroDepthAnchor.length) { - if (!isMultigenEnabled() || tokens_already_generated == 0) { - combinedPrompt = appendZeroDepthAnchor(force_name2, zeroDepthAnchor, combinedPrompt); - } - } - let combinedPrompt = storyString + afterScenarioAnchor + @@ -2902,6 +2908,19 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, mesSendString + generatedPromtCache; + if (zeroDepthAnchor && zeroDepthAnchor.length) { + if (!isMultigenEnabled() || tokens_already_generated == 0) { + combinedPrompt = appendZeroDepthAnchor(force_name2, zeroDepthAnchor, combinedPrompt); + } + } + + // Append zero-depth anchor for CFG + if (cfgPrompt && cfgPrompt?.value && cfgPrompt?.depth === 0) { + if (!isMultigenEnabled() || tokens_already_generated == 0) { + combinedPrompt = appendZeroDepthAnchor(force_name2, cfgPrompt.value, combinedPrompt); + } + } + combinedPrompt = combinedPrompt.replace(/\r/gm, ''); if (power_user.collapse_newlines) { 
@@ -2911,15 +2930,9 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, return combinedPrompt; } - let mesSendString = mesSend.join(''); - // add chat preamble - mesSendString = addChatsPreamble(mesSendString); - - // add a custom dingus (if defined) - mesSendString = addChatsSeparator(mesSendString); - - let finalPromt = getCombinedPrompt(false); + // Get the negative prompt first since it has the unmodified mesSend array let negativePrompt = getCombinedPrompt(true); + let finalPromt = getCombinedPrompt(false); const cfgValues = { guidanceScale: cfgGuidanceScale?.value, negativePrompt: negativePrompt @@ -3018,7 +3031,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, storyString: storyString, afterScenarioAnchor: afterScenarioAnchor, examplesString: examplesString, - mesSendString: mesSendString, + mesSendString: mesSend.join(''), generatedPromtCache: generatedPromtCache, promptBias: promptBias, finalPromt: finalPromt, diff --git a/public/scripts/extensions/cfg/index.js b/public/scripts/extensions/cfg/index.js index e38a255e0..c850ca6d6 100644 --- a/public/scripts/extensions/cfg/index.js +++ b/public/scripts/extensions/cfg/index.js @@ -23,7 +23,8 @@ const defaultSettings = { }; const settingType = { guidance_scale: 0, - negative_prompt: 1 + negative_prompt: 1, + positive_prompt: 2 } // Used for character and chat CFG values @@ -36,19 +37,19 @@ function setCharCfg(tempValue, setting) { const avatarName = getCharaFilename(); // Assign temp object - let tempCharaCfg; + let tempCharaCfg = { + name: avatarName + }; + switch(setting) { case settingType.guidance_scale: - tempCharaCfg = { - "name": avatarName, - "guidance_scale": Number(tempValue) - } + tempCharaCfg["guidance_scale"] = Number(tempValue); break; case settingType.negative_prompt: - tempCharaCfg = { - "name": avatarName, - "negative_prompt": tempValue - } + tempCharaCfg["negative_prompt"] = tempValue; + break; + case 
settingType.positive_prompt: + tempCharaCfg["positive_prompt"] = tempValue; break; default: return false; @@ -66,7 +67,11 @@ function setCharCfg(tempValue, setting) { const tempAssign = Object.assign(existingCharaCfg, tempCharaCfg); // If both values are default, remove the entry - if (!existingCharaCfg.useChara && (tempAssign.guidance_scale ?? 1.00) === 1.00 && (tempAssign.negative_prompt?.length ?? 0) === 0) { + if (!existingCharaCfg.useChara && + (tempAssign.guidance_scale ?? 1.00) === 1.00 && + (tempAssign.negative_prompt?.length ?? 0) === 0 && + (tempAssign.positive_prompt?.length ?? 0) === 0) + { extension_settings.cfg.chara.splice(existingCharaCfgIndex, 1); } } else if (avatarName && tempValue.length > 0) { @@ -95,6 +100,9 @@ function setChatCfg(tempValue, setting) { case settingType.negative_prompt: chat_metadata[metadataKeys.negative_prompt] = tempValue; break; + case settingType.positive_prompt: + chat_metadata[metadataKeys.positive_prompt] = tempValue; + break; default: return false; } @@ -174,31 +182,32 @@ function loadSettings() { $('#chat_cfg_guidance_scale').val(chat_metadata[metadataKeys.guidance_scale] ?? 1.0.toFixed(2)); $('#chat_cfg_guidance_scale_counter').text(chat_metadata[metadataKeys.guidance_scale]?.toFixed(2) ?? 1.0.toFixed(2)); $('#chat_cfg_negative_prompt').val(chat_metadata[metadataKeys.negative_prompt] ?? ''); + $('#chat_cfg_positive_prompt').val(chat_metadata[metadataKeys.positive_prompt] ?? ''); $('#groupchat_cfg_use_chara').prop('checked', chat_metadata[metadataKeys.groupchat_individual_chars] ?? 
false); - if (chat_metadata[metadataKeys.negative_combine]?.length > 0) { - chat_metadata[metadataKeys.negative_combine].forEach((element) => { - $(`input[name="cfg_negative_combine"][value="${element}"]`) + if (chat_metadata[metadataKeys.prompt_combine]?.length > 0) { + chat_metadata[metadataKeys.prompt_combine].forEach((element) => { + $(`input[name="cfg_prompt_combine"][value="${element}"]`) .prop("checked", true); }); } // Display the negative separator in quotes if not quoted already - let negativeSeparatorDisplay = []; - const negativeSeparator = chat_metadata[metadataKeys.negative_separator]; - if (negativeSeparator) { - negativeSeparatorDisplay.push(negativeSeparator); - if (!negativeSeparator.startsWith(`"`)) { - negativeSeparatorDisplay.unshift(`"`); + let promptSeparatorDisplay = []; + const promptSeparator = chat_metadata[metadataKeys.prompt_separator]; + if (promptSeparator) { + promptSeparatorDisplay.push(promptSeparator); + if (!promptSeparator.startsWith(`"`)) { + promptSeparatorDisplay.unshift(`"`); } - if (!negativeSeparator.endsWith(`"`)) { - negativeSeparatorDisplay.push(`"`); + if (!promptSeparator.endsWith(`"`)) { + promptSeparatorDisplay.push(`"`); } } - $('#cfg_negative_separator').val(negativeSeparatorDisplay.length === 0 ? '' : negativeSeparatorDisplay.join('')); + $('#cfg_prompt_separator').val(promptSeparatorDisplay.length === 0 ? '' : promptSeparatorDisplay.join('')); - $('#cfg_negative_insertion_depth').val(chat_metadata[metadataKeys.negative_insertion_depth] ?? 1); + $('#cfg_prompt_insertion_depth').val(chat_metadata[metadataKeys.prompt_insertion_depth] ?? 1); // Set character CFG if it exists if (!selected_group) { @@ -206,6 +215,7 @@ function loadSettings() { $('#chara_cfg_guidance_scale').val(charaCfg?.guidance_scale ?? 1.00); $('#chara_cfg_guidance_scale_counter').text(charaCfg?.guidance_scale?.toFixed(2) ?? 1.0.toFixed(2)); $('#chara_cfg_negative_prompt').val(charaCfg?.negative_prompt ?? 
''); + $('#chara_cfg_positive_prompt').val(charaCfg?.positive_prompt ?? ''); } } @@ -222,26 +232,50 @@ async function initialLoadSettings() { $('#global_cfg_guidance_scale').val(extension_settings.cfg.global.guidance_scale); $('#global_cfg_guidance_scale_counter').text(extension_settings.cfg.global.guidance_scale.toFixed(2)); $('#global_cfg_negative_prompt').val(extension_settings.cfg.global.negative_prompt); + $('#global_cfg_positive_prompt').val(extension_settings.cfg.global.positive_prompt); } function migrateSettings() { - let performSave = false; + let performSettingsSave = false; + let performMetaSave = false; if (power_user.guidance_scale) { extension_settings.cfg.global.guidance_scale = power_user.guidance_scale; delete power_user['guidance_scale']; - performSave = true; + performSettingsSave = true; } if (power_user.negative_prompt) { extension_settings.cfg.global.negative_prompt = power_user.negative_prompt; delete power_user['negative_prompt']; - performSave = true; + performSettingsSave = true; } - if (performSave) { + if (chat_metadata["cfg_negative_combine"]) { + chat_metadata[metadataKeys.prompt_combine] = chat_metadata["cfg_negative_combine"]; + chat_metadata["cfg_negative_combine"] = undefined; + performMetaSave = true; + } + + if (chat_metadata["cfg_negative_insertion_depth"]) { + chat_metadata[metadataKeys.prompt_insertion_depth] = chat_metadata["cfg_negative_insertion_depth"]; + chat_metadata["cfg_negative_insertion_depth"] = undefined; + performMetaSave = true; + } + + if (chat_metadata["cfg_negative_separator"]) { + chat_metadata[metadataKeys.prompt_separator] = chat_metadata["cfg_negative_separator"]; + chat_metadata["cfg_negative_separator"] = undefined; + performMetaSave = true; + } + + if (performSettingsSave) { saveSettingsDebounced(); } + + if (performMetaSave) { + saveMetadataDebounced(); + } } // This function is called when the extension is loaded @@ -273,6 +307,10 @@ jQuery(async () => { setChatCfg($(this).val(), 
settingType.negative_prompt); }); + windowHtml.find('#chat_cfg_positive_prompt').on('input', function() { + setChatCfg($(this).val(), settingType.positive_prompt); + }); + windowHtml.find('#chara_cfg_guidance_scale').on('input', function() { const value = $(this).val(); const success = setCharCfg(value, settingType.guidance_scale); @@ -285,6 +323,10 @@ jQuery(async () => { setCharCfg($(this).val(), settingType.negative_prompt); }); + windowHtml.find('#chara_cfg_positive_prompt').on('input', function() { + setCharCfg($(this).val(), settingType.positive_prompt); + }); + windowHtml.find('#global_cfg_guidance_scale').on('input', function() { extension_settings.cfg.global.guidance_scale = Number($(this).val()); $('#global_cfg_guidance_scale_counter').text(extension_settings.cfg.global.guidance_scale.toFixed(2)); @@ -296,24 +338,29 @@ jQuery(async () => { saveSettingsDebounced(); }); - windowHtml.find(`input[name="cfg_negative_combine"]`).on('input', function() { - const values = windowHtml.find(`input[name="cfg_negative_combine"]`) + windowHtml.find('#global_cfg_positive_prompt').on('input', function() { + extension_settings.cfg.global.positive_prompt = $(this).val(); + saveSettingsDebounced(); + }); + + windowHtml.find(`input[name="cfg_prompt_combine"]`).on('input', function() { + const values = windowHtml.find(`input[name="cfg_prompt_combine"]`) .filter(":checked") .map(function() { return parseInt($(this).val()) }) .get() .filter((e) => e !== NaN) || []; - chat_metadata[metadataKeys.negative_combine] = values; + chat_metadata[metadataKeys.prompt_combine] = values; saveMetadataDebounced(); }); - windowHtml.find(`#cfg_negative_insertion_depth`).on('input', function() { - chat_metadata[metadataKeys.negative_insertion_depth] = Number($(this).val()); + windowHtml.find(`#cfg_prompt_insertion_depth`).on('input', function() { + chat_metadata[metadataKeys.prompt_insertion_depth] = Number($(this).val()); saveMetadataDebounced(); }); - 
windowHtml.find(`#cfg_negative_separator`).on('input', function() { - chat_metadata[metadataKeys.negative_separator] = $(this).val(); + windowHtml.find(`#cfg_prompt_separator`).on('input', function() { + chat_metadata[metadataKeys.prompt_separator] = $(this).val(); saveMetadataDebounced(); }); diff --git a/public/scripts/extensions/cfg/util.js b/public/scripts/extensions/cfg/util.js index d37d54725..108c22646 100644 --- a/public/scripts/extensions/cfg/util.js +++ b/public/scripts/extensions/cfg/util.js @@ -11,14 +11,15 @@ export const cfgType = { export const metadataKeys = { guidance_scale: "cfg_guidance_scale", negative_prompt: "cfg_negative_prompt", - negative_combine: "cfg_negative_combine", + positive_prompt: "cfg_positive_prompt", + prompt_combine: "cfg_prompt_combine", groupchat_individual_chars: "cfg_groupchat_individual_chars", - negative_insertion_depth: "cfg_negative_insertion_depth", - negative_separator: "cfg_negative_separator" + prompt_insertion_depth: "cfg_prompt_insertion_depth", + prompt_separator: "cfg_prompt_separator" } // Gets the CFG guidance scale -// If the guidance scale is 1, ignore the CFG negative prompt since it won't be used anyways +// If the guidance scale is 1, ignore the CFG prompt(s) since it won't be used anyways export function getGuidanceScale() { const charaCfg = extension_settings.cfg.chara?.find((e) => e.name === getCharaFilename(this_chid)); const chatGuidanceScale = chat_metadata[metadataKeys.guidance_scale]; @@ -44,32 +45,49 @@ export function getGuidanceScale() { }; } -// Gets the CFG prompt. Currently only gets the negative prompt -export function getCfgPrompt(guidanceScale) { - let splitNegativePrompt = []; +// Gets the CFG prompt +export function getCfgPrompt(guidanceScale, isNegative) { + let splitCfgPrompt = []; - const chatNegativeCombine = chat_metadata[metadataKeys.negative_combine] ?? 
[]; - if (guidanceScale.type === cfgType.chat || chatNegativeCombine.includes(cfgType.chat)) { - splitNegativePrompt.unshift(substituteParams(chat_metadata[metadataKeys.negative_prompt])?.trim()); + const cfgPromptCombine = chat_metadata[metadataKeys.prompt_combine] ?? []; + if (guidanceScale.type === cfgType.chat || cfgPromptCombine.includes(cfgType.chat)) { + splitCfgPrompt.unshift( + substituteParams( + chat_metadata[isNegative ? metadataKeys.negative_prompt : metadataKeys.positive_prompt] + ) + ?.trim() + ); } const charaCfg = extension_settings.cfg.chara?.find((e) => e.name === getCharaFilename(this_chid)); - if (guidanceScale.type === cfgType.chara || chatNegativeCombine.includes(cfgType.chara)) { - splitNegativePrompt.unshift(substituteParams(charaCfg.negative_prompt)?.trim()) + if (guidanceScale.type === cfgType.chara || cfgPromptCombine.includes(cfgType.chara)) { + splitCfgPrompt.unshift( + substituteParams( + isNegative ? charaCfg.negative_prompt : charaCfg.positive_prompt + ) + ?.trim() + ); } - if (guidanceScale.type === cfgType.global || chatNegativeCombine.includes(cfgType.global)) { - splitNegativePrompt.unshift(substituteParams(extension_settings.cfg.global.negative_prompt)?.trim()); + console.log(guidanceScale.type); + console.log(cfgPromptCombine); + if (guidanceScale.type === cfgType.global || cfgPromptCombine.includes(cfgType.global)) { + splitCfgPrompt.unshift( + substituteParams( + isNegative ? extension_settings.cfg.global.negative_prompt : extension_settings.cfg.global.positive_prompt + ) + ?.trim() + ); } // This line is a bit hacky with a JSON.stringify and JSON.parse. Fix this if possible. - const negativeSeparator = JSON.parse(chat_metadata[metadataKeys.negative_separator] || JSON.stringify("\n")) ?? "\n"; - const combinedNegatives = splitNegativePrompt.filter((e) => e.length > 0).join(negativeSeparator); - const insertionDepth = chat_metadata[metadataKeys.negative_insertion_depth] ?? 
1; - console.log(`Setting CFG with guidance scale: ${guidanceScale.value}, negatives: ${combinedNegatives}`); + const customSeparator = JSON.parse(chat_metadata[metadataKeys.prompt_separator] || JSON.stringify("\n")) ?? "\n"; + const combinedCfgPrompt = splitCfgPrompt.filter((e) => e.length > 0).join(customSeparator); + const insertionDepth = chat_metadata[metadataKeys.prompt_insertion_depth] ?? 1; + console.log(`Setting CFG with guidance scale: ${guidanceScale.value}, negatives: ${combinedCfgPrompt}`); return { - value: combinedNegatives, + value: combinedCfgPrompt, depth: insertionDepth }; } diff --git a/public/scripts/extensions/cfg/window.html b/public/scripts/extensions/cfg/window.html index 5e0d21f7c..889d5b0bb 100644 --- a/public/scripts/extensions/cfg/window.html +++ b/public/scripts/extensions/cfg/window.html @@ -14,7 +14,7 @@ Unique to this chat.
- + + +
@@ -86,7 +96,7 @@
Will be used as the default CFG options for every chat unless overridden.
- + + +
-
+

- Negative Cascading + CFG Prompt Cascading
- Combine negative prompts from other boxes. + Combine positive/negative prompts from other boxes.
For example, ticking the chat, global, and character boxes combine all negative prompts into a comma-separated string.

-
From 7ecc0295dca66f7d338d91ae56ffca40efeffe81 Mon Sep 17 00:00:00 2001 From: based Date: Sun, 20 Aug 2023 20:55:37 +1000 Subject: [PATCH 05/32] scale cookie method --- public/index.html | 36 +++++++++++---- public/scripts/RossAscends-mods.js | 2 +- public/scripts/openai.js | 74 ++++++++++++++++++++++++++++-- public/scripts/secrets.js | 2 + server.js | 72 ++++++++++++++++++++++++++++- 5 files changed, 170 insertions(+), 16 deletions(-) diff --git a/public/index.html b/public/index.html index 695de9556..692493f4f 100644 --- a/public/index.html +++ b/public/index.html @@ -667,7 +667,7 @@ Max prompt cost: Unknown

-
+
Temperature
@@ -2000,18 +2000,34 @@
-

Scale API Key

-
- - +
+

Scale API Key

+
+ + +
+
+ For privacy reasons, your API key will be hidden after you reload the page. +
+

Scale API URL

+
-
- For privacy reasons, your API key will be hidden after you reload the page. +
+

Scale Cookie (_jwt)

+
+ + +
+
+ For privacy reasons, your cookie will be hidden after you reload the page. +
-

Scale API URL

- +
@@ -4356,4 +4372,4 @@ - \ No newline at end of file + diff --git a/public/scripts/RossAscends-mods.js b/public/scripts/RossAscends-mods.js index 3a2d0a395..c27b3d6a4 100644 --- a/public/scripts/RossAscends-mods.js +++ b/public/scripts/RossAscends-mods.js @@ -474,7 +474,7 @@ function RA_autoconnect(PrevApi) { case 'openai': if (((secret_state[SECRET_KEYS.OPENAI] || oai_settings.reverse_proxy) && oai_settings.chat_completion_source == chat_completion_sources.OPENAI) || ((secret_state[SECRET_KEYS.CLAUDE] || oai_settings.reverse_proxy) && oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) - || (secret_state[SECRET_KEYS.SCALE] && oai_settings.chat_completion_source == chat_completion_sources.SCALE) + || ((secret_state[SECRET_KEYS.SCALE] || secret_state[SECRET_KEYS.SCALE_COOKIE]) && oai_settings.chat_completion_source == chat_completion_sources.SCALE) || (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI) || (secret_state[SECRET_KEYS.OPENROUTER] && oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER) || (secret_state[SECRET_KEYS.AI21] && oai_settings.chat_completion_source == chat_completion_sources.AI21) diff --git a/public/scripts/openai.js b/public/scripts/openai.js index e83af5d91..0a3317fcf 100644 --- a/public/scripts/openai.js +++ b/public/scripts/openai.js @@ -110,8 +110,8 @@ const max_4k = 4095; const max_8k = 8191; const max_16k = 16383; const max_32k = 32767; -const scale_max = 7900; // Probably more. Save some for the system prompt defined on Scale site. -const claude_max = 8000; // We have a proper tokenizer, so theoretically could be larger (up to 9k) +const scale_max = 8191; // Probably more. Save some for the system prompt defined on Scale site. 
+const claude_max = 9000; // We have a proper tokenizer, so theoretically could be larger (up to 9k) const palm2_max = 7500; // The real context window is 8192, spare some for padding due to using turbo tokenizer const claude_100k_max = 99000; let ai21_max = 9200; //can easily fit 9k gpt tokens because j2's tokenizer is efficient af @@ -219,6 +219,7 @@ const default_settings = { proxy_password: '', assistant_prefill: '', use_ai21_tokenizer: false, + use_alt_scale: true, }; const oai_settings = { @@ -260,6 +261,7 @@ const oai_settings = { proxy_password: '', assistant_prefill: '', use_ai21_tokenizer: false, + use_alt_scale: true, }; let openai_setting_names; @@ -1062,6 +1064,35 @@ function saveModelList(data) { } } +async function sendAltScaleRequest(openai_msgs_tosend, signal) { + const generate_url = '/generate_altscale'; + + let firstMsg = substituteParams(openai_msgs_tosend[0].content); + let subsequentMsgs = openai_msgs_tosend.slice(1); + + const joinedMsgs = subsequentMsgs.reduce((acc, obj) => { + return acc + obj.role + ": " + obj.content + "\n"; + }, ""); + openai_msgs_tosend = substituteParams(joinedMsgs); + console.log(openai_msgs_tosend) + + const generate_data = { + sysprompt: firstMsg, + prompt: openai_msgs_tosend, + temp: parseFloat(oai_settings.temp_openai), + max_tokens: parseFloat(oai_settings.openai_max_tokens), + } + + const response = await fetch(generate_url, { + method: 'POST', + body: JSON.stringify(generate_data), + headers: getRequestHeaders(), + signal: signal + }); + const data = await response.json(); + return data.output; +} + async function sendOpenAIRequest(type, openai_msgs_tosend, signal) { // Provide default abort signal if (!signal) { @@ -1092,6 +1123,10 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) { openai_msgs_tosend = substituteParams(joinedMsgs); } + if (isScale && !!$('#scale-alt').prop('checked')) { + return sendAltScaleRequest(openai_msgs_tosend, signal) + } + // If we're using the window.ai 
extension, use that instead // Doesn't support logit bias yet if (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI) { @@ -1934,6 +1969,7 @@ function loadOpenAISettings(data, settings) { if (settings.names_in_completion !== undefined) oai_settings.names_in_completion = !!settings.names_in_completion; if (settings.openai_model !== undefined) oai_settings.openai_model = settings.openai_model; if (settings.use_ai21_tokenizer !== undefined) oai_settings.use_ai21_tokenizer = !!settings.use_ai21_tokenizer; + if (settings.use_alt_scale !== undefined) { oai_settings.use_alt_scale = !!settings.use_alt_scale; updateScaleForm(); } $('#stream_toggle').prop('checked', oai_settings.stream_openai); $('#api_url_scale').val(oai_settings.api_url_scale); $('#openai_proxy_password').val(oai_settings.proxy_password); @@ -1963,6 +1999,7 @@ function loadOpenAISettings(data, settings) { $('#openai_show_external_models').prop('checked', oai_settings.show_external_models); $('#openai_external_category').toggle(oai_settings.show_external_models); $('#use_ai21_tokenizer').prop('checked', oai_settings.use_ai21_tokenizer); + $('#scale-alt').prop('checked', oai_settings.use_alt_scale); if (settings.impersonation_prompt !== undefined) oai_settings.impersonation_prompt = settings.impersonation_prompt; $('#impersonation_prompt_textarea').val(oai_settings.impersonation_prompt); @@ -2160,6 +2197,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) { show_external_models: settings.show_external_models, assistant_prefill: settings.assistant_prefill, use_ai21_tokenizer: settings.use_ai21_tokenizer, + use_alt_scale: settings.use_alt_scale, }; const savePresetSettings = await fetch(`/savepreset_openai?name=${name}`, { @@ -2496,6 +2534,7 @@ function onSettingsPresetChange() { proxy_password: ['#openai_proxy_password', 'proxy_password', false], assistant_prefill: ['#claude_assistant_prefill', 'assistant_prefill', false], use_ai21_tokenizer: ['#use_ai21_tokenizer', 
'use_ai21_tokenizer', false], + use_alt_scale: ['#use_alt_scale', 'use_alt_scale', false], }; const presetName = $('#settings_perset_openai').find(":selected").text(); @@ -2791,20 +2830,31 @@ async function onConnectButtonClick(e) { if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) { const api_key_scale = $('#api_key_scale').val().trim(); + const scale_cookie = $('#scale_cookie').val().trim(); if (api_key_scale.length) { await writeSecret(SECRET_KEYS.SCALE, api_key_scale); } - if (!oai_settings.api_url_scale) { + if (scale_cookie.length) { + await writeSecret(SECRET_KEYS.SCALE_COOKIE, scale_cookie); + } + + if (!oai_settings.api_url_scale && !oai_settings.use_alt_scale) { console.log('No API URL saved for Scale'); return; } - if (!secret_state[SECRET_KEYS.SCALE]) { + if (!secret_state[SECRET_KEYS.SCALE] && !oai_settings.use_alt_scale) { console.log('No secret key saved for Scale'); return; } + + if (!secret_state[SECRET_KEYS.SCALE_COOKIE] && oai_settings.use_alt_scale) { + console.log("No cookie set for Scale"); + return; + } + } if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) { @@ -2914,11 +2964,27 @@ function onProxyPasswordShowClick() { $(this).toggleClass('fa-eye-slash fa-eye'); } +function updateScaleForm() { + if (oai_settings.use_alt_scale) { + $('#normal_scale_form').css('display', 'none'); + $('#alt_scale_form').css('display', ''); + } else { + $('#normal_scale_form').css('display', ''); + $('#alt_scale_form').css('display', 'none'); + } +} + $(document).ready(async function () { await loadTokenCache(); $('#test_api_button').on('click', testApiConnection); + $('#scale-alt').on('change', function () { + oai_settings.use_alt_scale = !!$('#scale-alt').prop('checked'); + saveSettingsDebounced(); + updateScaleForm(); + }); + $(document).on('input', '#temp_openai', function () { oai_settings.temp_openai = $(this).val(); $('#temp_counter_openai').text(Number($(this).val()).toFixed(2)); diff --git 
a/public/scripts/secrets.js b/public/scripts/secrets.js index 202870c50..c695ca88c 100644 --- a/public/scripts/secrets.js +++ b/public/scripts/secrets.js @@ -9,6 +9,7 @@ export const SECRET_KEYS = { OPENROUTER: 'api_key_openrouter', SCALE: 'api_key_scale', AI21: 'api_key_ai21', + SCALE_COOKIE: 'scale_cookie', } const INPUT_MAP = { @@ -20,6 +21,7 @@ const INPUT_MAP = { [SECRET_KEYS.OPENROUTER]: '#api_key_openrouter', [SECRET_KEYS.SCALE]: '#api_key_scale', [SECRET_KEYS.AI21]: '#api_key_ai21', + [SECRET_KEYS.SCALE_COOKIE]: '#scale_cookie', } async function clearSecret() { diff --git a/server.js b/server.js index 5f6841bfd..6a1279594 100644 --- a/server.js +++ b/server.js @@ -3231,6 +3231,75 @@ async function sendScaleRequest(request, response) { } } +app.post("/generate_altscale", jsonParser, function (request, response_generate_scale) { + if(!request.body) return response_generate_scale.sendStatus(400); + + fetch('https://dashboard.scale.com/spellbook/api/trpc/v2.variant.run', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'cookie': `_jwt=${readSecret(SECRET_KEYS.SCALE_COOKIE)}`, + }, + body: JSON.stringify({ + json: { + variant: { + name: 'New Variant', + appId: '', + taxonomy: null + }, + prompt: { + id: '', + template: '{{input}}\n', + exampleVariables: {}, + variablesSourceDataId: null, + systemMessage: request.body.sysprompt + }, + modelParameters: { + id: '', + modelId: 'GPT4', + modelType: 'OpenAi', + maxTokens: request.body.max_tokens, + temperature: request.body.temp, + stop: null, + suffix: null, + topP: null, + logprobs: null, + logitBias: null + }, + inputs: [ + { + index: '-1', + valueByName: { + input: request.body.prompt + } + } + ] + }, + meta: { + values: { + 'variant.taxonomy': ['undefined'], + 'prompt.variablesSourceDataId': ['undefined'], + 'modelParameters.stop': ['undefined'], + 'modelParameters.suffix': ['undefined'], + 'modelParameters.topP': ['undefined'], + 'modelParameters.logprobs': ['undefined'], + 
'modelParameters.logitBias': ['undefined'] + } + } + }) + }) + .then(response => response.json()) + .then(data => { + console.log(data.result.data.json.outputs[0]) + return response_generate_scale.send({output: data.result.data.json.outputs[0]}); + }) + .catch((error) => { + console.error('Error:', error) + return response_generate_scale.send({error: true}) + }); + +}); + async function sendClaudeRequest(request, response) { const fetch = require('node-fetch').default; @@ -3917,7 +3986,8 @@ const SECRET_KEYS = { DEEPL: 'deepl', OPENROUTER: 'api_key_openrouter', SCALE: 'api_key_scale', - AI21: 'api_key_ai21' + AI21: 'api_key_ai21', + SCALE_COOKIE: 'scale_cookie', } function migrateSecrets() { From ad2be21531021a09df4993e935ace7f64e56de84 Mon Sep 17 00:00:00 2001 From: kingbri Date: Sun, 20 Aug 2023 17:36:35 -0400 Subject: [PATCH 06/32] Generate: Fix appends on continue Continue was in a semi-working state. However, the generated prompt by continue was a mess and didn't remove essential parts such as character name and prompt bias. This caused duplication and bad generations. Now, append the prompt bias after the CFG has been appended and then clean up the continued cache before adding it to the final prompt. 
Signed-off-by: kingbri --- public/script.js | 84 ++++++++++++++++----------- public/scripts/extensions/cfg/util.js | 2 - 2 files changed, 50 insertions(+), 36 deletions(-) diff --git a/public/script.js b/public/script.js index 7567a7c02..934beb504 100644 --- a/public/script.js +++ b/public/script.js @@ -2720,7 +2720,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, is_send_press = true; } - console.log(cycleGenerationPromt) generatedPromtCache += cycleGenerationPromt; if (generatedPromtCache.length == 0 || type === 'continue') { if (main_api === 'openai') { @@ -2813,30 +2812,33 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, } // Add character's name - if (!isInstruct && force_name2 && tokens_already_generated === 0) { + // Force name append on continue + if (!isInstruct && force_name2 && (tokens_already_generated === 0 || isContinue)) { if (!lastMesString.endsWith('\n')) { lastMesString += '\n'; } - // Add a leading space to the prompt bias if applicable - if (!promptBias || promptBias.length === 0) { - console.debug("No prompt bias was found."); - lastMesString += `${name2}:`; - } else if (promptBias.startsWith(' ')) { - console.debug(`A prompt bias with a leading space was found: ${promptBias}`); - lastMesString += `${name2}:${promptBias}` - } else { - console.debug(`A prompt bias was found: ${promptBias}`); - lastMesString += `${name2}: ${promptBias}`; - } - } else if (power_user.user_prompt_bias && !isImpersonate && !isInstruct) { - console.debug(`A prompt bias was found without character's name appended: ${promptBias}`); - lastMesString += substituteParams(power_user.user_prompt_bias); - } + lastMesString += `${name2}:`; + } return lastMesString; } + // Clean up the already generated prompt for seamless addition + function cleanupPromptCache(promptCache) { + // Remove the first occurrence of character's name + if (promptCache.trimStart().startsWith(`${name2}:`)) { + promptCache =
promptCache.replace(`${name2}:`, '').trimStart(); + } + + // Remove the first occurrence of prompt bias + if (promptCache.trimStart().startsWith(promptBias)) { + promptCache = promptCache.replace(promptBias, ''); + } + + return promptCache; + } + function checkPromtSize() { console.debug('---checking Prompt size'); setPromtString(); @@ -2875,25 +2877,45 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, // Fetches the combined prompt for both negative and positive prompts const cfgGuidanceScale = getGuidanceScale(); function getCombinedPrompt(isNegative) { - // Use a negative mesSend if present - let negativeMesSend = []; + let finalMesSend = [...mesSend]; let cfgPrompt = {}; if (cfgGuidanceScale && cfgGuidanceScale?.value !== 1) { cfgPrompt = getCfgPrompt(cfgGuidanceScale, isNegative); } - if (cfgPrompt && cfgPrompt?.value && cfgPrompt?.depth !== 0) { - const cfgPromptValue = `${cfgPrompt.value}\n` - // TODO: kingbri: use the insertion depth method instead of splicing - if (isNegative) { - negativeMesSend = [...mesSend]; - negativeMesSend.splice(mesSend.length - cfgPrompt.depth, 0, cfgPromptValue); + if (cfgPrompt && cfgPrompt?.value) { + if (cfgPrompt?.depth === 0) { + finalMesSend[finalMesSend.length - 1] += + /\s/.test(finalMesSend[finalMesSend.length - 1].slice(-1)) + ? cfgPrompt.value + : ` ${cfgPrompt.value}`; } else { - mesSend.splice(mesSend.length - cfgPrompt.depth, 0, cfgPromptValue); + // TODO: Switch from splice method to insertion depth method + finalMesSend.splice(mesSend.length - cfgPrompt.depth, 0, `${cfgPrompt.value}\n`); } } - let mesSendString = isNegative ? negativeMesSend.join('') : mesSend.join(''); + // Add prompt bias after everything else + // Always run with continue + if (!isInstruct && !isImpersonate && (tokens_already_generated === 0 || isContinue)) { + const trimmedBias = promptBias.trimStart(); + finalMesSend[finalMesSend.length - 1] += + /\s/.test(finalMesSend[finalMesSend.length - 1].slice(-1)) + ?
trimmedBias + : ` ${trimmedBias}`; + } + + // Prune from prompt cache if it exists + if (generatedPromtCache.length !== 0) { + generatedPromtCache = cleanupPromptCache(generatedPromtCache); + } + + // Override for prompt bits data + if (!isNegative) { + mesSend = finalMesSend; + } + + let mesSendString = finalMesSend.join(''); // add chat preamble mesSendString = addChatsPreamble(mesSendString); @@ -2908,19 +2930,13 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, mesSendString + generatedPromtCache; + // TODO: Move zero-depth anchor append to work like CFG and bias appends if (zeroDepthAnchor && zeroDepthAnchor.length) { if (!isMultigenEnabled() || tokens_already_generated == 0) { combinedPrompt = appendZeroDepthAnchor(force_name2, zeroDepthAnchor, combinedPrompt); } } - // Append zero-depth anchor for CFG - if (cfgPrompt && cfgPrompt?.value && cfgPrompt?.depth === 0) { - if (!isMultigenEnabled() || tokens_already_generated == 0) { - combinedPrompt = appendZeroDepthAnchor(force_name2, cfgPrompt.value, combinedPrompt); - } - } - combinedPrompt = combinedPrompt.replace(/\r/gm, ''); if (power_user.collapse_newlines) { diff --git a/public/scripts/extensions/cfg/util.js b/public/scripts/extensions/cfg/util.js index 108c22646..850978884 100644 --- a/public/scripts/extensions/cfg/util.js +++ b/public/scripts/extensions/cfg/util.js @@ -69,8 +69,6 @@ export function getCfgPrompt(guidanceScale, isNegative) { ); } - console.log(guidanceScale.type); - console.log(cfgPromptCombine); if (guidanceScale.type === cfgType.global || cfgPromptCombine.includes(cfgType.global)) { splitCfgPrompt.unshift( substituteParams( From 60faaf18314673904afe0d14c26ce8b6e3087f21 Mon Sep 17 00:00:00 2001 From: kingbri Date: Sun, 20 Aug 2023 19:51:01 -0400 Subject: [PATCH 07/32] CFG: NAI fixes NAI does CFG differently, so add special cases for them. Also gate CFG access to only work on textgenwebui since that's where it's implemented. 
Signed-off-by: kingbri --- public/script.js | 2 +- public/scripts/nai-settings.js | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/public/script.js b/public/script.js index 25a564c12..80429d4b5 100644 --- a/public/script.js +++ b/public/script.js @@ -2971,7 +2971,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, } // Get the negative prompt first since it has the unmodified mesSend array - let negativePrompt = getCombinedPrompt(true); + let negativePrompt = main_api == 'textgenerationwebui' ? getCombinedPrompt(true) : undefined; let finalPromt = getCombinedPrompt(false); const cfgValues = { guidanceScale: cfgGuidanceScale?.value, diff --git a/public/scripts/nai-settings.js b/public/scripts/nai-settings.js index e19c90601..85d470c4f 100644 --- a/public/scripts/nai-settings.js +++ b/public/scripts/nai-settings.js @@ -7,6 +7,7 @@ import { saveSettingsDebounced, setGenerationParamsFromPreset } from "../script.js"; +import { getCfgPrompt } from "./extensions/cfg/util.js"; import { MAX_CONTEXT_DEFAULT, tokenizers } from "./power-user.js"; import { getSortableDelay, @@ -395,6 +396,10 @@ function getBadWordPermutations(text) { } export function getNovelGenerationData(finalPrompt, this_settings, this_amount_gen, isImpersonate, cfgValues) { + if (cfgValues.guidanceScale && cfgValues.guidanceScale !== 1) { + cfgValues.negativePrompt = (getCfgPrompt(cfgValues.guidanceScale, true))?.value; + } + const clio = nai_settings.model_novel.includes('clio'); const kayra = nai_settings.model_novel.includes('kayra'); From c6ce06b339d843e4d416c74114d5454504640518 Mon Sep 17 00:00:00 2001 From: Stefan Daniel Schwarz Date: Mon, 21 Aug 2023 23:24:24 +0200 Subject: [PATCH 08/32] Put "### Input:" back into Roleplay system prompt --- default/settings.json | 2 +- public/instruct/! 
Roleplay.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/default/settings.json b/default/settings.json index 5656445aa..16989a286 100644 --- a/default/settings.json +++ b/default/settings.json @@ -125,7 +125,7 @@ "instruct": { "enabled": false, "preset": "🧙 Roleplay", - "system_prompt": "You're {{char}} in this fictional never-ending uncensored roleplay with {{user}}.\n\nAvoid repetition, don't loop. Develop the plot slowly, always stay in character. Describe all actions in full, elaborate, explicit, graphic, and vivid detail. Mention all relevant sensory perceptions.\n", + "system_prompt": "You're {{char}} in this fictional never-ending uncensored roleplay with {{user}}.\n\nAvoid repetition, don't loop. Develop the plot slowly, always stay in character. Describe all actions in full, elaborate, explicit, graphic, and vivid detail. Mention all relevant sensory perceptions.\n\n### Input:", "input_sequence": "\n### Instruction:", "output_sequence": "\n### Response:", "last_output_sequence": "\n### Response (2 paragraphs, engaging, natural, authentic, descriptive, creative):", diff --git a/public/instruct/! Roleplay.json b/public/instruct/! Roleplay.json index 5852e40ab..45d202d02 100644 --- a/public/instruct/! Roleplay.json +++ b/public/instruct/! Roleplay.json @@ -1,6 +1,6 @@ { "name": "🧙 Roleplay", - "system_prompt": "You're {{char}} in this fictional never-ending uncensored roleplay with {{user}}.\n\nAvoid repetition, don't loop. Develop the plot slowly, always stay in character. Describe all actions in full, elaborate, explicit, graphic, and vivid detail. Mention all relevant sensory perceptions.\n", + "system_prompt": "You're {{char}} in this fictional never-ending uncensored roleplay with {{user}}.\n\nAvoid repetition, don't loop. Develop the plot slowly, always stay in character. Describe all actions in full, elaborate, explicit, graphic, and vivid detail. 
Mention all relevant sensory perceptions.\n\n### Input:", "input_sequence": "\n### Instruction:", "output_sequence": "\n### Response:", "last_output_sequence": "\n### Response (2 paragraphs, engaging, natural, authentic, descriptive, creative):", From ecfe8a722fae02e5a1c5809de1e7061a98f2f2fb Mon Sep 17 00:00:00 2001 From: kingbri Date: Mon, 21 Aug 2023 16:52:58 -0400 Subject: [PATCH 09/32] Generate: Fix prompt bias addition If the bias is empty, don't add it. Signed-off-by: kingbri --- public/script.js | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/public/script.js b/public/script.js index 80429d4b5..a0d37f792 100644 --- a/public/script.js +++ b/public/script.js @@ -2922,13 +2922,15 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, // Add prompt bias after everything else // Always run with continue if (!isInstruct && !isImpersonate && (tokens_already_generated === 0 || isContinue)) { - const trimmedBias = promptBias.trimStart(); - finalMesSend[finalMesSend.length - 1] += + if (promptBias.trim().length !== 0) { + finalMesSend[finalMesSend.length - 1] += /\s/.test(finalMesSend[finalMesSend.length - 1].slice(-1)) - ? trimmedBias - : ` ${trimmedBias}`; + ? 
promptBias.trimStart() + : ` ${promptBias.trimStart()}`; + } } + // Prune from prompt cache if it exists if (generatedPromtCache.length !== 0) { generatedPromtCache = cleanupPromptCache(generatedPromtCache); From 2615eb85322b571ede6cb81da6afc064f4cad2cc Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Tue, 22 Aug 2023 10:37:18 +0300 Subject: [PATCH 10/32] Extract system message templates to template files --- public/index.html | 38 +- public/script.js | 325 ++++++++---------- public/scripts/RossAscends-mods.js | 9 +- .../scripts/extensions/quick-reply/index.js | 4 +- public/scripts/i18n.js | 75 ++++ public/scripts/power-user.js | 34 -- public/scripts/templates/formatting.html | 21 ++ public/scripts/templates/help.html | 11 + public/scripts/templates/hotkeys.html | 13 + public/scripts/templates/macros.html | 11 + public/scripts/templates/welcome.html | 72 ++++ 11 files changed, 349 insertions(+), 264 deletions(-) create mode 100644 public/scripts/i18n.js create mode 100644 public/scripts/templates/formatting.html create mode 100644 public/scripts/templates/help.html create mode 100644 public/scripts/templates/hotkeys.html create mode 100644 public/scripts/templates/macros.html create mode 100644 public/scripts/templates/welcome.html diff --git a/public/index.html b/public/index.html index 59ea701aa..63ab8f9ce 100644 --- a/public/index.html +++ b/public/index.html @@ -63,42 +63,8 @@ - - + + diff --git a/public/script.js b/public/script.js index 8ba7ecf54..819847b05 100644 --- a/public/script.js +++ b/public/script.js @@ -170,6 +170,7 @@ import { getInstructStoppingSequences, autoSelectInstructPreset, } from "./scripts/instruct-mode.js"; +import { applyLocale } from "./scripts/i18n.js"; //exporting functions and vars for mods export { @@ -241,6 +242,14 @@ export { printCharacters, } +// Allow target="_blank" in links +DOMPurify.addHook('afterSanitizeAttributes', function (node) { + if ('target' in node) { + 
node.setAttribute('target', '_blank'); + node.setAttribute('rel', 'noopener'); + } +}); + // API OBJECT FOR EXTERNAL WIRING window["SillyTavern"] = {}; @@ -357,186 +366,102 @@ const extension_prompt_types = { IN_CHAT: 1 }; -const system_messages = { - help: { - name: systemUserName, - force_avatar: system_avatar, - is_user: false, - is_system: true, - is_name: true, - mes: - `Hello there! Please select the help topic you would like to learn more about: - -
Still got questions left? The Official SillyTavern Documentation Website has much more information!` - }, - slash_commands: { - name: systemUserName, - force_avatar: system_avatar, - is_user: false, - is_system: true, - is_name: true, - mes: '', - }, - hotkeys: { - name: systemUserName, - force_avatar: system_avatar, - is_user: false, - is_system: true, - is_name: true, - mes: - `Hotkeys/Keybinds: -
    -
  • Up = Edit last message in chat
  • -
  • Ctrl+Up = Edit last USER message in chat
  • -
  • Left = swipe left
  • -
  • Right = swipe right (NOTE: swipe hotkeys are disabled when chatbar has something typed into it)
  • -
  • Ctrl+Left = view locally stored variables (in the browser console window)
  • -
  • Enter (with chat bar selected) = send your message to AI
  • -
  • Ctrl+Enter = Regenerate the last AI response
  • -
  • Escape = stop AI response generation
  • -
  • Ctrl+Shift+Up = Scroll to context line
  • -
  • Ctrl+Shift+Down = Scroll chat to bottom
  • -
` - }, - formatting: { - name: systemUserName, - force_avatar: system_avatar, - is_user: false, - is_system: true, - is_name: true, - mes: - `Text formatting commands: -
    -
  • *text* - displays as italics
  • -
  • **text** - displays as bold
  • -
  • ***text*** - displays as bold italics
  • -
  • ` + "```" + `text` + "```" + ` - displays as a code block (new lines allowed between the backticks)
  • -
    -
    -like
    -this
    -
    -            
    -
  • ` + "`" + `text` + "`" + ` - displays as inline code
  • -
  • ` + "> " + `text` + ` - displays as a blockquote (note the space after >)
  • -
    like this
    -
  • ` + "# " + `text` + ` - displays as a large header (note the space)
  • -

    like this

    -
  • ` + "## " + `text` + ` - displays as a medium header (note the space)
  • -

    like this

    -
  • ` + "### " + `text` + ` - displays as a small header (note the space)
  • -

    like this

    -
  • $$ text $$ - renders a LaTeX formula (if enabled)
  • -
  • $ text $ - renders an AsciiMath formula (if enabled)
  • -
` - }, - macros: { - name: systemUserName, - force_avatar: system_avatar, - is_user: false, - is_system: true, - is_name: true, - mes: - `System-wide Replacement Macros: -
    -
  • {​{user}​} - your current Persona username
  • -
  • {​{char}​} - the Character's name
  • -
  • {​{input}​} - the user input
  • -
  • {​{time}​} - the current time
  • -
  • {​{date}​} - the current date
  • -
  • {{idle_duration}} - the time since the last user message was sent
  • -
  • {{random:(args)}} - returns a random item from the list. (ex: {{random:1,2,3,4}} will return 1 of the 4 numbers at random. Works with text lists too.
  • -
  • {{roll:(formula)}} - rolls a dice. (ex: {{roll:1d6}} will roll a 6-sided dice and return a number between 1 and 6)
  • -
` - }, - welcome: - { - name: systemUserName, - force_avatar: system_avatar, - is_user: false, - is_system: true, - is_name: true, - mes: [ - '

SillyTavern

', - "Want to update?", - '
', - '

How to start chatting?

', - '
    ', - '
  1. Click and select a Chat API.
  2. ', - '
  3. Click and pick a character
  4. ', - '
', - '
', - '

Want more characters?

', - 'Not controlled by SillyTavern team.', - '', - '
', - '

Confused or lost?

', - '', - - '
', - '

Still have questions?

', - '
    ', - '
  • Join the SillyTavern Discord
  • ', - '
  • Post a GitHub issue
  • ', - '
  • Contact the developers
  • ', - ].join('') - }, - group: { - name: systemUserName, - force_avatar: system_avatar, - is_user: false, - is_system: true, - is_name: true, - is_group: true, - mes: "Group chat created. Say 'Hi' to lovely people!", - }, - empty: { - name: systemUserName, - force_avatar: system_avatar, - is_user: false, - is_system: true, - is_name: true, - mes: "No one hears you. Hint: add more members to the group!", - }, - generic: { - name: systemUserName, - force_avatar: system_avatar, - is_user: false, - is_system: true, - is_name: true, - mes: "Generic system message. User `text` parameter to override the contents", - }, - bookmark_created: { - name: systemUserName, - force_avatar: system_avatar, - is_user: false, - is_system: true, - is_name: true, - mes: `Bookmark created! Click here to open the bookmark chat: {1}`, - }, - bookmark_back: { - name: systemUserName, - force_avatar: system_avatar, - is_user: false, - is_system: true, - is_name: true, - mes: `Click here to return to the previous chat: Return`, - }, -}; +function getSystemMessages() { + system_messages = { + help: { + name: systemUserName, + force_avatar: system_avatar, + is_user: false, + is_system: true, + is_name: true, + mes: renderTemplate("help"), + }, + slash_commands: { + name: systemUserName, + force_avatar: system_avatar, + is_user: false, + is_system: true, + is_name: true, + mes: '', + }, + hotkeys: { + name: systemUserName, + force_avatar: system_avatar, + is_user: false, + is_system: true, + is_name: true, + mes: renderTemplate("hotkeys"), + }, + formatting: { + name: systemUserName, + force_avatar: system_avatar, + is_user: false, + is_system: true, + is_name: true, + mes: renderTemplate("formatting"), + }, + macros: { + name: systemUserName, + force_avatar: system_avatar, + is_user: false, + is_system: true, + is_name: true, + mes: renderTemplate("macros"), + }, + welcome: + { + name: systemUserName, + force_avatar: system_avatar, + is_user: false, + is_system: true, + is_name: true, + mes: 
renderTemplate("welcome"), + }, + group: { + name: systemUserName, + force_avatar: system_avatar, + is_user: false, + is_system: true, + is_name: true, + is_group: true, + mes: "Group chat created. Say 'Hi' to lovely people!", + }, + empty: { + name: systemUserName, + force_avatar: system_avatar, + is_user: false, + is_system: true, + is_name: true, + mes: "No one hears you. Hint: add more members to the group!", + }, + generic: { + name: systemUserName, + force_avatar: system_avatar, + is_user: false, + is_system: true, + is_name: true, + mes: "Generic system message. User `text` parameter to override the contents", + }, + bookmark_created: { + name: systemUserName, + force_avatar: system_avatar, + is_user: false, + is_system: true, + is_name: true, + mes: `Bookmark created! Click here to open the bookmark chat: {1}`, + }, + bookmark_back: { + name: systemUserName, + force_avatar: system_avatar, + is_user: false, + is_system: true, + is_name: true, + mes: `Click here to return to the previous chat: Return`, + }, + }; +} // Register configuration migrations registerPromptManagerMigration(); @@ -551,6 +476,36 @@ $(document).ajaxError(function myErrorHandler(_, xhr) { } }); +function getUrlSync(url, cache = true) { + return $.ajax({ + type: "GET", + url: url, + cache: cache, + async: false + }).responseText; +} + +function renderTemplate(templateId, templateData = {}, sanitize = true, localize = true) { + try { + const templateContent = getUrlSync(`/scripts/templates/${templateId}.html`); + const template = Handlebars.compile(templateContent); + let result = template(templateData); + + if (sanitize) { + result = DOMPurify.sanitize(result); + } + + if (localize) { + result = applyLocale(result); + } + + return result; + } catch (err) { + console.error("Error rendering template", templateId, templateData, err); + toastr.error("Check the DevTools console for more information.", "Error rendering template"); + } +} + async function getClientVersion() { try { const 
response = await fetch('/version'); @@ -837,6 +792,7 @@ $.ajaxPrefilter((options, originalOptions, xhr) => { ///// initialization protocol //////// $.get("/csrf-token").then(async (data) => { token = data.token; + getSystemMessages(); sendSystemMessage(system_message_types.WELCOME); await readSecretState(); await getClientVersion(); @@ -1399,17 +1355,6 @@ function messageFormatting(mes, ch_name, isSystem, isUser) { mes = mes.replace(new RegExp(`(^|\n)${ch_name}:`, 'g'), "$1"); } - //function to hide any from AI response output - /* if (power_user.removeXML && ch_name && !isUser && !isSystem) { - //console.log('incoming mes') - //console.log(mes) - mes = mes.replaceAll(/</g, "<"); - mes = mes.replaceAll(/>/g, ">"); - mes = mes.replaceAll(/<<[^>>]+>>/g, ""); - //console.log('mes after removed ') - //console.log(mes) - } */ - mes = DOMPurify.sanitize(mes); return mes; diff --git a/public/scripts/RossAscends-mods.js b/public/scripts/RossAscends-mods.js index 9a8d3597b..73aa98021 100644 --- a/public/scripts/RossAscends-mods.js +++ b/public/scripts/RossAscends-mods.js @@ -33,7 +33,7 @@ import { SECRET_KEYS, secret_state, } from "./secrets.js"; -import { debounce, delay, getStringHash } from "./utils.js"; +import { debounce, delay, getStringHash, waitUntilCondition } from "./utils.js"; import { chat_completion_sources, oai_settings } from "./openai.js"; var NavToggle = document.getElementById("nav-toggle"); @@ -717,7 +717,12 @@ export async function initMovingUI() { // --------------------------------------------------- -$("document").ready(function () { +jQuery(async function () { + try { + await waitUntilCondition(() => online_status !== undefined, 1000, 10); + } catch { + console.log('Timeout waiting for online_status'); + } // initial status check setTimeout(() => { diff --git a/public/scripts/extensions/quick-reply/index.js b/public/scripts/extensions/quick-reply/index.js index 36fb38ff0..9beb88a5f 100644 --- a/public/scripts/extensions/quick-reply/index.js +++ 
b/public/scripts/extensions/quick-reply/index.js @@ -28,7 +28,7 @@ async function updateQuickReplyPresetList() { if (result.ok) { var data = await result.json(); presets = data.quickReplyPresets?.length ? data.quickReplyPresets : []; - console.log(presets) + console.debug('Quick Reply presets', presets); $("#quickReplyPresets").find('option[value!=""]').remove(); @@ -284,7 +284,7 @@ async function doQR(_, text) { } text = Number(text) - //use scale starting with 0 + //use scale starting with 0 //ex: user inputs "/qr 2" >> qr with data-index 1 (but 2nd item displayed) gets triggered let QRnum = Number(text - 1) if (QRnum <= 0) { QRnum = 0 } diff --git a/public/scripts/i18n.js b/public/scripts/i18n.js new file mode 100644 index 000000000..d5ac1d115 --- /dev/null +++ b/public/scripts/i18n.js @@ -0,0 +1,75 @@ +import { waitUntilCondition } from "./utils.js"; + +const storageKey = "language"; +export const localeData = await fetch("i18n.json").then(response => response.json()); + +export function applyLocale(root = document) { + const overrideLanguage = localStorage.getItem("language"); + var language = overrideLanguage || navigator.language || navigator.userLanguage; + language = language.toLowerCase(); + //load the appropriate language file + if (localeData.lang.indexOf(language) < 0) language = "en"; + + const $root = root instanceof Document ? 
$(root) : $(new DOMParser().parseFromString(root, "text/html")); + + //find all the elements with `data-i18n` attribute + $root.find("[data-i18n]").each(function () { + //read the translation from the language data + const keys = $(this).data("i18n").split(';'); // Multi-key entries are ; delimited + for (const key of keys) { + const attributeMatch = key.match(/\[(\S+)\](.+)/); // [attribute]key + if (attributeMatch) { // attribute-tagged key + const localizedValue = localeData?.[language]?.[attributeMatch[2]]; + if (localizedValue) { + $(this).attr(attributeMatch[1], localizedValue); + } + } else { // No attribute tag, treat as 'text' + const localizedValue = localeData?.[language]?.[key]; + if (localizedValue) { + $(this).text(localizedValue); + } + } + } + }); + + if (root !== document) { + return $root.get(0).body.innerHTML; + } +} + +function addLanguagesToDropdown() { + if (!Array.isArray(localeData?.lang)) { + return; + } + + for (const lang of localeData.lang) { + const option = document.createElement('option'); + option.value = lang; + option.innerText = lang; + $('#ui_language_select').append(option); + } + + const selectedLanguage = localStorage.getItem(storageKey); + if (selectedLanguage) { + $('#ui_language_select').val(selectedLanguage); + } +} + +jQuery(async () => { + waitUntilCondition(() => !!localeData); + window["applyLocale"] = applyLocale; + applyLocale(); + addLanguagesToDropdown(); + + $('#ui_language_select').on('change', async function () { + const language = $(this).val(); + + if (language) { + localStorage.setItem(storageKey, language); + } else { + localStorage.removeItem(storageKey); + } + + location.reload(); + }); +}); diff --git a/public/scripts/power-user.js b/public/scripts/power-user.js index 3d4832bb1..973796e6b 100644 --- a/public/scripts/power-user.js +++ b/public/scripts/power-user.js @@ -207,7 +207,6 @@ let movingUIPresets = []; let context_presets = []; const storage_keys = { - ui_language: "language", fast_ui_mode: 
"TavernAI_fast_ui_mode", avatar_style: "TavernAI_avatar_style", chat_display: "TavernAI_chat_display", @@ -1277,26 +1276,6 @@ function doResetPanels() { $("#movingUIreset").trigger('click'); } -function addLanguagesToDropdown() { - $.getJSON('i18n.json', function (data) { - if (!Array.isArray(data?.lang)) { - return; - } - - for (const lang of data.lang) { - const option = document.createElement('option'); - option.value = lang; - option.innerText = lang; - $('#ui_language_select').append(option); - } - - const selectedLanguage = localStorage.getItem(storage_keys.ui_language); - if (selectedLanguage) { - $('#ui_language_select').val(selectedLanguage); - } - }); -} - function setAvgBG() { const bgimg = new Image(); bgimg.src = $('#bg1') @@ -2025,18 +2004,6 @@ $(document).ready(() => { saveSettingsDebounced(); }); - $('#ui_language_select').on('change', async function () { - const language = $(this).val(); - - if (language) { - localStorage.setItem(storage_keys.ui_language, language); - } else { - localStorage.removeItem(storage_keys.ui_language); - } - - location.reload(); - }); - $(window).on('focus', function () { browser_has_focus = true; }); @@ -2052,5 +2019,4 @@ $(document).ready(() => { registerSlashCommand('cut', doMesCut, [], ' (requred number) – cuts the specified message from the chat', true, true); registerSlashCommand('resetpanels', doResetPanels, ['resetui'], ' – resets UI panels to original state.', true, true); registerSlashCommand('bgcol', setAvgBG, [], ' – WIP test of auto-bg avg coloring', true, true); - addLanguagesToDropdown(); }); diff --git a/public/scripts/templates/formatting.html b/public/scripts/templates/formatting.html new file mode 100644 index 000000000..71671a3b0 --- /dev/null +++ b/public/scripts/templates/formatting.html @@ -0,0 +1,21 @@ +Text formatting commands: +
      +
    • *text* - displays as italics
    • +
    • **text** - displays as bold
    • +
    • ***text*** - displays as bold italics
    • +
    • ```text``` - displays as a code block (new lines allowed between the backticks)
    • +
    +
     like this
    +
      +
    • `text` - displays as inline code
    • +
    • text - displays as a blockquote (note the space after >)
    • +
      like this
      +
    • # text - displays as a large header (note the space)
    • +

      like this

      +
    • ## text - displays as a medium header (note the space)
    • +

      like this

      +
    • ### text - displays as a small header (note the space)
    • +

      like this

      +
    • $$ text $$ - renders a LaTeX formula (if enabled)
    • +
    • $ text $ - renders an AsciiMath formula (if enabled)
    • +
    diff --git a/public/scripts/templates/help.html b/public/scripts/templates/help.html new file mode 100644 index 000000000..66b858f48 --- /dev/null +++ b/public/scripts/templates/help.html @@ -0,0 +1,11 @@ +Hello there! Please select the help topic you would like to learn more about: + +
    + + Still got questions left? The Official SillyTavern Documentation Website has much more information! + diff --git a/public/scripts/templates/hotkeys.html b/public/scripts/templates/hotkeys.html new file mode 100644 index 000000000..18e751bd9 --- /dev/null +++ b/public/scripts/templates/hotkeys.html @@ -0,0 +1,13 @@ +Hotkeys/Keybinds: +
      +
    • Up = Edit last message in chat
    • +
    • Ctrl+Up = Edit last USER message in chat
    • +
    • Left = swipe left
    • +
    • Right = swipe right (NOTE: swipe hotkeys are disabled when chatbar has something typed into it)
    • +
    • Ctrl+Left = view locally stored variables (in the browser console window)
    • +
    • Enter (with chat bar selected) = send your message to AI
    • +
    • Ctrl+Enter = Regenerate the last AI response
    • +
    • Escape = stop AI response generation
    • +
    • Ctrl+Shift+Up = Scroll to context line
    • +
    • Ctrl+Shift+Down = Scroll chat to bottom
    • +
    diff --git a/public/scripts/templates/macros.html b/public/scripts/templates/macros.html new file mode 100644 index 000000000..dd163197f --- /dev/null +++ b/public/scripts/templates/macros.html @@ -0,0 +1,11 @@ +System-wide Replacement Macros: +
      +
    • {{user}} - your current Persona username
    • +
    • {{char}} - the Character's name
    • +
    • {{input}} - the user input
    • +
    • {{time}} - the current time
    • +
    • {{date}} - the current date
    • +
    • {{idle_duration}} - the time since the last user message was sent
    • +
    • {{random:(args)}} - returns a random item from the list. (ex: {{random:1,2,3,4}} will return 1 of the 4 numbers at random. Works with text lists too.
    • +
    • {{roll:(formula)}} - rolls a dice. (ex: {{roll:1d6}} will roll a 6-sided dice and return a number between 1 and 6)
    • +
    diff --git a/public/scripts/templates/welcome.html b/public/scripts/templates/welcome.html new file mode 100644 index 000000000..7c80d19ab --- /dev/null +++ b/public/scripts/templates/welcome.html @@ -0,0 +1,72 @@ +

    + SillyTavern +
    +

    + + Want to update? + +
    +

    How to start chatting?

    +
      +
    1. Click and select a Chat API.
    2. +
    3. Click and pick a character
    4. +
    +
    +

    + Want more characters? +

    + + Not controlled by SillyTavern team. + + +
    +

    Confused or lost?

    + + +
    +

    Still have questions?

    + From e2bac7ec5f120d73ce117d451ee3d4edb8bdef68 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Tue, 22 Aug 2023 13:07:24 +0300 Subject: [PATCH 11/32] Add type checking and JSDoc comments to some utils --- package-lock.json | 64 +++++++ package.json | 4 + public/jsconfig.json | 22 +++ public/script.js | 24 ++- public/scripts/RossAscends-mods.js | 60 +----- public/scripts/power-user.js | 45 +++-- public/scripts/stats.js | 3 +- public/scripts/tags.js | 25 ++- public/scripts/utils.js | 283 ++++++++++++++++++++++++++--- public/scripts/world-info.js | 17 +- 10 files changed, 409 insertions(+), 138 deletions(-) create mode 100644 public/jsconfig.json diff --git a/package-lock.json b/package-lock.json index 929d67a14..e781292b9 100644 --- a/package-lock.json +++ b/package-lock.json @@ -52,8 +52,12 @@ "sillytavern": "server.js" }, "devDependencies": { + "@popperjs/core": "^2.11.8", + "@types/moment": "^2.13.0", + "dompurify": "^3.0.5", "pkg": "^5.8.1", "pkg-fetch": "^3.5.2", + "showdown": "^2.1.0", "toastr": "^2.1.4" } }, @@ -645,11 +649,31 @@ "node": ">= 8" } }, + "node_modules/@popperjs/core": { + "version": "2.11.8", + "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz", + "integrity": "sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==", + "dev": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/popperjs" + } + }, "node_modules/@tokenizer/token": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/@tokenizer/token/-/token-0.3.0.tgz", "integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==" }, + "node_modules/@types/moment": { + "version": "2.13.0", + "resolved": "https://registry.npmjs.org/@types/moment/-/moment-2.13.0.tgz", + "integrity": "sha512-DyuyYGpV6r+4Z1bUznLi/Y7HpGn4iQ4IVcGn8zrr1P4KotKLdH0sbK1TFR6RGyX6B+G8u83wCzL+bpawKU/hdQ==", + "deprecated": "This is 
a stub types definition for Moment (https://github.com/moment/moment). Moment provides its own type definitions, so you don't need @types/moment installed!", + "dev": true, + "dependencies": { + "moment": "*" + } + }, "node_modules/@types/node": { "version": "16.9.1", "resolved": "https://registry.npmjs.org/@types/node/-/node-16.9.1.tgz", @@ -1028,6 +1052,15 @@ "resolved": "https://registry.npmjs.org/command-exists/-/command-exists-1.2.9.tgz", "integrity": "sha512-LTQ/SGc+s0Xc0Fu5WaKnR0YiygZkm9eKFvyS+fRsU7/ZWFF8ykFM6Pc9aCVf1+xasOOZpO3BAVgVrKvsqKHV7w==" }, + "node_modules/commander": { + "version": "9.5.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-9.5.0.tgz", + "integrity": "sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==", + "dev": true, + "engines": { + "node": "^12.20.0 || >=14" + } + }, "node_modules/compressible": { "version": "2.0.18", "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", @@ -1265,6 +1298,12 @@ "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.2.tgz", "integrity": "sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w==" }, + "node_modules/dompurify": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.0.5.tgz", + "integrity": "sha512-F9e6wPGtY+8KNMRAVfxeCOHU0/NPWMSENNq4pQctuXRqqdEPW7q3CrLbR5Nse044WwacyjHGOMlvNsBe1y6z9A==", + "dev": true + }, "node_modules/ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", @@ -2157,6 +2196,15 @@ "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", "dev": true }, + "node_modules/moment": { + "version": "2.29.4", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.4.tgz", + "integrity": "sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==", + "dev": true, + 
"engines": { + "node": "*" + } + }, "node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", @@ -3069,6 +3117,22 @@ "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, + "node_modules/showdown": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/showdown/-/showdown-2.1.0.tgz", + "integrity": "sha512-/6NVYu4U819R2pUIk79n67SYgJHWCce0a5xTP979WbNp0FL9MN1I1QK662IDU1b6JzKTvmhgI7T7JYIxBi3kMQ==", + "dev": true, + "dependencies": { + "commander": "^9.0.0" + }, + "bin": { + "showdown": "bin/showdown.js" + }, + "funding": { + "type": "individual", + "url": "https://www.paypal.me/tiviesantos" + } + }, "node_modules/side-channel": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", diff --git a/package.json b/package.json index f54b0f507..83701b589 100644 --- a/package.json +++ b/package.json @@ -80,8 +80,12 @@ ] }, "devDependencies": { + "@popperjs/core": "^2.11.8", + "@types/moment": "^2.13.0", + "dompurify": "^3.0.5", "pkg": "^5.8.1", "pkg-fetch": "^3.5.2", + "showdown": "^2.1.0", "toastr": "^2.1.4" } } diff --git a/public/jsconfig.json b/public/jsconfig.json new file mode 100644 index 000000000..d0d7724b4 --- /dev/null +++ b/public/jsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "checkJs": true, + "target": "esnext", + "module": "commonjs", + "allowUmdGlobalAccess": true, + "allowSyntheticDefaultImports": true + }, + "exclude": [ + "node_modules" + ], + "typeAcquisition": { + "include": [ + "jquery", + "@popperjs/core", + "toastr", + "showdown", + "dompurify", + "@types/moment" + ] + } +} diff --git a/public/script.js b/public/script.js index 819847b05..0e41948f8 100644 --- a/public/script.js +++ b/public/script.js @@ -638,10 +638,10 @@ export function getTextTokens(tokenizerType, str) { function 
reloadMarkdownProcessor(render_formulas = false) { if (render_formulas) { converter = new showdown.Converter({ - emoji: "true", - underline: "true", - tables: "true", - parseImgDimensions: "true", + emoji: true, + underline: true, + tables: true, + parseImgDimensions: true, extensions: [ showdownKatex( { @@ -655,10 +655,10 @@ function reloadMarkdownProcessor(render_formulas = false) { } else { converter = new showdown.Converter({ - emoji: "true", - literalMidWordUnderscores: "true", - parseImgDimensions: "true", - tables: "true", + emoji: true, + literalMidWordUnderscores: true, + parseImgDimensions: true, + tables: true, }); } @@ -5571,7 +5571,7 @@ async function messageEditDone(div) { * * @param {Array} data - An array containing metadata about each chat such as file_name. * @param {boolean} isGroupChat - A flag indicating if the chat is a group chat. - * @returns {Object} chat_dict - A dictionary where each key is a file_name and the value is the + * @returns {Promise} chat_dict - A dictionary where each key is a file_name and the value is the * corresponding chat content fetched from the server. */ export async function getChatsFromFiles(data, isGroupChat) { @@ -5621,7 +5621,7 @@ export async function getChatsFromFiles(data, isGroupChat) { * The function sends a POST request to the server to retrieve all chats for the character. It then * processes the received data, sorts it by the file name, and returns the sorted data. * - * @returns {Array} - An array containing metadata of all past chats of the character, sorted + * @returns {Promise} - An array containing metadata of all past chats of the character, sorted * in descending order by file name. Returns `undefined` if the fetch request is unsuccessful. 
*/ async function getPastCharacterChats() { @@ -7998,7 +7998,6 @@ $(document).ready(function () { /* $('#set_chat_scenario').on('click', setScenarioOverride); */ ///////////// OPTIMIZED LISTENERS FOR LEFT SIDE OPTIONS POPUP MENU ////////////////////// - $("#options [id]").on("click", function (event, customData) { const fromSlashCommand = customData?.fromSlashCommand || false; var id = $(this).attr("id"); @@ -8500,7 +8499,6 @@ $(document).ready(function () { showSwipeButtons(); }); - $(document).on("click", ".mes_edit_delete", async function (event, customData) { const fromSlashCommand = customData?.fromSlashCommand || false; const swipeExists = (!chat[this_edit_mes_id].swipes || chat[this_edit_mes_id].swipes.length <= 1 || chat.is_user || parseInt(this_edit_mes_id) !== chat.length - 1); @@ -9072,7 +9070,7 @@ $(document).ready(function () { await importWorldInfo(file); break; default: - toastr.warn('Unknown content type'); + toastr.warning('Unknown content type'); console.error('Unknown content type', customContentType); break; } diff --git a/public/scripts/RossAscends-mods.js b/public/scripts/RossAscends-mods.js index 73aa98021..f17335e59 100644 --- a/public/scripts/RossAscends-mods.js +++ b/public/scripts/RossAscends-mods.js @@ -36,8 +36,6 @@ import { import { debounce, delay, getStringHash, waitUntilCondition } from "./utils.js"; import { chat_completion_sources, oai_settings } from "./openai.js"; -var NavToggle = document.getElementById("nav-toggle"); - var RPanelPin = document.getElementById("rm_button_panel_pin"); var LPanelPin = document.getElementById("lm_button_panel_pin"); var WIPanelPin = document.getElementById("WI_panel_pin"); @@ -47,20 +45,8 @@ var LeftNavPanel = document.getElementById("left-nav-panel"); var WorldInfo = document.getElementById("WorldInfo"); var SelectedCharacterTab = document.getElementById("rm_button_selected_ch"); -var AdvancedCharDefsPopup = document.getElementById("character_popup"); -var ConfirmationPopup = 
document.getElementById("dialogue_popup"); var AutoConnectCheckbox = document.getElementById("auto-connect-checkbox"); var AutoLoadChatCheckbox = document.getElementById("auto-load-chat-checkbox"); -var SelectedNavTab = ("#" + LoadLocal('SelectedNavTab')); - -var create_save_name; -var create_save_description; -var create_save_personality; -var create_save_first_message; -var create_save_scenario; -var create_save_mes_example; -var count_tokens; -var perm_tokens; var connection_made = false; var retry_delay = 500; @@ -83,32 +69,6 @@ const observer = new MutationObserver(function (mutations) { observer.observe(document.documentElement, observerConfig); -/** - * Wait for an element before resolving a promise - * @param {String} querySelector - Selector of element to wait for - * @param {Integer} timeout - Milliseconds to wait before timing out, or 0 for no timeout - */ -function waitForElement(querySelector, timeout) { - return new Promise((resolve, reject) => { - var timer = false; - if (document.querySelectorAll(querySelector).length) return resolve(); - const observer = new MutationObserver(() => { - if (document.querySelectorAll(querySelector).length) { - observer.disconnect(); - if (timer !== false) clearTimeout(timer); - return resolve(); - } - }); - observer.observe(document.body, { - childList: true, - subtree: true - }); - if (timeout) timer = setTimeout(() => { - observer.disconnect(); - reject(); - }, timeout); - }); -} /** * Converts generation time from milliseconds to a human-readable format. 
@@ -225,14 +185,6 @@ export function getMessageTimeStamp() { // triggers: $("#rm_button_create").on("click", function () { //when "+New Character" is clicked $(SelectedCharacterTab).children("h2").html(''); // empty nav's 3rd panel tab - - //empty temp vars to store new char data for counting - create_save_name = ""; - create_save_description = ""; - create_save_personality = ""; - create_save_first_message = ""; - create_save_scenario = ""; - create_save_mes_example = ""; }); //when any input is made to the create/edit character form textareas $("#rm_ch_create_block").on("input", function () { countTokensDebounced(); }); @@ -804,7 +756,7 @@ jQuery(async function () { //console.log('setting pin class via local var'); $(RightNavPanel).addClass('pinnedOpen'); } - if ($(RPanelPin).prop('checked' == true)) { + if (!!$(RPanelPin).prop('checked')) { console.debug('setting pin class via checkbox state'); $(RightNavPanel).addClass('pinnedOpen'); } @@ -814,7 +766,7 @@ jQuery(async function () { //console.log('setting pin class via local var'); $(LeftNavPanel).addClass('pinnedOpen'); } - if ($(LPanelPin).prop('checked' == true)) { + if (!!$(LPanelPin).prop('checked')) { console.debug('setting pin class via checkbox state'); $(LeftNavPanel).addClass('pinnedOpen'); } @@ -826,7 +778,7 @@ jQuery(async function () { $(WorldInfo).addClass('pinnedOpen'); } - if ($(WIPanelPin).prop('checked' == true)) { + if (!!$(WIPanelPin).prop('checked')) { console.debug('setting pin class via checkbox state'); $(WorldInfo).addClass('pinnedOpen'); } @@ -889,8 +841,6 @@ jQuery(async function () { saveSettingsDebounced(); }); - - //this makes the chat input text area resize vertically to match the text size (limited by CSS at 50% window height) $('#send_textarea').on('input', function () { this.style.height = '40px'; @@ -901,7 +851,7 @@ jQuery(async function () { document.addEventListener('swiped-left', function (e) { var SwipeButR = $('.swipe_right:last'); - var SwipeTargetMesClassParent = 
e.target.closest('.last_mes'); + var SwipeTargetMesClassParent = $(e.target).closest('.last_mes'); if (SwipeTargetMesClassParent !== null) { if (SwipeButR.css('display') === 'flex') { SwipeButR.click(); @@ -910,7 +860,7 @@ jQuery(async function () { }); document.addEventListener('swiped-right', function (e) { var SwipeButL = $('.swipe_left:last'); - var SwipeTargetMesClassParent = e.target.closest('.last_mes'); + var SwipeTargetMesClassParent = $(e.target).closest('.last_mes'); if (SwipeTargetMesClassParent !== null) { if (SwipeButL.css('display') === 'flex') { SwipeButL.click(); diff --git a/public/scripts/power-user.js b/public/scripts/power-user.js index 973796e6b..de661f342 100644 --- a/public/scripts/power-user.js +++ b/public/scripts/power-user.js @@ -246,29 +246,42 @@ function playMessageSound() { } const audio = document.getElementById('audio_message_sound'); - audio.volume = 0.8; - audio.pause(); - audio.currentTime = 0; - audio.play(); + if (audio instanceof HTMLAudioElement) { + audio.volume = 0.8; + audio.pause(); + audio.currentTime = 0; + audio.play(); + } } +/** + * Replaces consecutive newlines with a single newline. + * @param {string} x String to be processed. + * @returns {string} Processed string. + * @example + * collapseNewlines("\n\n\n"); // "\n" + */ function collapseNewlines(x) { return x.replaceAll(/\n+/g, "\n"); } +/** + * Fix formatting problems in markdown. + * @param {string} text Text to be processed. + * @returns {string} Processed text. + * @example + * "^example * text*\n" // "^example *text*\n" + * "^*example * text\n"// "^*example* text\n" + * "^example *text *\n" // "^example *text*\n" + * "^* example * text\n" // "^*example* text\n" + * // take note that the side you move the asterisk depends on where its pairing is + * // i.e. 
both of the following strings have the same broken asterisk ' * ', + * // but you move the first to the left and the second to the right, to match the non-broken asterisk + * "^example * text*\n" // "^*example * text\n" + * // and you HAVE to handle the cases where multiple pairs of asterisks exist in the same line + * "^example * text* * harder problem *\n" // "^example *text* *harder problem*\n" + */ function fixMarkdown(text) { - // fix formatting problems in markdown - // e.g.: - // "^example * text*\n" -> "^example *text*\n" - // "^*example * text\n" -> "^*example* text\n" - // "^example *text *\n" -> "^example *text*\n" - // "^* example * text\n" -> "^*example* text\n" - // take note that the side you move the asterisk depends on where its pairing is - // i.e. both of the following strings have the same broken asterisk ' * ', - // but you move the first to the left and the second to the right, to match the non-broken asterisk "^example * text*\n" "^*example * text\n" - // and you HAVE to handle the cases where multiple pairs of asterisks exist in the same line - // i.e. "^example * text* * harder problem *\n" -> "^example *text* *harder problem*\n" - // Find pairs of formatting characters and capture the text in between them const format = /([\*_]{1,2})([\s\S]*?)\1/gm; let matches = []; diff --git a/public/scripts/stats.js b/public/scripts/stats.js index d80997223..93b34e4ae 100644 --- a/public/scripts/stats.js +++ b/public/scripts/stats.js @@ -25,13 +25,12 @@ function createStatBlock(statName, statValue) { * @returns {number} - The stat value if it is a number, otherwise 0. */ function verifyStatValue(stat) { - return isNaN(stat) ? 0 : stat; + return isNaN(Number(stat)) ? 0 : Number(stat); } /** * Calculates total stats from character statistics. * - * @param {Object} charStats - Object containing character statistics. * @returns {Object} - Object containing total statistics. 
*/ function calculateTotalStats() { diff --git a/public/scripts/tags.js b/public/scripts/tags.js index a6887c7c6..f486fbb08 100644 --- a/public/scripts/tags.js +++ b/public/scripts/tags.js @@ -7,7 +7,7 @@ import { getCharacters, entitiesFilter, } from "../script.js"; -import { FILTER_TYPES } from "./filters.js"; +import { FILTER_TYPES, FilterHelper } from "./filters.js"; import { groupCandidatesFilter, selected_group } from "./group-chats.js"; import { uuidv4 } from "./utils.js"; @@ -24,7 +24,6 @@ export { importTags, }; -const random_id = () => uuidv4(); const CHARACTER_FILTER_SELECTOR = '#rm_characters_block .rm_tag_filter'; const GROUP_FILTER_SELECTOR = '#rm_group_chats_block .rm_tag_filter'; @@ -49,17 +48,21 @@ const InListActionable = { } const DEFAULT_TAGS = [ - { id: random_id(), name: "Plain Text" }, - { id: random_id(), name: "OpenAI" }, - { id: random_id(), name: "W++" }, - { id: random_id(), name: "Boostyle" }, - { id: random_id(), name: "PList" }, - { id: random_id(), name: "AliChat" }, + { id: uuidv4(), name: "Plain Text" }, + { id: uuidv4(), name: "OpenAI" }, + { id: uuidv4(), name: "W++" }, + { id: uuidv4(), name: "Boostyle" }, + { id: uuidv4(), name: "PList" }, + { id: uuidv4(), name: "AliChat" }, ]; let tags = []; let tag_map = {}; +/** + * Applies the favorite filter to the character list. + * @param {FilterHelper} filterHelper Instance of FilterHelper class. + */ function applyFavFilter(filterHelper) { const isSelected = $(this).hasClass('selected'); const displayFavoritesOnly = !isSelected; @@ -68,6 +71,10 @@ function applyFavFilter(filterHelper) { filterHelper.setFilterData(FILTER_TYPES.FAV, displayFavoritesOnly); } +/** + * Applies the "is group" filter to the character list. + * @param {FilterHelper} filterHelper Instance of FilterHelper class. 
+ */ function filterByGroups(filterHelper) { const isSelected = $(this).hasClass('selected'); const displayGroupsOnly = !isSelected; @@ -253,7 +260,7 @@ async function importTags(imported_char) { function createNewTag(tagName) { const tag = { - id: random_id(), + id: uuidv4(), name: tagName, color: '', }; diff --git a/public/scripts/utils.js b/public/scripts/utils.js index aba6f2153..874552998 100644 --- a/public/scripts/utils.js +++ b/public/scripts/utils.js @@ -7,6 +7,14 @@ export function onlyUnique(value, index, array) { return array.indexOf(value) === index; } +/** + * Checks if a string only contains digits. + * @param {string} str The string to check. + * @returns {boolean} True if the string only contains digits, false otherwise. + * @example + * isDigitsOnly('123'); // true + * isDigitsOnly('abc'); // false + */ export function isDigitsOnly(str) { return /^\d+$/.test(str); } @@ -16,6 +24,13 @@ export function getSortableDelay() { return navigator.maxTouchPoints > 0 ? 750 : 100; } +/** + * Rearranges an array in a random order. + * @param {any[]} array The array to shuffle. + * @returns {any[]} The shuffled array. + * @example + * shuffle([1, 2, 3]); // [2, 3, 1] + */ export function shuffle(array) { let currentIndex = array.length, randomIndex; @@ -31,6 +46,12 @@ export function shuffle(array) { return array; } +/** + * Downloads a file to the user's devices. + * @param {BlobPart} content File content to download. + * @param {string} fileName File name. + * @param {string} contentType File content type. + */ export function download(content, fileName, contentType) { const a = document.createElement("a"); const file = new Blob([content], { type: contentType }); @@ -49,12 +70,17 @@ export async function urlContentToDataUri(url, params) { }); } +/** + * Returns a promise that resolves to the file's text. + * @param {Blob} file The file to read. + * @returns {Promise} A promise that resolves to the file's text. 
+ */ export function getFileText(file) { return new Promise((resolve, reject) => { const reader = new FileReader(); reader.readAsText(file); reader.onload = function () { - resolve(reader.result); + resolve(String(reader.result)); }; reader.onerror = function (error) { reject(error); @@ -62,6 +88,10 @@ export function getFileText(file) { }); } +/** + * Returns a promise that resolves to the file's array buffer. + * @param {Blob} file The file to read. + */ export function getFileBuffer(file) { return new Promise((resolve, reject) => { const reader = new FileReader(); @@ -75,12 +105,17 @@ export function getFileBuffer(file) { }); } +/** + * Returns a promise that resolves to the base64 encoded string of a file. + * @param {Blob} file The file to read. + * @returns {Promise} A promise that resolves to the base64 encoded string. + */ export function getBase64Async(file) { return new Promise((resolve, reject) => { const reader = new FileReader(); reader.readAsDataURL(file); reader.onload = function () { - resolve(reader.result); + resolve(String(reader.result)); }; reader.onerror = function (error) { reject(error); @@ -88,15 +123,26 @@ export function getBase64Async(file) { }); } +/** + * Parses a file blob as a JSON object. + * @param {Blob} file The file to read. + * @returns {Promise} A promise that resolves to the parsed JSON object. + */ export async function parseJsonFile(file) { return new Promise((resolve, reject) => { const fileReader = new FileReader(); - fileReader.onload = event => resolve(JSON.parse(event.target.result)); - fileReader.onerror = error => reject(error); fileReader.readAsText(file); + fileReader.onload = event => resolve(JSON.parse(String(event.target.result))); + fileReader.onerror = error => reject(error); }); } +/** + * Calculates a hash code for a string. + * @param {string} str The string to hash. + * @param {number} [seed=0] The seed to use for the hash. + * @returns {number} The hash code. 
+ */ export function getStringHash(str, seed = 0) { if (typeof str !== 'string') { return 0; @@ -116,6 +162,12 @@ export function getStringHash(str, seed = 0) { return 4294967296 * (2097151 & h2) + (h1 >>> 0); }; +/** + * Creates a debounced function that delays invoking func until after wait milliseconds have elapsed since the last time the debounced function was invoked. + * @param {function} func The function to debounce. + * @param {number} [timeout=300] The timeout in milliseconds. + * @returns {function} The debounced function. + */ export function debounce(func, timeout = 300) { let timer; return (...args) => { @@ -124,6 +176,12 @@ export function debounce(func, timeout = 300) { }; } +/** + * Creates a throttled function that only invokes func at most once per every limit milliseconds. + * @param {function} func The function to throttle. + * @param {number} [limit=300] The limit in milliseconds. + * @returns {function} The throttled function. + */ export function throttle(func, limit = 300) { let lastCall; return (...args) => { @@ -135,6 +193,11 @@ export function throttle(func, limit = 300) { }; } +/** + * Checks if an element is in the viewport. + * @param {any[]} el The element to check. + * @returns {boolean} True if the element is in the viewport, false otherwise. + */ export function isElementInViewport(el) { if (typeof jQuery === "function" && el instanceof jQuery) { el = el[0]; @@ -148,6 +211,12 @@ export function isElementInViewport(el) { ); } +/** + * Returns a name that is unique among the names that exist. + * @param {string} name The name to check. + * @param {{ (y: any): boolean; }} exists Function to check if name exists. + * @returns {string} A unique name. 
+ */ export function getUniqueName(name, exists) { let i = 1; let baseName = name; @@ -158,18 +227,48 @@ export function getUniqueName(name, exists) { return name; } -export const delay = (ms) => new Promise((res) => setTimeout(res, ms)); -export const isSubsetOf = (a, b) => (Array.isArray(a) && Array.isArray(b)) ? b.every(val => a.includes(val)) : false; +/** + * Returns a promise that resolves after the specified number of milliseconds. + * @param {number} ms The number of milliseconds to wait. + * @returns {Promise} A promise that resolves after the specified number of milliseconds. + */ +export function delay(ms) { + return new Promise((res) => setTimeout(res, ms)); +} +/** + * Checks if an array is a subset of another array. + * @param {any[]} a Array A + * @param {any[]} b Array B + * @returns {boolean} True if B is a subset of A, false otherwise. + */ +export function isSubsetOf(a, b) { + return (Array.isArray(a) && Array.isArray(b)) ? b.every(val => a.includes(val)) : false; +} + +/** + * Increments the trailing number in a string. + * @param {string} str The string to process. + * @returns {string} The string with the trailing number incremented by 1. + * @example + * incrementString('Hello, world! 1'); // 'Hello, world! 2' + */ export function incrementString(str) { // Find the trailing number or it will match the empty string const count = str.match(/\d*$/); // Take the substring up until where the integer was matched // Concatenate it to the matched count incremented by 1 - return str.substr(0, count.index) + (++count[0]); + return str.substring(0, count.index) + (Number(count[0]) + 1); }; +/** + * Formats a string using the specified arguments. + * @param {string} format The format string. + * @returns {string} The formatted string. + * @example + * stringFormat('Hello, {0}!', 'world'); // 'Hello, world!' 
+ */ export function stringFormat(format) { const args = Array.prototype.slice.call(arguments, 1); return format.replace(/{(\d+)}/g, function (match, number) { @@ -180,7 +279,11 @@ export function stringFormat(format) { }); }; -// Save the caret position in a contenteditable element +/** + * Save the caret position in a contenteditable element. + * @param {Element} element The element to save the caret position of. + * @returns {{ start: number, end: number }} An object with the start and end offsets of the caret. + */ export function saveCaretPosition(element) { // Get the current selection const selection = window.getSelection(); @@ -209,7 +312,11 @@ export function saveCaretPosition(element) { return position; } -// Restore the caret position in a contenteditable element +/** + * Restore the caret position in a contenteditable element. + * @param {Element} element The element to restore the caret position of. + * @param {{ start: any; end: any; }} position An object with the start and end offsets of the caret. + */ export function restoreCaretPosition(element, position) { // If the position is null, do nothing if (!position) { @@ -236,6 +343,11 @@ export async function resetScrollHeight(element) { $(element).css('height', $(element).prop('scrollHeight') + 3 + 'px'); } +/** + * Sets the height of an element to its scroll height. + * @param {JQuery} element The element to initialize the scroll height of. + * @returns {Promise} A promise that resolves when the scroll height has been initialized. + */ export async function initScrollHeight(element) { await delay(1); @@ -252,15 +364,27 @@ export async function initScrollHeight(element) { //resetScrollHeight(element); } +/** + * Compares elements by their CSS order property. Used for sorting. + * @param {any} a The first element. + * @param {any} b The second element. + * @returns {number} A negative number if a is before b, a positive number if a is after b, or 0 if they are equal. 
+ */ export function sortByCssOrder(a, b) { const _a = Number($(a).css('order')); const _b = Number($(b).css('order')); return _a - _b; } +/** + * Trims a string to the end of a nearest sentence. + * @param {string} input The string to trim. + * @param {boolean} include_newline Whether to include a newline character in the trimmed string. + * @returns {string} The trimmed string. + * @example + * end_trim_to_sentence('Hello, world! I am from'); // 'Hello, world!' + */ export function end_trim_to_sentence(input, include_newline = false) { - // inspired from https://github.com/kaihordewebui/kaihordewebui.github.io/blob/06b95e6b7720eb85177fbaf1a7f52955d7cdbc02/index.html#L4853-L4867 - const punctuation = new Set(['.', '!', '?', '*', '"', ')', '}', '`', ']', '$', '。', '!', '?', '”', ')', '】', '】', '’', '」', '】']); // extend this as you see fit let last = -1; @@ -285,6 +409,15 @@ export function end_trim_to_sentence(input, include_newline = false) { return input.substring(0, last + 1).trimEnd(); } +/** + * Counts the number of occurrences of a character in a string. + * @param {string} string The string to count occurrences in. + * @param {string} character The character to count occurrences of. + * @returns {number} The number of occurrences of the character in the string. + * @example + * countOccurrences('Hello, world!', 'l'); // 3 + * countOccurrences('Hello, world!', 'x'); // 0 + */ export function countOccurrences(string, character) { let count = 0; @@ -297,6 +430,14 @@ export function countOccurrences(string, character) { return count; } +/** + * Checks if a number is odd. + * @param {number} number The number to check. + * @returns {boolean} True if the number is odd, false otherwise. + * @example + * isOdd(3); // true + * isOdd(4); // false + */ export function isOdd(number) { return number % 2 !== 0; } @@ -337,6 +478,12 @@ export function timestampToMoment(timestamp) { return moment.invalid(); } +/** + * Compare two moment objects for sorting. 
+ * @param {*} a The first moment object. + * @param {*} b The second moment object. + * @returns {number} A negative number if a is before b, a positive number if a is after b, or 0 if they are equal. + */ export function sortMoments(a, b) { if (a.isBefore(b)) { return 1; @@ -347,14 +494,21 @@ export function sortMoments(a, b) { } } -/** Split string to parts no more than length in size */ -export function splitRecursive(input, length, delimitiers = ['\n\n', '\n', ' ', '']) { - const delim = delimitiers[0] ?? ''; +/** Split string to parts no more than length in size. + * @param {string} input The string to split. + * @param {number} length The maximum length of each part. + * @param {string[]} delimiters The delimiters to use when splitting the string. + * @returns {string[]} The split string. + * @example + * splitRecursive('Hello, world!', 3); // ['Hel', 'lo,', 'wor', 'ld!'] +*/ +export function splitRecursive(input, length, delimiters = ['\n\n', '\n', ' ', '']) { + const delim = delimiters[0] ?? ''; const parts = input.split(delim); const flatParts = parts.flatMap(p => { if (p.length < length) return p; - return splitRecursive(input, length, delimitiers.slice(1)); + return splitRecursive(input, length, delimiters.slice(1)); }); // Merge short chunks @@ -378,6 +532,13 @@ export function splitRecursive(input, length, delimitiers = ['\n\n', '\n', ' ', return result; } +/** + * Checks if a string is a valid data URL. + * @param {string} str The string to check. + * @returns {boolean} True if the string is a valid data URL, false otherwise. + * @example + * isDataURL('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUA...'); // true + */ export function isDataURL(str) { const regex = /^data:([a-z]+\/[a-z0-9-+.]+(;[a-z-]+=[a-z0-9-]+)*;?)?(base64)?,([a-z0-9!$&',()*+;=\-_%.~:@\/?#]+)?$/i; return regex.test(str); @@ -392,6 +553,13 @@ export function getCharaFilename(chid) { } } +/** + * Extracts words from a string. 
+ * @param {string} value The string to extract words from. + * @returns {string[]} The extracted words. + * @example + * extractAllWords('Hello, world!'); // ['hello', 'world'] + */ export function extractAllWords(value) { const words = []; @@ -406,21 +574,45 @@ export function extractAllWords(value) { return words; } +/** + * Escapes a string for use in a regular expression. + * @param {string} string The string to escape. + * @returns {string} The escaped string. + * @example + * escapeRegex('^Hello$'); // '\\^Hello\\$' + */ export function escapeRegex(string) { return string.replace(/[/\-\\^$*+?.()|[\]{}]/g, '\\$&'); } +/** + * Provides an interface for rate limiting function calls. + */ export class RateLimiter { - constructor(intervalMillis) { - this._intervalMillis = intervalMillis; - this._lastResolveTime = 0; - this._pendingResolve = Promise.resolve(); + /** + * Creates a new RateLimiter. + * @param {number} interval The interval in milliseconds. + * @example + * const rateLimiter = new RateLimiter(1000); + * rateLimiter.waitForResolve().then(() => { + * console.log('Waited 1000ms'); + * }); + */ + constructor(interval) { + this.interval = interval; + this.lastResolveTime = 0; + this.pendingResolve = Promise.resolve(); } + /** + * Waits for the remaining time in the interval. + * @param {AbortSignal} abortSignal An optional AbortSignal to abort the wait. + * @returns {Promise} A promise that resolves when the remaining time has elapsed. + */ _waitRemainingTime(abortSignal) { const currentTime = Date.now(); - const elapsedTime = currentTime - this._lastResolveTime; - const remainingTime = Math.max(0, this._intervalMillis - elapsedTime); + const elapsedTime = currentTime - this.lastResolveTime; + const remainingTime = Math.max(0, this.interval - elapsedTime); return new Promise((resolve, reject) => { const timeoutId = setTimeout(() => { @@ -436,19 +628,29 @@ export class RateLimiter { }); } + /** + * Waits for the next interval to elapse. 
+ * @param {AbortSignal} abortSignal An optional AbortSignal to abort the wait. + * @returns {Promise} A promise that resolves when the next interval has elapsed. + */ async waitForResolve(abortSignal) { - await this._pendingResolve; - this._pendingResolve = this._waitRemainingTime(abortSignal); + await this.pendingResolve; + this.pendingResolve = this._waitRemainingTime(abortSignal); // Update the last resolve time - this._lastResolveTime = Date.now() + this._intervalMillis; - console.debug(`RateLimiter.waitForResolve() ${this._lastResolveTime}`); + this.lastResolveTime = Date.now() + this.interval; + console.debug(`RateLimiter.waitForResolve() ${this.lastResolveTime}`); } } -// Taken from https://github.com/LostRuins/lite.koboldai.net/blob/main/index.html -//import tavern png data. adapted from png-chunks-extract under MIT license -//accepts png input data, and returns the extracted JSON +/** + * Extracts a JSON object from a PNG file. + * Taken from https://github.com/LostRuins/lite.koboldai.net/blob/main/index.html + * Adapted from png-chunks-extract under MIT license + * @param {Uint8Array} data The PNG data to extract the JSON from. + * @param {string} identifier The identifier to look for in the PNG tEXT data. + * @returns {object} The extracted JSON object. + */ export function extractDataFromPng(data, identifier = 'chara') { console.log("Attempting PNG import..."); let uint8 = new Uint8Array(4); @@ -599,6 +801,13 @@ export async function saveBase64AsFile(base64Data, characterName, filename = "", } } +/** + * Creates a thumbnail from a data URL. + * @param {string} dataUrl The data URL encoded data of the image. + * @param {number} maxWidth The maximum width of the thumbnail. + * @param {number} maxHeight The maximum height of the thumbnail. + * @returns {Promise} A promise that resolves to the thumbnail data URL. 
+ */ export function createThumbnail(dataUrl, maxWidth, maxHeight) { return new Promise((resolve, reject) => { const img = new Image(); @@ -634,6 +843,13 @@ export function createThumbnail(dataUrl, maxWidth, maxHeight) { }); } +/** + * Waits for a condition to be true. Throws an error if the condition is not true within the timeout. + * @param {{ (): boolean; }} condition The condition to wait for. + * @param {number} [timeout=1000] The timeout in milliseconds. + * @param {number} [interval=100] The interval in milliseconds. + * @returns {Promise} A promise that resolves when the condition is true. + */ export async function waitUntilCondition(condition, timeout = 1000, interval = 100) { return new Promise((resolve, reject) => { const timeoutId = setTimeout(() => { @@ -651,6 +867,12 @@ export async function waitUntilCondition(condition, timeout = 1000, interval = 1 }); } +/** + * Returns a UUID v4 string. + * @returns {string} A UUID v4 string. + * @example + * uuidv4(); // '3e2fd9e1-0a7a-4f6d-9aaf-8a7a4babe7eb' + */ export function uuidv4() { return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) { const r = Math.random() * 16 | 0; @@ -659,6 +881,11 @@ export function uuidv4() { }); } +/** + * Clones an object using JSON serialization. + * @param {any} obj The object to clone. + * @returns {any} A deep clone of the object. 
+ */ export function deepClone(obj) { return JSON.parse(JSON.stringify(obj)); } diff --git a/public/scripts/world-info.js b/public/scripts/world-info.js index 347045913..9c4118687 100644 --- a/public/scripts/world-info.js +++ b/public/scripts/world-info.js @@ -418,7 +418,7 @@ function getWorldEntry(name, data, entry) { keyInput.on("input", function () { const uid = $(this).data("uid"); - const value = $(this).val(); + const value = String($(this).val()); resetScrollHeight(this); data.entries[uid].key = value .split(",") @@ -454,7 +454,7 @@ function getWorldEntry(name, data, entry) { keySecondaryInput.data("uid", entry.uid); keySecondaryInput.on("input", function () { const uid = $(this).data("uid"); - const value = $(this).val(); + const value = String($(this).val()); resetScrollHeight(this); data.entries[uid].keysecondary = value .split(",") @@ -1506,19 +1506,6 @@ jQuery(() => { return; } - /* - if (deviceInfo.device.type === 'desktop') { - let selectScrollTop = null; - e.preventDefault(); - const option = $(e.target); - const selectElement = $(this)[0]; - selectScrollTop = selectElement.scrollTop; - option.prop('selected', !option.prop('selected')); - await delay(1); - selectElement.scrollTop = selectScrollTop; - } - */ - onWorldInfoChange('__notSlashCommand__'); }); From ba925f388c4ce270feb0c258b1b00bb0d3bab195 Mon Sep 17 00:00:00 2001 From: based Date: Tue, 22 Aug 2023 21:29:18 +1000 Subject: [PATCH 12/32] added more options to the scale request --- public/index.html | 4 ++-- public/scripts/openai.js | 36 ++++++++++++++++++++++++------------ server.js | 9 +++------ 3 files changed, 29 insertions(+), 20 deletions(-) diff --git a/public/index.html b/public/index.html index d61fb0250..14327cb34 100644 --- a/public/index.html +++ b/public/index.html @@ -710,7 +710,7 @@ -
    +
    Top P
    @@ -1578,7 +1578,7 @@
    -
    +
    Logit Bias
    diff --git a/public/scripts/openai.js b/public/scripts/openai.js index 0d64843aa..535660ac8 100644 --- a/public/scripts/openai.js +++ b/public/scripts/openai.js @@ -1084,23 +1084,34 @@ function saveModelList(data) { } } -async function sendAltScaleRequest(openai_msgs_tosend, signal) { +async function sendAltScaleRequest(openai_msgs_tosend, logit_bias, signal) { const generate_url = '/generate_altscale'; - let firstMsg = substituteParams(openai_msgs_tosend[0].content); - let subsequentMsgs = openai_msgs_tosend.slice(1); + let firstSysMsgs = [] + for(let msg of openai_msgs_tosend){ + if(msg.role === 'system') { + firstSysMsgs.push(substituteParams(msg.content)); + } else { + break; + } + } - const joinedMsgs = subsequentMsgs.reduce((acc, obj) => { + let subsequentMsgs = openai_msgs_tosend.slice(firstSysMsgs.length); + + const joinedSysMsgs = substituteParams(firstSysMsgs.join("\n")); + const joinedSubsequentMsgs = subsequentMsgs.reduce((acc, obj) => { return acc + obj.role + ": " + obj.content + "\n"; }, ""); - openai_msgs_tosend = substituteParams(joinedMsgs); - console.log(openai_msgs_tosend) + + openai_msgs_tosend = substituteParams(joinedSubsequentMsgs); const generate_data = { - sysprompt: firstMsg, + sysprompt: joinedSysMsgs, prompt: openai_msgs_tosend, temp: parseFloat(oai_settings.temp_openai), + top_p: parseFloat(oai_settings.top_p_openai), max_tokens: parseFloat(oai_settings.openai_max_tokens), + logit_bias: logit_bias, } const response = await fetch(generate_url, { @@ -1109,6 +1120,7 @@ async function sendAltScaleRequest(openai_msgs_tosend, signal) { headers: getRequestHeaders(), signal: signal }); + const data = await response.json(); return data.output; } @@ -1143,17 +1155,13 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) { openai_msgs_tosend = substituteParams(joinedMsgs); } - if (isScale && !!$('#scale-alt').prop('checked')) { - return sendAltScaleRequest(openai_msgs_tosend, signal) - } - // If we're using the window.ai 
extension, use that instead // Doesn't support logit bias yet if (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI) { return sendWindowAIRequest(openai_msgs_tosend, signal, stream); } - const logitBiasSources = [chat_completion_sources.OPENAI, chat_completion_sources.OPENROUTER]; + const logitBiasSources = [chat_completion_sources.OPENAI, chat_completion_sources.OPENROUTER, chat_completion_sources.SCALE]; if (oai_settings.bias_preset_selected && logitBiasSources.includes(oai_settings.chat_completion_source) && Array.isArray(oai_settings.bias_presets[oai_settings.bias_preset_selected]) @@ -1162,6 +1170,10 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) { biasCache = logit_bias; } + if (isScale && oai_settings.use_alt_scale) { + return sendAltScaleRequest(openai_msgs_tosend, logit_bias, signal) + } + const model = getChatCompletionModel(); const generate_data = { "messages": openai_msgs_tosend, diff --git a/server.js b/server.js index fea7e4a37..f990e049a 100644 --- a/server.js +++ b/server.js @@ -3329,11 +3329,11 @@ app.post("/generate_altscale", jsonParser, function (request, response_generate_ modelType: 'OpenAi', maxTokens: request.body.max_tokens, temperature: request.body.temp, - stop: null, + stop: "user:", suffix: null, - topP: null, + topP: request.body.top_p, logprobs: null, - logitBias: null + logitBias: request.body.logit_bias }, inputs: [ { @@ -3348,11 +3348,8 @@ app.post("/generate_altscale", jsonParser, function (request, response_generate_ values: { 'variant.taxonomy': ['undefined'], 'prompt.variablesSourceDataId': ['undefined'], - 'modelParameters.stop': ['undefined'], 'modelParameters.suffix': ['undefined'], - 'modelParameters.topP': ['undefined'], 'modelParameters.logprobs': ['undefined'], - 'modelParameters.logitBias': ['undefined'] } } }) From 9deaec58776f0385a7032754ace3377e8df6cf81 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Tue, 22 Aug 2023 14:30:49 +0300 
Subject: [PATCH 13/32] More typechecks and comments --- package-lock.json | 76 +-------------------------------- package.json | 7 +-- public/jsconfig.json | 8 +++- public/script.js | 22 +++++----- public/scripts/filters.js | 63 +++++++++++++++++++++++++++ public/scripts/group-chats.js | 8 +--- public/scripts/i18n.js | 2 +- public/scripts/instruct-mode.js | 11 ++++- public/scripts/kai-settings.js | 26 ++++++++--- public/scripts/power-user.js | 50 +++++++++++++--------- 10 files changed, 144 insertions(+), 129 deletions(-) diff --git a/package-lock.json b/package-lock.json index e781292b9..1e47541ab 100644 --- a/package-lock.json +++ b/package-lock.json @@ -52,13 +52,8 @@ "sillytavern": "server.js" }, "devDependencies": { - "@popperjs/core": "^2.11.8", - "@types/moment": "^2.13.0", - "dompurify": "^3.0.5", "pkg": "^5.8.1", - "pkg-fetch": "^3.5.2", - "showdown": "^2.1.0", - "toastr": "^2.1.4" + "pkg-fetch": "^3.5.2" } }, "node_modules/@agnai/sentencepiece-js": { @@ -649,31 +644,11 @@ "node": ">= 8" } }, - "node_modules/@popperjs/core": { - "version": "2.11.8", - "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz", - "integrity": "sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==", - "dev": true, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/popperjs" - } - }, "node_modules/@tokenizer/token": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/@tokenizer/token/-/token-0.3.0.tgz", "integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==" }, - "node_modules/@types/moment": { - "version": "2.13.0", - "resolved": "https://registry.npmjs.org/@types/moment/-/moment-2.13.0.tgz", - "integrity": "sha512-DyuyYGpV6r+4Z1bUznLi/Y7HpGn4iQ4IVcGn8zrr1P4KotKLdH0sbK1TFR6RGyX6B+G8u83wCzL+bpawKU/hdQ==", - "deprecated": "This is a stub types definition for Moment (https://github.com/moment/moment). 
Moment provides its own type definitions, so you don't need @types/moment installed!", - "dev": true, - "dependencies": { - "moment": "*" - } - }, "node_modules/@types/node": { "version": "16.9.1", "resolved": "https://registry.npmjs.org/@types/node/-/node-16.9.1.tgz", @@ -1052,15 +1027,6 @@ "resolved": "https://registry.npmjs.org/command-exists/-/command-exists-1.2.9.tgz", "integrity": "sha512-LTQ/SGc+s0Xc0Fu5WaKnR0YiygZkm9eKFvyS+fRsU7/ZWFF8ykFM6Pc9aCVf1+xasOOZpO3BAVgVrKvsqKHV7w==" }, - "node_modules/commander": { - "version": "9.5.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-9.5.0.tgz", - "integrity": "sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==", - "dev": true, - "engines": { - "node": "^12.20.0 || >=14" - } - }, "node_modules/compressible": { "version": "2.0.18", "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", @@ -1298,12 +1264,6 @@ "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.2.tgz", "integrity": "sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w==" }, - "node_modules/dompurify": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.0.5.tgz", - "integrity": "sha512-F9e6wPGtY+8KNMRAVfxeCOHU0/NPWMSENNq4pQctuXRqqdEPW7q3CrLbR5Nse044WwacyjHGOMlvNsBe1y6z9A==", - "dev": true - }, "node_modules/ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", @@ -2196,15 +2156,6 @@ "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", "dev": true }, - "node_modules/moment": { - "version": "2.29.4", - "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.4.tgz", - "integrity": "sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==", - "dev": true, - "engines": { - "node": "*" - } - }, "node_modules/ms": { "version": 
"2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", @@ -3117,22 +3068,6 @@ "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, - "node_modules/showdown": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/showdown/-/showdown-2.1.0.tgz", - "integrity": "sha512-/6NVYu4U819R2pUIk79n67SYgJHWCce0a5xTP979WbNp0FL9MN1I1QK662IDU1b6JzKTvmhgI7T7JYIxBi3kMQ==", - "dev": true, - "dependencies": { - "commander": "^9.0.0" - }, - "bin": { - "showdown": "bin/showdown.js" - }, - "funding": { - "type": "individual", - "url": "https://www.paypal.me/tiviesantos" - } - }, "node_modules/side-channel": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", @@ -3430,15 +3365,6 @@ "node": ">=8.0" } }, - "node_modules/toastr": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/toastr/-/toastr-2.1.4.tgz", - "integrity": "sha512-LIy77F5n+sz4tefMmFOntcJ6HL0Fv3k1TDnNmFZ0bU/GcvIIfy6eG2v7zQmMiYgaalAiUv75ttFrPn5s0gyqlA==", - "dev": true, - "dependencies": { - "jquery": ">=1.12.0" - } - }, "node_modules/toidentifier": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", diff --git a/package.json b/package.json index 83701b589..26b4f127b 100644 --- a/package.json +++ b/package.json @@ -80,12 +80,7 @@ ] }, "devDependencies": { - "@popperjs/core": "^2.11.8", - "@types/moment": "^2.13.0", - "dompurify": "^3.0.5", "pkg": "^5.8.1", - "pkg-fetch": "^3.5.2", - "showdown": "^2.1.0", - "toastr": "^2.1.4" + "pkg-fetch": "^3.5.2" } } diff --git a/public/jsconfig.json b/public/jsconfig.json index d0d7724b4..76ec13541 100644 --- a/public/jsconfig.json +++ b/public/jsconfig.json @@ -1,7 +1,7 @@ { "compilerOptions": { "checkJs": true, - "target": "esnext", + "target": "ESNext", "module": "commonjs", "allowUmdGlobalAccess": true, 
"allowSyntheticDefaultImports": true @@ -16,7 +16,11 @@ "toastr", "showdown", "dompurify", - "@types/moment" + "moment", + "seedrandom", + "showdown-katex", + "droll", + "handlebars", ] } } diff --git a/public/script.js b/public/script.js index 0e41948f8..b73db69b9 100644 --- a/public/script.js +++ b/public/script.js @@ -2318,7 +2318,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, const magName = isImpersonate ? (is_pygmalion ? 'You' : name1) : name2; if (isInstruct) { - message_already_generated = formatInstructModePrompt(magName, isImpersonate, false, name1, name2); + message_already_generated = formatInstructModePrompt(magName, isImpersonate, '', name1, name2); } else { message_already_generated = `${magName}: `; } @@ -4743,7 +4743,7 @@ function onPersonaDescriptionPositionInput() { } function onPersonaDescriptionInput() { - power_user.persona_description = $("#persona_description").val(); + power_user.persona_description = String($("#persona_description").val()); if (power_user.personas[user_avatar]) { let object = power_user.persona_descriptions[user_avatar]; @@ -7397,7 +7397,7 @@ $(document).ready(function () { $(document).on('click', '.swipe_left', swipe_left); $("#character_search_bar").on("input", function () { - const searchValue = $(this).val().toLowerCase(); + const searchValue = String($(this).val()).toLowerCase(); entitiesFilter.setFilterData(FILTER_TYPES.SEARCH, searchValue); }); @@ -7465,7 +7465,7 @@ $(document).ready(function () { $(this).parent().css("background", css_mes_bg); }); $(this).css("background", "#600"); //sets the bg of the mes selected for deletion - var i = $(this).attr("mesid"); //checks the message ID in the chat + var i = Number($(this).attr("mesid")); //checks the message ID in the chat this_del_mes = i; while (i < chat.length) { //as long as the current message ID is less than the total chat length @@ -7899,7 +7899,7 @@ $(document).ready(function () { $("#api_button").click(function (e) { 
e.stopPropagation(); if ($("#api_url_text").val() != "") { - let value = formatKoboldUrl($.trim($("#api_url_text").val())); + let value = formatKoboldUrl(String($("#api_url_text").val()).trim()); if (!value) { toastr.error('Please enter a valid URL.'); @@ -7933,13 +7933,13 @@ $(document).ready(function () { e.stopPropagation(); const url_source = api_use_mancer_webui ? "#mancer_api_url_text" : "#textgenerationwebui_api_url_text"; if ($(url_source).val() != "") { - let value = formatTextGenURL($(url_source).val().trim(), api_use_mancer_webui); + let value = formatTextGenURL(String($(url_source).val()).trim(), api_use_mancer_webui); if (!value) { callPopup("Please enter a valid URL.
    WebUI URLs should end with /api
    Enable 'Relaxed API URLs' to allow other paths.", 'text'); return; } - const mancer_key = $("#api_key_mancer").val().trim(); + const mancer_key = String($("#api_key_mancer").val()).trim(); if (mancer_key.length) { await writeSecret(SECRET_KEYS.MANCER, mancer_key); } @@ -8194,8 +8194,8 @@ $(document).ready(function () { const preset = novelai_settings[novelai_setting_names[nai_settings.preset_settings_novel]]; loadNovelPreset(preset); - amount_gen = parseInt($("#amount_gen").val()); - max_context = parseInt($("#max_context").val()); + amount_gen = Number($("#amount_gen").val()); + max_context = Number($("#max_context").val()); saveSettingsDebounced(); }); @@ -8572,7 +8572,7 @@ $(document).ready(function () { $("#api_button_novel").on('click', async function (e) { e.stopPropagation(); - const api_key_novel = $("#api_key_novel").val().trim(); + const api_key_novel = String($("#api_key_novel").val()).trim(); if (api_key_novel.length) { await writeSecret(SECRET_KEYS.NOVEL, api_key_novel); @@ -8922,7 +8922,7 @@ $(document).ready(function () { }); $("#bg-filter").on("input", function () { - const filterValue = $(this).val().toLowerCase(); + const filterValue = String($(this).val()).toLowerCase(); $("#bg_menu_content > div").each(function () { const $bgContent = $(this); if ($bgContent.attr("title").toLowerCase().includes(filterValue)) { diff --git a/public/scripts/filters.js b/public/scripts/filters.js index d0c5a7aeb..364259687 100644 --- a/public/scripts/filters.js +++ b/public/scripts/filters.js @@ -1,6 +1,10 @@ import { fuzzySearchCharacters, fuzzySearchGroups, fuzzySearchWorldInfo, power_user } from "./power-user.js"; import { tag_map } from "./tags.js"; +/** + * The filter types. + * @type {Object.} + */ export const FILTER_TYPES = { SEARCH: 'search', TAG: 'tag', @@ -9,11 +13,26 @@ export const FILTER_TYPES = { WORLD_INFO_SEARCH: 'world_info_search', }; +/** + * Helper class for filtering data. 
+ * @example + * const filterHelper = new FilterHelper(() => console.log('data changed')); + * filterHelper.setFilterData(FILTER_TYPES.SEARCH, 'test'); + * data = filterHelper.applyFilters(data); + */ export class FilterHelper { + /** + * Creates a new FilterHelper + * @param {Function} onDataChanged Callback to trigger when the filter data changes + */ constructor(onDataChanged) { this.onDataChanged = onDataChanged; } + /** + * The filter functions. + * @type {Object.} + */ filterFunctions = { [FILTER_TYPES.SEARCH]: this.searchFilter.bind(this), [FILTER_TYPES.GROUP]: this.groupFilter.bind(this), @@ -22,6 +41,10 @@ export class FilterHelper { [FILTER_TYPES.WORLD_INFO_SEARCH]: this.wiSearchFilter.bind(this), } + /** + * The filter data. + * @type {Object.} + */ filterData = { [FILTER_TYPES.SEARCH]: '', [FILTER_TYPES.GROUP]: false, @@ -30,6 +53,11 @@ export class FilterHelper { [FILTER_TYPES.WORLD_INFO_SEARCH]: '', } + /** + * Applies a fuzzy search filter to the World Info data. + * @param {any[]} data The data to filter. Must have a uid property. + * @returns {any[]} The filtered data. + */ wiSearchFilter(data) { const term = this.filterData[FILTER_TYPES.WORLD_INFO_SEARCH]; @@ -41,6 +69,11 @@ export class FilterHelper { return data.filter(entity => fuzzySearchResults.includes(entity.uid)); } + /** + * Applies a tag filter to the data. + * @param {any[]} data The data to filter. + * @returns {any[]} The filtered data. + */ tagFilter(data) { const TAG_LOGIC_AND = true; // switch to false to use OR logic for combining tags const { selected, excluded } = this.filterData[FILTER_TYPES.TAG]; @@ -76,6 +109,11 @@ export class FilterHelper { return data.filter(entity => getIsTagged(entity)); } + /** + * Applies a favorite filter to the data. + * @param {any[]} data The data to filter. + * @returns {any[]} The filtered data. 
+ */ favFilter(data) { if (!this.filterData[FILTER_TYPES.FAV]) { return data; @@ -84,6 +122,11 @@ export class FilterHelper { return data.filter(entity => entity.item.fav || entity.item.fav == "true"); } + /** + * Applies a group type filter to the data. + * @param {any[]} data The data to filter. + * @returns {any[]} The filtered data. + */ groupFilter(data) { if (!this.filterData[FILTER_TYPES.GROUP]) { return data; @@ -92,6 +135,11 @@ export class FilterHelper { return data.filter(entity => entity.type === 'group'); } + /** + * Applies a search filter to the data. Uses fuzzy search if enabled. + * @param {any[]} data The data to filter. + * @returns {any[]} The filtered data. + */ searchFilter(data) { if (!this.filterData[FILTER_TYPES.SEARCH]) { return data; @@ -122,6 +170,12 @@ export class FilterHelper { return data.filter(entity => getIsValidSearch(entity)); } + /** + * Sets the filter data for the given filter type. + * @param {string} filterType The filter type to set data for. + * @param {any} data The data to set. + * @param {boolean} suppressDataChanged Whether to suppress the data changed callback. + */ setFilterData(filterType, data, suppressDataChanged = false) { const oldData = this.filterData[filterType]; this.filterData[filterType] = data; @@ -132,10 +186,19 @@ export class FilterHelper { } } + /** + * Gets the filter data for the given filter type. + * @param {string} filterType The filter type to get data for. + */ getFilterData(filterType) { return this.filterData[filterType]; } + /** + * Applies all filters to the given data. + * @param {any[]} data The data to filter. + * @returns {any[]} The filtered data. 
+ */ applyFilters(data) { return Object.values(this.filterFunctions) .reduce((data, fn) => fn(data), data); diff --git a/public/scripts/group-chats.js b/public/scripts/group-chats.js index f0655d98d..b62249e89 100644 --- a/public/scripts/group-chats.js +++ b/public/scripts/group-chats.js @@ -463,7 +463,7 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) { is_group_generating = true; setCharacterName(''); setCharacterId(undefined); - const userInput = $("#send_textarea").val(); + const userInput = String($("#send_textarea").val()); if (typingIndicator.length === 0 && !isStreamingEnabled()) { typingIndicator = $( @@ -983,11 +983,9 @@ function printGroupCandidates() { const storageKey = 'GroupCandidates_PerPage'; $("#rm_group_add_members_pagination").pagination({ dataSource: getGroupCharacters({ doFilter: true, onlyMembers: false }), - pageSize: 5, pageRange: 1, position: 'top', showPageNumbers: false, - showSizeChanger: false, prevText: '<', nextText: '>', formatNavigator: PAGINATION_TEMPLATE, @@ -1011,11 +1009,9 @@ function printGroupMembers() { const storageKey = 'GroupMembers_PerPage'; $("#rm_group_members_pagination").pagination({ dataSource: getGroupCharacters({ doFilter: false, onlyMembers: true }), - pageSize: 5, pageRange: 1, position: 'top', showPageNumbers: false, - showSizeChanger: false, prevText: '<', nextText: '>', formatNavigator: PAGINATION_TEMPLATE, @@ -1320,7 +1316,7 @@ function openCharacterDefinition(characterSelect) { } function filterGroupMembers() { - const searchValue = $(this).val().toLowerCase(); + const searchValue = String($(this).val()).toLowerCase(); groupCandidatesFilter.setFilterData(FILTER_TYPES.SEARCH, searchValue); } diff --git a/public/scripts/i18n.js b/public/scripts/i18n.js index d5ac1d115..a27a5a456 100644 --- a/public/scripts/i18n.js +++ b/public/scripts/i18n.js @@ -62,7 +62,7 @@ jQuery(async () => { addLanguagesToDropdown(); $('#ui_language_select').on('change', async function () { - const language 
= $(this).val(); + const language = String($(this).val()); if (language) { localStorage.setItem(storageKey, language); diff --git a/public/scripts/instruct-mode.js b/public/scripts/instruct-mode.js index bb99615d4..bc75e266d 100644 --- a/public/scripts/instruct-mode.js +++ b/public/scripts/instruct-mode.js @@ -4,6 +4,9 @@ import { saveSettingsDebounced, substituteParams } from "../script.js"; import { selected_group } from "./group-chats.js"; import { power_user } from "./power-user.js"; +/** + * @type {any[]} Instruct mode presets. + */ export let instruct_presets = []; const controls = [ @@ -116,6 +119,11 @@ export function autoSelectInstructPreset(modelId) { * @returns {string[]} Array of instruct mode stopping strings. */ export function getInstructStoppingSequences() { + /** + * Adds instruct mode sequence to the result array. + * @param {string} sequence Sequence string. + * @returns {void} + */ function addInstructSequence(sequence) { // Cohee: oobabooga's textgen always appends newline before the sequence as a stopping string // But it's a problem for Metharme which doesn't use newlines to separate them. @@ -215,6 +223,7 @@ export function formatInstructModeExamples(mesExamples, name1, name2) { * @param {string} promptBias Prompt bias string. * @param {string} name1 User name. * @param {string} name2 Character name. + * @returns {string} Formatted instruct mode last prompt line. 
*/ export function formatInstructModePrompt(name, isImpersonate, promptBias, name1, name2) { const includeNames = power_user.instruct.names || (!!selected_group && power_user.instruct.names_force_groups); @@ -258,7 +267,7 @@ jQuery(() => { return; } - power_user.instruct.preset = name; + power_user.instruct.preset = String(name); controls.forEach(control => { if (preset[control.property] !== undefined) { power_user.instruct[control.property] = preset[control.property]; diff --git a/public/scripts/kai-settings.js b/public/scripts/kai-settings.js index d1f8895ac..9dbb5ec21 100644 --- a/public/scripts/kai-settings.js +++ b/public/scripts/kai-settings.js @@ -75,18 +75,18 @@ function loadKoboldSettings(preset) { } } -function getKoboldGenerationData(finalPromt, this_settings, this_amount_gen, this_max_context, isImpersonate, type) { +function getKoboldGenerationData(finalPrompt, this_settings, this_amount_gen, this_max_context, isImpersonate, type) { const sampler_order = kai_settings.sampler_order || this_settings.sampler_order; let generate_data = { - prompt: finalPromt, + prompt: finalPrompt, gui_settings: false, sampler_order: sampler_order, - max_context_length: parseInt(this_max_context), + max_context_length: Number(this_max_context), max_length: this_amount_gen, - rep_pen: parseFloat(kai_settings.rep_pen), - rep_pen_range: parseInt(kai_settings.rep_pen_range), + rep_pen: Number(kai_settings.rep_pen), + rep_pen_range: Number(kai_settings.rep_pen_range), rep_pen_slope: kai_settings.rep_pen_slope, - temperature: parseFloat(kai_settings.temp), + temperature: Number(kai_settings.temp), tfs: kai_settings.tfs, top_a: kai_settings.top_a, top_k: kai_settings.top_k, @@ -223,16 +223,30 @@ const sliders = [ } ]; +/** + * Determines if the Kobold stop sequence can be used with the given version. + * @param {string} version KoboldAI version to check. + * @returns {boolean} True if the Kobold stop sequence can be used, false otherwise. 
+ */ function canUseKoboldStopSequence(version) { return (version || '0.0.0').localeCompare(MIN_STOP_SEQUENCE_VERSION, undefined, { numeric: true, sensitivity: 'base' }) > -1; } +/** + * Determines if the Kobold streaming API can be used with the given version. + * @param {{ result: string; version: string; }} koboldVersion KoboldAI version object. + * @returns {boolean} True if the Kobold streaming API can be used, false otherwise. + */ function canUseKoboldStreaming(koboldVersion) { if (koboldVersion && koboldVersion.result == 'KoboldCpp') { return (koboldVersion.version || '0.0').localeCompare(MIN_STREAMING_KCPPVERSION, undefined, { numeric: true, sensitivity: 'base' }) > -1; } else return false; } +/** + * Sorts the sampler items by the given order. + * @param {any[]} orderArray Sampler order array. + */ function sortItemsByOrder(orderArray) { console.debug('Preset samplers order: ' + orderArray); const $draggableItems = $("#kobold_order"); diff --git a/public/scripts/power-user.js b/public/scripts/power-user.js index de661f342..9ccbd7db9 100644 --- a/public/scripts/power-user.js +++ b/public/scripts/power-user.js @@ -911,7 +911,7 @@ function loadContextSettings() { }); $('#context_presets').on('change', function () { - const name = $(this).find(':selected').val(); + const name = String($(this).find(':selected').val()); const preset = context_presets.find(x => x.name === name); if (!preset) { @@ -1032,6 +1032,10 @@ const compareFunc = (first, second) => { } }; +/** + * Sorts an array of entities based on the current sort settings + * @param {any[]} entities An array of objects with an `item` property + */ function sortEntitiesList(entities) { if (power_user.sort_field == undefined || entities.length === 0) { return; @@ -1039,6 +1043,7 @@ function sortEntitiesList(entities) { entities.sort((a, b) => sortFunc(a.item, b.item)); } + async function saveTheme() { const name = await callPopup('Enter a theme preset name:', 'input'); @@ -1262,8 +1267,8 @@ async function 
doDelMode(_, text) { if (text) { await delay(300) //same as above, need event signal for 'entered del mode' console.debug('parsing msgs to del') - let numMesToDel = Number(text).toFixed(0) - let lastMesID = $('.last_mes').attr('mesid') + let numMesToDel = Number(text); + let lastMesID = Number($('.last_mes').attr('mesid')); let oldestMesIDToDel = lastMesID - numMesToDel + 1; //disallow targeting first message @@ -1340,10 +1345,6 @@ function setAvgBG() { $("#user-mes-blur-tint-color-picker").attr('color', 'rgb(' + rgb.r + ',' + rgb.g + ',' + rgb.b + ')'); } */ - - - - function getAverageRGB(imgEl) { var blockSize = 5, // only visit every 5 pixels @@ -1388,6 +1389,13 @@ function setAvgBG() { } + /** + * Converts an HSL color value to RGB. + * @param {number} h Hue value + * @param {number} s Saturation value + * @param {number} l Luminance value + * @return {Array} The RGB representation + */ function hslToRgb(h, s, l) { const hueToRgb = (p, q, t) => { if (t < 0) t += 1; @@ -1429,7 +1437,7 @@ function setAvgBG() { console.log(`rLum ${rLuminance}, gLum ${gLuminance}, bLum ${bLuminance}`) - return 0.2126 * rLuminance + 0.7152 * gLuminance + 0.0722 * bLuminance; + return 0.2126 * Number(rLuminance) + 0.7152 * Number(gLuminance) + 0.0722 * Number(bLuminance); } //this version keeps BG and main text in same hue @@ -1612,13 +1620,13 @@ $(document).ready(() => { }); $("#markdown_escape_strings").on('input', function () { - power_user.markdown_escape_strings = $(this).val(); + power_user.markdown_escape_strings = String($(this).val()); saveSettingsDebounced(); reloadMarkdownProcessor(power_user.render_formulas); }); $("#start_reply_with").on('input', function () { - power_user.user_prompt_bias = $(this).val(); + power_user.user_prompt_bias = String($(this).val()); saveSettingsDebounced(); }); @@ -1745,7 +1753,7 @@ $(document).ready(() => { }); $("#themes").on('change', function () { - const themeSelected = $(this).find(':selected').val(); + const themeSelected = 
String($(this).find(':selected').val()); power_user.theme = themeSelected; applyTheme(themeSelected); saveSettingsDebounced(); @@ -1753,7 +1761,7 @@ $(document).ready(() => { $("#movingUIPresets").on('change', async function () { console.log('saw MUI preset change') - const movingUIPresetSelected = $(this).find(':selected').val(); + const movingUIPresetSelected = String($(this).find(':selected').val()); power_user.movingUIPreset = movingUIPresetSelected; applyMovingUIPreset(movingUIPresetSelected); saveSettingsDebounced(); @@ -1813,7 +1821,7 @@ $(document).ready(() => { }); $('#auto_swipe_blacklist').on('input', function () { - power_user.auto_swipe_blacklist = $(this).val() + power_user.auto_swipe_blacklist = String($(this).val()) .split(",") .map(str => str.trim()) .filter(str => str); @@ -1822,7 +1830,7 @@ $(document).ready(() => { }); $('#auto_swipe_minimum_length').on('input', function () { - const number = parseInt($(this).val()); + const number = Number($(this).val()); if (!isNaN(number)) { power_user.auto_swipe_minimum_length = number; saveSettingsDebounced(); @@ -1830,7 +1838,7 @@ $(document).ready(() => { }); $('#auto_swipe_blacklist_threshold').on('input', function () { - const number = parseInt($(this).val()); + const number = Number($(this).val()); if (!isNaN(number)) { power_user.auto_swipe_blacklist_threshold = number; saveSettingsDebounced(); @@ -1913,35 +1921,35 @@ $(document).ready(() => { $("#messageTimerEnabled").on("input", function () { const value = !!$(this).prop('checked'); power_user.timer_enabled = value; - localStorage.setItem(storage_keys.timer_enabled, power_user.timer_enabled); + localStorage.setItem(storage_keys.timer_enabled, String(power_user.timer_enabled)); switchTimer(); }); $("#messageTimestampsEnabled").on("input", function () { const value = !!$(this).prop('checked'); power_user.timestamps_enabled = value; - localStorage.setItem(storage_keys.timestamps_enabled, power_user.timestamps_enabled); + 
localStorage.setItem(storage_keys.timestamps_enabled, String(power_user.timestamps_enabled)); switchTimestamps(); }); $("#messageModelIconEnabled").on("input", function () { const value = !!$(this).prop('checked'); power_user.timestamp_model_icon = value; - localStorage.setItem(storage_keys.timestamp_model_icon, power_user.timestamp_model_icon); + localStorage.setItem(storage_keys.timestamp_model_icon, String(power_user.timestamp_model_icon)); switchIcons(); }); $("#mesIDDisplayEnabled").on("input", function () { const value = !!$(this).prop('checked'); power_user.mesIDDisplay_enabled = value; - localStorage.setItem(storage_keys.mesIDDisplay_enabled, power_user.mesIDDisplay_enabled); + localStorage.setItem(storage_keys.mesIDDisplay_enabled, String(power_user.mesIDDisplay_enabled)); switchMesIDDisplay(); }); $("#hotswapEnabled").on("input", function () { const value = !!$(this).prop('checked'); power_user.hotswap_enabled = value; - localStorage.setItem(storage_keys.hotswap_enabled, power_user.hotswap_enabled); + localStorage.setItem(storage_keys.hotswap_enabled, String(power_user.hotswap_enabled)); switchHotswap(); }); @@ -1987,7 +1995,7 @@ $(document).ready(() => { }); $('#custom_stopping_strings').on('input', function () { - power_user.custom_stopping_strings = $(this).val(); + power_user.custom_stopping_strings = String($(this).val()); saveSettingsDebounced(); }); From c7ce6a4953927fbc61189e1b2c1ae5cf388b6d44 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Tue, 22 Aug 2023 14:50:43 +0300 Subject: [PATCH 14/32] Move prompt itemization to HTML templates --- public/script.js | 269 +++--------------- public/scripts/templates/itemizationChat.html | 124 ++++++++ public/scripts/templates/itemizationText.html | 108 +++++++ 3 files changed, 272 insertions(+), 229 deletions(-) create mode 100644 public/scripts/templates/itemizationChat.html create mode 100644 public/scripts/templates/itemizationText.html diff --git 
a/public/script.js b/public/script.js index b73db69b9..1377a67c4 100644 --- a/public/script.js +++ b/public/script.js @@ -3567,238 +3567,49 @@ function promptItemize(itemizedPrompts, requestedMesId) { var selectedTokenizer = $("#tokenizer").find(':selected').text(); } - if (this_main_api == 'openai') { - //console.log('-- calling popup for OAI tokens'); - callPopup( - ` -

    - Prompt Itemization - -

    - Tokenizer: ${selectedTokenizer}
    - API Used: ${this_main_api}
    - - Only the white numbers really matter. All numbers are estimates. - Grey color items may not have been included in the context due to certain prompt format settings. - -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    System Info:
    -
    ${oaiSystemTokens}
    -
    -
    -
    -- Chat Start:
    -
    ${oaiStartTokens}
    -
    -
    -
    -- Jailbreak:
    -
    ${oaiJailbreakTokens}
    -
    -
    -
    -- NSFW:
    -
    ??
    -
    -
    -
    -- Nudge:
    -
    ${oaiNudgeTokens}
    -
    -
    -
    -- Impersonate:
    -
    ${oaiImpersonateTokens}
    -
    -
    -
    -
    -
    Prompt Tokens:
    -
    ${oaiPromptTokens}
    -
    -
    -
    -- Description:
    -
    ${charDescriptionTokens}
    -
    -
    -
    -- Personality:
    -
    ${charPersonalityTokens}
    -
    -
    -
    -- Scenario:
    -
    ${scenarioTextTokens}
    -
    -
    -
    -- Examples:
    -
    ${examplesStringTokens}
    -
    -
    -
    -- User Persona:
    -
    ${userPersonaStringTokens}
    -
    -
    -
    -
    World Info:
    -
    ${worldInfoStringTokens}
    -
    -
    -
    Chat History:
    -
    ${ActualChatHistoryTokens}
    -
    -
    -
    -
    Extensions:
    -
    ${allAnchorsTokens}
    -
    -
    -
    -- Summarize:
    -
    ${summarizeStringTokens}
    -
    -
    -
    -- Author's Note:
    -
    ${authorsNoteStringTokens}
    -
    -
    -
    -- Smart Context:
    -
    ${smartContextStringTokens}
    -
    -
    -
    -
    {{}} Bias:
    ${oaiBiasTokens}
    -
    -
    + const params = { + selectedTokenizer, + this_main_api, + storyStringTokensPercentage, + worldInfoStringTokensPercentage, + ActualChatHistoryTokensPercentage, + allAnchorsTokensPercentage, + promptBiasTokensPercentage, + storyStringTokens, + charDescriptionTokens, + charPersonalityTokens, + scenarioTextTokens, + examplesStringTokens, + userPersonaStringTokens, + instructionTokens, + worldInfoStringTokens, + ActualChatHistoryTokens, + allAnchorsTokens, + summarizeStringTokens, + authorsNoteStringTokens, + smartContextStringTokens, + promptBiasTokens, + totalTokensInPrompt, + finalPromptTokens, + thisPrompt_max_context, + thisPrompt_padding, + thisPrompt_actual: thisPrompt_max_context - thisPrompt_padding, + oaiSystemTokensPercentage, + oaiStartTokensPercentage, + oaiSystemTokens, + oaiStartTokens, + oaiJailbreakTokens, + oaiNudgeTokens, + oaiImpersonateTokens, + oaiPromptTokens, + oaiBiasTokens, + }; -
    -
    -
    -
    -
    Total Tokens in Prompt:
    ${finalPromptTokens}
    -
    -
    -
    Max Context (Context Size - Response Length):
    ${thisPrompt_max_context}
    -
    -
    -
    -
    - `, 'text' - ); + if (this_main_api == 'openai') { + callPopup(renderTemplate('itemizationChat', params), 'text'); } else { - //console.log('-- calling popup for non-OAI tokens'); - callPopup( - ` -

    - Prompt Itemization - -

    - Tokenizer: ${selectedTokenizer}
    - API Used: ${this_main_api}
    - - Only the white numbers really matter. All numbers are estimates. - Grey color items may not have been included in the context due to certain prompt format settings. - -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    Character Definitions:
    -
    ${storyStringTokens}
    -
    -
    -
    -- Description:
    -
    ${charDescriptionTokens}
    -
    -
    -
    -- Personality:
    -
    ${charPersonalityTokens}
    -
    -
    -
    -- Scenario:
    -
    ${scenarioTextTokens}
    -
    -
    -
    -- Examples:
    -
    ${examplesStringTokens}
    -
    -
    -
    -- User Persona:
    -
    ${userPersonaStringTokens}
    -
    -
    -
    -- System Prompt (Instruct):
    -
    ${instructionTokens}
    -
    -
    -
    -
    World Info:
    -
    ${worldInfoStringTokens}
    -
    -
    -
    Chat History:
    -
    ${ActualChatHistoryTokens}
    -
    -
    -
    -
    Extensions:
    -
    ${allAnchorsTokens}
    -
    -
    -
    -- Summarize:
    -
    ${summarizeStringTokens}
    -
    -
    -
    -- Author's Note:
    -
    ${authorsNoteStringTokens}
    -
    -
    -
    -- Smart Context:
    -
    ${smartContextStringTokens}
    -
    -
    -
    -
    {{}} Bias:
    ${promptBiasTokens}
    -
    -
    - -
    -
    -
    -
    -
    Total Tokens in Prompt:
    ${totalTokensInPrompt}
    -
    - -
    -
    Max Context (Context Size - Response Length):
    ${thisPrompt_max_context}
    -
    -
    -
    - Padding:
    ${thisPrompt_padding}
    -
    -
    -
    Actual Max Context Allowed:
    ${thisPrompt_max_context - thisPrompt_padding}
    -
    -
    -
    -
    - `, 'text' - ); + callPopup(renderTemplate('itemizationText', params), 'text'); } } diff --git a/public/scripts/templates/itemizationChat.html b/public/scripts/templates/itemizationChat.html new file mode 100644 index 000000000..e05756a6f --- /dev/null +++ b/public/scripts/templates/itemizationChat.html @@ -0,0 +1,124 @@ +

    + Prompt Itemization + +

    +Tokenizer: {{selectedTokenizer}}
    +API Used: {{this_main_api}}
    + + Only the white numbers really matter. All numbers are estimates. + Grey color items may not have been included in the context due to certain prompt format settings. + +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    System Info:
    +
    {{oaiSystemTokens}}
    +
    +
    +
    -- Chat Start:
    +
    {{oaiStartTokens}}
    +
    +
    +
    -- Jailbreak:
    +
    {{oaiJailbreakTokens}}
    +
    +
    +
    -- NSFW:
    +
    ??
    +
    +
    +
    -- Nudge:
    +
    {{oaiNudgeTokens}}
    +
    +
    +
    -- Impersonate:
    +
    {{oaiImpersonateTokens}}
    +
    +
    +
    +
    +
    Prompt Tokens:
    +
    {{oaiPromptTokens}}
    +
    +
    +
    -- Description:
    +
    {{charDescriptionTokens}}
    +
    +
    +
    -- Personality:
    +
    {{charPersonalityTokens}}
    +
    +
    +
    -- Scenario:
    +
    {{scenarioTextTokens}}
    +
    +
    +
    -- Examples:
    +
    {{examplesStringTokens}}
    +
    +
    +
    -- User Persona:
    +
    {{userPersonaStringTokens}}
    +
    +
    +
    +
    World Info:
    +
    {{worldInfoStringTokens}}
    +
    +
    +
    Chat History:
    +
    {{ActualChatHistoryTokens}}
    +
    +
    +
    +
    Extensions:
    +
    {{allAnchorsTokens}}
    +
    +
    +
    -- Summarize:
    +
    {{summarizeStringTokens}}
    +
    +
    +
    -- Author's Note:
    +
    {{authorsNoteStringTokens}}
    +
    +
    +
    -- Smart Context:
    +
    {{smartContextStringTokens}}
    +
    +
    +
    +
    {{}} Bias:
    +
    {{oaiBiasTokens}}
    +
    +
    + +
    +
    +
    +
    +
    Total Tokens in Prompt:
    +
    {{finalPromptTokens}}
    +
    +
    +
    Max Context (Context Size - Response Length):
    +
    {{thisPrompt_max_context}}
    +
    +
    +
    +
    diff --git a/public/scripts/templates/itemizationText.html b/public/scripts/templates/itemizationText.html new file mode 100644 index 000000000..b6f60404b --- /dev/null +++ b/public/scripts/templates/itemizationText.html @@ -0,0 +1,108 @@ +

    + Prompt Itemization + +

    +Tokenizer: {{selectedTokenizer}}
    +API Used: {{this_main_api}}
    + + Only the white numbers really matter. All numbers are estimates. + Grey color items may not have been included in the context due to certain prompt format settings. + +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    Character Definitions:
    +
    {{storyStringTokens}}
    +
    +
    +
    -- Description:
    +
    {{charDescriptionTokens}}
    +
    +
    +
    -- Personality:
    +
    {{charPersonalityTokens}}
    +
    +
    +
    -- Scenario:
    +
    {{scenarioTextTokens}}
    +
    +
    +
    -- Examples:
    +
    {{examplesStringTokens}}
    +
    +
    +
    -- User Persona:
    +
    {{userPersonaStringTokens}}
    +
    +
    +
    -- System Prompt (Instruct):
    +
    {{instructionTokens}}
    +
    +
    +
    +
    World Info:
    +
    {{worldInfoStringTokens}}
    +
    +
    +
    Chat History:
    +
    {{ActualChatHistoryTokens}}
    +
    +
    +
    +
    Extensions:
    +
    {{allAnchorsTokens}}
    +
    +
    +
    -- Summarize:
    +
    {{summarizeStringTokens}}
    +
    +
    +
    -- Author's Note:
    +
    {{authorsNoteStringTokens}}
    +
    +
    +
    -- Smart Context:
    +
    {{smartContextStringTokens}}
    +
    +
    +
    +
    {{}} Bias:
    +
    {{promptBiasTokens}}
    +
    +
    + +
    +
    +
    +
    +
    Total Tokens in Prompt:
    +
    {{totalTokensInPrompt}}
    +
    +
    +
    Max Context (Context Size - Response Length):
    +
    {{thisPrompt_max_context}}
    +
    +
    +
    - Padding:
    +
    {{thisPrompt_padding}}
    +
    +
    +
    Actual Max Context Allowed:
    +
    {{thisPrompt_actual}}
    +
    +
    +
    +
    From 3716fd51ef5d79d6278a131fe26e99f606a2b63a Mon Sep 17 00:00:00 2001 From: based Date: Tue, 22 Aug 2023 22:29:57 +1000 Subject: [PATCH 15/32] add example names to initial system prompt --- public/scripts/openai.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/public/scripts/openai.js b/public/scripts/openai.js index 535660ac8..fdc411216 100644 --- a/public/scripts/openai.js +++ b/public/scripts/openai.js @@ -109,7 +109,7 @@ const max_4k = 4095; const max_8k = 8191; const max_16k = 16383; const max_32k = 32767; -const scale_max = 8191; // Probably more. Save some for the system prompt defined on Scale site. +const scale_max = 8191; const claude_max = 9000; // We have a proper tokenizer, so theoretically could be larger (up to 9k) const palm2_max = 7500; // The real context window is 8192, spare some for padding due to using turbo tokenizer const claude_100k_max = 99000; @@ -1090,7 +1090,7 @@ async function sendAltScaleRequest(openai_msgs_tosend, logit_bias, signal) { let firstSysMsgs = [] for(let msg of openai_msgs_tosend){ if(msg.role === 'system') { - firstSysMsgs.push(substituteParams(msg.content)); + firstSysMsgs.push(substituteParams(msg.name ? msg.name + ": " + msg.content : msg.content)); } else { break; } From 32f9908a97eff48af3aa550262b3620f58b60bce Mon Sep 17 00:00:00 2001 From: based Date: Tue, 22 Aug 2023 23:11:07 +1000 Subject: [PATCH 16/32] Unrelated Optimization: one slipped through. 
--- public/scripts/group-chats.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/scripts/group-chats.js b/public/scripts/group-chats.js index f0655d98d..41ce154a2 100644 --- a/public/scripts/group-chats.js +++ b/public/scripts/group-chats.js @@ -1390,7 +1390,7 @@ export async function createNewGroupChat(groupId) { group.chat_metadata = {}; updateChatMetadata(group.chat_metadata, true); - await editGroup(group.id, true); + await editGroup(group.id, true, false); await getGroupChat(group.id); } From 1abb739a8cefccbd8b28dcde064bc115e2ab3508 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Tue, 22 Aug 2023 16:14:21 +0300 Subject: [PATCH 17/32] Replace one-time bias regex parser with Handlebars helper --- public/script.js | 38 ++++++++++++++------------------------ 1 file changed, 14 insertions(+), 24 deletions(-) diff --git a/public/script.js b/public/script.js index 1377a67c4..42a5fbf02 100644 --- a/public/script.js +++ b/public/script.js @@ -1937,34 +1937,24 @@ export function extractMessageBias(message) { return null; } - const forbiddenMatches = ['user', 'char', 'time', 'date', 'random', 'idle_duration', 'roll']; - const found = []; - const rxp = /\{\{([\s\S]+?)\}\}/gm; - //const rxp = /{([^}]+)}/g; - let curMatch; + try { + const biasHandlebars = Handlebars.create(); + const biasMatches = []; + biasHandlebars.registerHelper('bias', function (text) { + biasMatches.push(text); + return ''; + }); + const template = biasHandlebars.compile(message); + template({}); - while ((curMatch = rxp.exec(message))) { - const match = curMatch[1].trim(); - - // Ignore random/roll pattern matches - if (/^random[ : ].+/i.test(match) || /^roll[ : ].+/i.test(match)) { - continue; + if (biasMatches && biasMatches.length > 0) { + return ` ${biasMatches.join(" ")}`; } - if (forbiddenMatches.includes(match.toLowerCase())) { - continue; - } - - found.push(match); + return ''; + } catch { + return ''; } - - let biasString = 
''; - - if (found.length) { - biasString = ` ${found.join(" ")}` - } - - return biasString; } function cleanGroupMessage(getMessage) { From 25c7686462e91a44582efc660827a779b54920bc Mon Sep 17 00:00:00 2001 From: kingbri Date: Tue, 22 Aug 2023 10:17:20 -0400 Subject: [PATCH 18/32] CFG: Pass entire guidance object If an API doesn't use the negative prompt from generate, allow the negative prompt to be refetched in the specific API. This requires the full guidance scale object instead of just the number. Signed-off-by: kingbri --- public/script.js | 4 +++- public/scripts/nai-settings.js | 4 ++-- public/scripts/textgen-settings.js | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/public/script.js b/public/script.js index a0d37f792..fd5bb63b5 100644 --- a/public/script.js +++ b/public/script.js @@ -2975,8 +2975,10 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, // Get the negative prompt first since it has the unmodified mesSend array let negativePrompt = main_api == 'textgenerationwebui' ? 
getCombinedPrompt(true) : undefined; let finalPromt = getCombinedPrompt(false); + + // Include the entire guidance scale object const cfgValues = { - guidanceScale: cfgGuidanceScale?.value, + guidanceScale: cfgGuidanceScale, negativePrompt: negativePrompt }; diff --git a/public/scripts/nai-settings.js b/public/scripts/nai-settings.js index 85d470c4f..6d5a9d0bc 100644 --- a/public/scripts/nai-settings.js +++ b/public/scripts/nai-settings.js @@ -396,7 +396,7 @@ function getBadWordPermutations(text) { } export function getNovelGenerationData(finalPrompt, this_settings, this_amount_gen, isImpersonate, cfgValues) { - if (cfgValues.guidanceScale && cfgValues.guidanceScale !== 1) { + if (cfgValues.guidanceScale && cfgValues.guidanceScale?.value !== 1) { cfgValues.negativePrompt = (getCfgPrompt(cfgValues.guidanceScale, true))?.value; } @@ -440,7 +440,7 @@ export function getNovelGenerationData(finalPrompt, this_settings, this_amount_g "typical_p": parseFloat(nai_settings.typical_p), "mirostat_lr": parseFloat(nai_settings.mirostat_lr), "mirostat_tau": parseFloat(nai_settings.mirostat_tau), - "cfg_scale": cfgValues?.guidanceScale ?? parseFloat(nai_settings.cfg_scale), + "cfg_scale": cfgValues?.guidanceScale?.value ?? parseFloat(nai_settings.cfg_scale), "cfg_uc": cfgValues?.negativePrompt ?? nai_settings.cfg_uc ?? "", "phrase_rep_pen": nai_settings.phrase_rep_pen, "stop_sequences": stopSequences, diff --git a/public/scripts/textgen-settings.js b/public/scripts/textgen-settings.js index d61d0dd25..5e41ff2c0 100644 --- a/public/scripts/textgen-settings.js +++ b/public/scripts/textgen-settings.js @@ -251,7 +251,7 @@ export function getTextGenGenerationData(finalPromt, this_amount_gen, isImperson 'penalty_alpha': textgenerationwebui_settings.penalty_alpha, 'length_penalty': textgenerationwebui_settings.length_penalty, 'early_stopping': textgenerationwebui_settings.early_stopping, - 'guidance_scale': isImpersonate ? 1 : cfgValues?.guidanceScale ?? 
textgenerationwebui_settings.guidance_scale ?? 1, + 'guidance_scale': isImpersonate ? 1 : cfgValues?.guidanceScale?.value ?? textgenerationwebui_settings.guidance_scale ?? 1, 'negative_prompt': isImpersonate ? '' : cfgValues?.negativePrompt ?? textgenerationwebui_settings.negative_prompt ?? '', 'seed': textgenerationwebui_settings.seed, 'add_bos_token': textgenerationwebui_settings.add_bos_token, From 41cc86af9f453bbfd671d818e04a566f89c75c0b Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Tue, 22 Aug 2023 17:46:37 +0300 Subject: [PATCH 19/32] Add example extension for chat variables. Allow registering custom text processing functions for extensions --- public/jsconfig.json | 1 + public/script.js | 46 +++++++------- public/scripts/extensions.js | 45 +++++++++++++- public/scripts/extensions/variables/index.js | 62 +++++++++++++++++++ .../extensions/variables/manifest.json | 11 ++++ public/scripts/openai.js | 6 +- public/scripts/slash-commands.js | 6 +- server.js | 6 +- 8 files changed, 149 insertions(+), 34 deletions(-) create mode 100644 public/scripts/extensions/variables/index.js create mode 100644 public/scripts/extensions/variables/manifest.json diff --git a/public/jsconfig.json b/public/jsconfig.json index 76ec13541..1fe7ce877 100644 --- a/public/jsconfig.json +++ b/public/jsconfig.json @@ -21,6 +21,7 @@ "showdown-katex", "droll", "handlebars", + "highlight.js" ] } } diff --git a/public/script.js b/public/script.js index 42a5fbf02..ffd25d483 100644 --- a/public/script.js +++ b/public/script.js @@ -136,7 +136,7 @@ import { PAGINATION_TEMPLATE, } from "./scripts/utils.js"; -import { extension_settings, getContext, loadExtensionSettings, runGenerationInterceptors, saveMetadataDebounced } from "./scripts/extensions.js"; +import { extension_settings, getContext, loadExtensionSettings, registerExtensionHelper, runGenerationInterceptors, saveMetadataDebounced } from "./scripts/extensions.js"; import { executeSlashCommands, 
getSlashCommandsHelp, registerSlashCommand } from "./scripts/slash-commands.js"; import { tag_map, @@ -2087,14 +2087,14 @@ class StreamingProcessor { $(`#chat .mes[mesid="${messageId}"] .mes_buttons`).css({ 'display': 'flex' }); } - onStartStreaming(text) { + async onStartStreaming(text) { let messageId = -1; if (this.type == "impersonate") { $('#send_textarea').val('').trigger('input'); } else { - saveReply(this.type, text); + await saveReply(this.type, text); messageId = count_view_mes - 1; this.showMessageButtons(messageId); } @@ -2170,8 +2170,13 @@ class StreamingProcessor { } } - onFinishStreaming(messageId, text) { + async onFinishStreaming(messageId, text) { this.hideMessageButtons(this.messageId); + + const eventType = this.type !== 'impersonate' ? event_types.MESSAGE_RECEIVED : event_types.IMPERSONATE_READY; + const eventData = this.type !== 'impersonate' ? this.messageId : text; + await eventSource.emit(eventType, eventData); + this.onProgressStreaming(messageId, text, true); addCopyToCodeBlocks($(`#chat .mes[mesid="${messageId}"]`)); saveChatConditional(); @@ -2213,10 +2218,6 @@ class StreamingProcessor { } } playMessageSound(); - - const eventType = this.type !== 'impersonate' ? event_types.MESSAGE_RECEIVED : event_types.IMPERSONATE_READY; - const eventData = this.type !== 'impersonate' ? 
this.messageId : text; - eventSource.emit(eventType, eventData); } onErrorStreaming() { @@ -2241,7 +2242,7 @@ class StreamingProcessor { this.onErrorStreaming(); } - nullStreamingGeneration() { + *nullStreamingGeneration() { throw new Error('Generation function for streaming is not hooked up'); } @@ -2260,7 +2261,7 @@ class StreamingProcessor { async generate() { if (this.messageId == -1) { - this.messageId = this.onStartStreaming(this.firstMessageText); + this.messageId = await this.onStartStreaming(this.firstMessageText); await delay(1); // delay for message to be rendered scrollLock = false; } @@ -3026,7 +3027,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, } if (streamingProcessor && !streamingProcessor.isStopped && streamingProcessor.isFinished) { - streamingProcessor.onFinishStreaming(streamingProcessor.messageId, getMessage); + await streamingProcessor.onFinishStreaming(streamingProcessor.messageId, getMessage); streamingProcessor = null; } } @@ -3059,11 +3060,11 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, if (!isImpersonate) { if (tokens_already_generated == 0) { console.debug("New message"); - ({ type, getMessage } = saveReply(type, getMessage, this_mes_is_name, title)); + ({ type, getMessage } = await saveReply(type, getMessage, this_mes_is_name, title)); } else { console.debug("Should append message"); - ({ type, getMessage } = saveReply('append', getMessage, this_mes_is_name, title)); + ({ type, getMessage } = await saveReply('append', getMessage, this_mes_is_name, title)); } } else { let chunk = cleanUpMessage(message_already_generated, true, isContinue, true); @@ -3112,12 +3113,11 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, else { // Without streaming we'll be having a full message on continuation. Treat it as a multigen last chunk. 
if (!isMultigenEnabled() && originalType !== 'continue') { - ({ type, getMessage } = saveReply(type, getMessage, this_mes_is_name, title)); + ({ type, getMessage } = await saveReply(type, getMessage, this_mes_is_name, title)); } else { - ({ type, getMessage } = saveReply('appendFinal', getMessage, this_mes_is_name, title)); + ({ type, getMessage } = await saveReply('appendFinal', getMessage, this_mes_is_name, title)); } - await eventSource.emit(event_types.MESSAGE_RECEIVED, (chat.length - 1)); } activateSendButtons(); @@ -3294,9 +3294,9 @@ export async function sendMessageAsUser(textareaText, messageBias) { chat[chat.length - 1]['extra']['bias'] = messageBias; } statMesProcess(chat[chat.length - 1], 'user', characters, this_chid, ''); - addOneMessage(chat[chat.length - 1]); // Wait for all handlers to finish before continuing with the prompt await eventSource.emit(event_types.MESSAGE_SENT, (chat.length - 1)); + addOneMessage(chat[chat.length - 1]); console.debug('message sent as user'); } @@ -3814,10 +3814,7 @@ function cleanUpMessage(getMessage, isImpersonate, isContinue, displayIncomplete return getMessage; } - - - -function saveReply(type, getMessage, this_mes_is_name, title) { +async function saveReply(type, getMessage, this_mes_is_name, title) { if (type != 'append' && type != 'continue' && type != 'appendFinal' && chat.length && (chat[chat.length - 1]['swipe_id'] === undefined || chat[chat.length - 1]['is_user'])) { type = 'normal'; @@ -3838,6 +3835,7 @@ function saveReply(type, getMessage, this_mes_is_name, title) { chat[chat.length - 1]['send_date'] = getMessageTimeStamp(); chat[chat.length - 1]['extra']['api'] = getGeneratingApi(); chat[chat.length - 1]['extra']['model'] = getGeneratingModel(); + await eventSource.emit(event_types.MESSAGE_RECEIVED, (chat.length - 1)); addOneMessage(chat[chat.length - 1], { type: 'swipe' }); } else { chat[chat.length - 1]['mes'] = getMessage; @@ -3852,6 +3850,7 @@ function saveReply(type, getMessage, this_mes_is_name, 
title) { chat[chat.length - 1]['send_date'] = getMessageTimeStamp(); chat[chat.length - 1]["extra"]["api"] = getGeneratingApi(); chat[chat.length - 1]["extra"]["model"] = getGeneratingModel(); + await eventSource.emit(event_types.MESSAGE_RECEIVED, (chat.length - 1)); addOneMessage(chat[chat.length - 1], { type: 'swipe' }); } else if (type === 'appendFinal') { oldMessage = chat[chat.length - 1]['mes']; @@ -3863,6 +3862,7 @@ function saveReply(type, getMessage, this_mes_is_name, title) { chat[chat.length - 1]['send_date'] = getMessageTimeStamp(); chat[chat.length - 1]["extra"]["api"] = getGeneratingApi(); chat[chat.length - 1]["extra"]["model"] = getGeneratingModel(); + await eventSource.emit(event_types.MESSAGE_RECEIVED, (chat.length - 1)); addOneMessage(chat[chat.length - 1], { type: 'swipe' }); } else { @@ -3896,6 +3896,7 @@ function saveReply(type, getMessage, this_mes_is_name, title) { } saveImageToMessage(img, chat[chat.length - 1]); + await eventSource.emit(event_types.MESSAGE_RECEIVED, (chat.length - 1)); addOneMessage(chat[chat.length - 1]); } @@ -6503,8 +6504,8 @@ async function createOrEditCharacter(e) { add_mes_without_animation = true; //console.log('form create submission calling addOneMessage'); - addOneMessage(chat[0]); await eventSource.emit(event_types.MESSAGE_RECEIVED, (chat.length - 1)); + addOneMessage(chat[0]); } } $("#create_button").removeAttr("disabled"); @@ -6558,6 +6559,7 @@ window["SillyTavern"].getContext = function () { deactivateSendButtons, saveReply, registerSlashCommand: registerSlashCommand, + registerHelper: registerExtensionHelper, }; }; diff --git a/public/scripts/extensions.js b/public/scripts/extensions.js index 83d88f346..70be2dc44 100644 --- a/public/scripts/extensions.js +++ b/public/scripts/extensions.js @@ -1,5 +1,5 @@ import { callPopup, eventSource, event_types, saveSettings, saveSettingsDebounced, getRequestHeaders } from "../script.js"; -import { isSubsetOf, debounce } from "./utils.js"; +import { isSubsetOf, debounce, 
waitUntilCondition } from "./utils.js"; export { getContext, getApiUrl, @@ -16,6 +16,42 @@ let manifests = []; const defaultUrl = "http://localhost:5100"; export const saveMetadataDebounced = debounce(async () => await getContext().saveMetadata(), 1000); +export const extensionsHandlebars = Handlebars.create(); + +/** + * Registers a Handlebars helper for use in extensions. + * @param {string} name Handlebars helper name + * @param {function} helper Handlebars helper function + */ +export function registerExtensionHelper(name, helper) { + extensionsHandlebars.registerHelper(name, helper); +} + +/** + * Applies handlebars extension helpers to a message. + * @param {number} messageId Message index in the chat. + */ +function processExtensionHelpers(messageId) { + const context = getContext(); + const message = context.chat[messageId]; + + if (!message?.mes || typeof message.mes !== 'string') { + return; + } + + // Don't waste time if there are no mustaches + if (!message.mes.includes('{{')) { + return; + } + + try { + const template = extensionsHandlebars.compile(message.mes, { noEscape: true }); + message.mes = template({}); + } catch { + // Ignore + } +} + // Disables parallel updates class ModuleWorkerWrapper { constructor(callback) { @@ -629,10 +665,13 @@ async function runGenerationInterceptors(chat, contextSize) { } } -$(document).ready(async function () { - setTimeout(function () { +jQuery(function () { + setTimeout(async function () { addExtensionsButtonAndMenu(); $("#extensionsMenuButton").css("display", "flex"); + await waitUntilCondition(() => eventSource !== undefined, 1000, 100); + eventSource.on(event_types.MESSAGE_RECEIVED, processExtensionHelpers); + eventSource.on(event_types.MESSAGE_SENT, processExtensionHelpers); }, 100) $("#extensions_connect").on('click', connectClickHandler); diff --git a/public/scripts/extensions/variables/index.js b/public/scripts/extensions/variables/index.js new file mode 100644 index 000000000..2b473993a --- /dev/null +++ 
b/public/scripts/extensions/variables/index.js @@ -0,0 +1,62 @@ +import { getContext } from "../../extensions.js"; + +/** + * Gets a chat variable from the current chat metadata. + * @param {string} name The name of the variable to get. + * @returns {string} The value of the variable. + */ +function getChatVariable(name) { + const metadata = getContext().chatMetadata; + + if (!metadata) { + return ''; + } + + if (!metadata.variables) { + metadata.variables = {}; + return ''; + } + + return metadata.variables[name] || ''; +} + +/** + * Sets a chat variable in the current chat metadata. + * @param {string} name The name of the variable to set. + * @param {any} value The value of the variable to set. + */ +function setChatVariable(name, value) { + const metadata = getContext().chatMetadata; + + if (!metadata) { + return; + } + + if (!metadata.variables) { + metadata.variables = {}; + } + + metadata.variables[name] = value; +} + +function listChatVariables() { + const metadata = getContext().chatMetadata; + + if (!metadata) { + return ''; + } + + if (!metadata.variables) { + metadata.variables = {}; + return ''; + } + + return Object.keys(metadata.variables).map(key => `${key}=${metadata.variables[key]}`).join(';'); +} + +jQuery(() => { + const context = getContext(); + context.registerHelper('getvar', getChatVariable); + context.registerHelper('setvar', setChatVariable); + context.registerHelper('listvar', listChatVariables); +}); diff --git a/public/scripts/extensions/variables/manifest.json b/public/scripts/extensions/variables/manifest.json new file mode 100644 index 000000000..9c4e9cc48 --- /dev/null +++ b/public/scripts/extensions/variables/manifest.json @@ -0,0 +1,11 @@ +{ + "display_name": "Chat Variables", + "loading_order": 100, + "requires": [], + "optional": [], + "js": "index.js", + "css": "", + "author": "Cohee#1207", + "version": "1.0.0", + "homePage": "https://github.com/SillyTavern/SillyTavern" +} diff --git a/public/scripts/openai.js 
b/public/scripts/openai.js index fdc411216..621e094db 100644 --- a/public/scripts/openai.js +++ b/public/scripts/openai.js @@ -1088,8 +1088,8 @@ async function sendAltScaleRequest(openai_msgs_tosend, logit_bias, signal) { const generate_url = '/generate_altscale'; let firstSysMsgs = [] - for(let msg of openai_msgs_tosend){ - if(msg.role === 'system') { + for (let msg of openai_msgs_tosend) { + if (msg.role === 'system') { firstSysMsgs.push(substituteParams(msg.name ? msg.name + ": " + msg.content : msg.content)); } else { break; @@ -2587,7 +2587,7 @@ function onSettingsPresetChange() { use_ai21_tokenizer: ['#use_ai21_tokenizer', 'use_ai21_tokenizer', false], exclude_assistant: ['#exclude_assistant', 'exclude_assistant', false], use_alt_scale: ['#use_alt_scale', 'use_alt_scale', false], - }; + }; const presetName = $('#settings_perset_openai').find(":selected").text(); oai_settings.preset_settings_openai = presetName; diff --git a/public/scripts/slash-commands.js b/public/scripts/slash-commands.js index aae5fc9e9..73aa98e25 100644 --- a/public/scripts/slash-commands.js +++ b/public/scripts/slash-commands.js @@ -338,8 +338,8 @@ async function sendMessageAs(_, text) { }; chat.push(message); - addOneMessage(message); await eventSource.emit(event_types.MESSAGE_SENT, (chat.length - 1)); + addOneMessage(message); saveChatConditional(); } @@ -369,8 +369,8 @@ async function sendNarratorMessage(_, text) { }; chat.push(message); - addOneMessage(message); await eventSource.emit(event_types.MESSAGE_SENT, (chat.length - 1)); + addOneMessage(message); saveChatConditional(); } @@ -394,8 +394,8 @@ async function sendCommentMessage(_, text) { }; chat.push(message); - addOneMessage(message); await eventSource.emit(event_types.MESSAGE_SENT, (chat.length - 1)); + addOneMessage(message); saveChatConditional(); } diff --git a/server.js b/server.js index f990e049a..91ff3063f 100644 --- a/server.js +++ b/server.js @@ -3301,7 +3301,7 @@ async function sendScaleRequest(request, response) { } 
app.post("/generate_altscale", jsonParser, function (request, response_generate_scale) { - if(!request.body) return response_generate_scale.sendStatus(400); + if (!request.body) return response_generate_scale.sendStatus(400); fetch('https://dashboard.scale.com/spellbook/api/trpc/v2.variant.run', { method: 'POST', @@ -3357,11 +3357,11 @@ app.post("/generate_altscale", jsonParser, function (request, response_generate_ .then(response => response.json()) .then(data => { console.log(data.result.data.json.outputs[0]) - return response_generate_scale.send({output: data.result.data.json.outputs[0]}); + return response_generate_scale.send({ output: data.result.data.json.outputs[0] }); }) .catch((error) => { console.error('Error:', error) - return response_generate_scale.send({error: true}) + return response_generate_scale.send({ error: true }) }); }); From 7f4a47c1b0746c27ea8ff4cf886cfcff86f349f2 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Tue, 22 Aug 2023 17:52:45 +0300 Subject: [PATCH 20/32] Don't allow setting undefined variables --- public/scripts/extensions/variables/index.js | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/public/scripts/extensions/variables/index.js b/public/scripts/extensions/variables/index.js index 2b473993a..478181b7d 100644 --- a/public/scripts/extensions/variables/index.js +++ b/public/scripts/extensions/variables/index.js @@ -26,6 +26,10 @@ function getChatVariable(name) { * @param {any} value The value of the variable to set. 
*/ function setChatVariable(name, value) { + if (name === undefined || value === undefined) { + return; + } + const metadata = getContext().chatMetadata; if (!metadata) { From ea4d4a8fd6fc3063c38596bcd2f600bf5d71de8d Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Tue, 22 Aug 2023 18:13:03 +0300 Subject: [PATCH 21/32] Fix system messages using incorrect timezone --- public/script.js | 2 +- public/scripts/extensions/caption/index.js | 3 +- .../extensions/speech-recognition/index.js | 39 ++++++++++--------- public/scripts/group-chats.js | 4 +- public/scripts/slash-commands.js | 8 ++-- 5 files changed, 29 insertions(+), 27 deletions(-) diff --git a/public/script.js b/public/script.js index ffd25d483..8ef9ec9a5 100644 --- a/public/script.js +++ b/public/script.js @@ -1910,7 +1910,7 @@ function sendSystemMessage(type, text, extra = {}) { return; } - const newMessage = { ...systemMessage, send_date: humanizedDateTime() }; + const newMessage = { ...systemMessage, send_date: getMessageTimeStamp() }; if (text) { newMessage.mes = text; diff --git a/public/scripts/extensions/caption/index.js b/public/scripts/extensions/caption/index.js index 89214a5ef..6e12684eb 100644 --- a/public/scripts/extensions/caption/index.js +++ b/public/scripts/extensions/caption/index.js @@ -1,6 +1,7 @@ import { getBase64Async } from "../../utils.js"; import { getContext, getApiUrl, doExtrasFetch, extension_settings } from "../../extensions.js"; import { callPopup, saveSettingsDebounced } from "../../../script.js"; +import { getMessageTimeStamp } from "../../RossAscends-mods.js"; export { MODULE_NAME }; const MODULE_NAME = 'caption'; @@ -52,7 +53,7 @@ async function sendCaptionedMessage(caption, image) { name: context.name1, is_user: true, is_name: true, - send_date: Date.now(), + send_date: getMessageTimeStamp(), mes: messageText, extra: { image: image, diff --git a/public/scripts/extensions/speech-recognition/index.js 
b/public/scripts/extensions/speech-recognition/index.js index e5b0ae116..8678b6bcd 100644 --- a/public/scripts/extensions/speech-recognition/index.js +++ b/public/scripts/extensions/speech-recognition/index.js @@ -4,11 +4,12 @@ TODO: */ import { saveSettingsDebounced } from "../../../script.js"; -import { getContext, getApiUrl, modules, extension_settings, ModuleWorkerWrapper, doExtrasFetch } from "../../extensions.js"; +import { getContext, extension_settings, ModuleWorkerWrapper } from "../../extensions.js"; import { VoskSttProvider } from './vosk.js' import { WhisperSttProvider } from './whisper.js' import { BrowserSttProvider } from './browser.js' import { StreamingSttProvider } from './streaming.js' +import { getMessageTimeStamp } from "../../RossAscends-mods.js"; export { MODULE_NAME }; const MODULE_NAME = 'Speech Recognition'; @@ -61,10 +62,10 @@ async function moduleWorker() { let messageStart = -1; if (extension_settings.speech_recognition.Streaming.triggerWordsEnabled) { - + for (const triggerWord of extension_settings.speech_recognition.Streaming.triggerWords) { const triggerPos = userMessageRaw.indexOf(triggerWord.toLowerCase()); - + // Trigger word not found or not starting message and just a substring if (triggerPos == -1){ // | (triggerPos > 0 & userMessageFormatted[triggerPos-1] != " ")) { console.debug(DEBUG_PREFIX+"trigger word not found: ", triggerWord); @@ -152,12 +153,12 @@ async function processTranscript(transcript) { name: context.name1, is_user: true, is_name: true, - send_date: Date.now(), + send_date: getMessageTimeStamp(), mes: messageText, }; context.chat.push(message); context.addOneMessage(message); - + await context.generate(); $('#debug_output').text(": message sent: \""+ transcriptFormatted +"\""); @@ -191,10 +192,10 @@ async function processTranscript(transcript) { function loadNavigatorAudioRecording() { if (navigator.mediaDevices.getUserMedia) { console.debug(DEBUG_PREFIX+' getUserMedia supported by browser.'); - + let onSuccess 
= function(stream) { const mediaRecorder = new MediaRecorder(stream); - + $("#microphone_button").off('click').on("click", function() { if (!audioRecording) { mediaRecorder.start(); @@ -211,30 +212,30 @@ function loadNavigatorAudioRecording() { $("#microphone_button").toggleClass('fa-microphone fa-microphone-slash'); } }); - + mediaRecorder.onstop = async function() { console.debug(DEBUG_PREFIX+"data available after MediaRecorder.stop() called: ", audioChunks.length, " chunks"); const audioBlob = new Blob(audioChunks, { type: "audio/wav; codecs=0" }); audioChunks = []; - + const transcript = await sttProvider.processAudio(audioBlob); - + // TODO: lock and release recording while processing? console.debug(DEBUG_PREFIX+"received transcript:", transcript); processTranscript(transcript); } - + mediaRecorder.ondataavailable = function(e) { audioChunks.push(e.data); } } - + let onError = function(err) { console.debug(DEBUG_PREFIX+"The following error occured: " + err); } - + navigator.mediaDevices.getUserMedia(constraints).then(onSuccess, onError); - + } else { console.debug(DEBUG_PREFIX+"getUserMedia not supported on your browser!"); toastr.error("getUserMedia not supported", DEBUG_PREFIX+"not supported for your browser.", { timeOut: 10000, extendedTimeOut: 20000, preventDuplicates: true }); @@ -257,7 +258,7 @@ function loadSttProvider(provider) { console.warn(`Provider ${sttProviderName} not in Extension Settings, initiatilizing provider in settings`); extension_settings.speech_recognition[sttProviderName] = {}; } - + $('#speech_recognition_provider').val(sttProviderName); if (sttProviderName == "None") { @@ -287,13 +288,13 @@ function loadSttProvider(provider) { loadNavigatorAudioRecording(); $("#microphone_button").show(); } - + if (sttProviderName == "Streaming") { sttProvider.loadSettings(extension_settings.speech_recognition[sttProviderName]); $("#microphone_button").off('click'); $("#microphone_button").hide(); } - + } function onSttProviderChange() { @@ -365,7 
+366,7 @@ async function onMessageMappingChange() { console.debug(DEBUG_PREFIX+"Wrong syntax for message mapping, no '=' found in:", text); } } - + $("#speech_recognition_message_mapping_status").text("Message mapping updated to: "+JSON.stringify(extension_settings.speech_recognition.messageMapping)) console.debug(DEBUG_PREFIX+"Updated message mapping", extension_settings.speech_recognition.messageMapping); extension_settings.speech_recognition.messageMappingText = $('#speech_recognition_message_mapping').val() @@ -425,7 +426,7 @@ $(document).ready(function () { $('#speech_recognition_message_mode').on('change', onMessageModeChange); $('#speech_recognition_message_mapping').on('change', onMessageMappingChange); $('#speech_recognition_message_mapping_enabled').on('click', onMessageMappingEnabledClick); - + const $button = $('
    '); $('#send_but_sheld').prepend($button); diff --git a/public/scripts/group-chats.js b/public/scripts/group-chats.js index ab6a1e0fd..589809df1 100644 --- a/public/scripts/group-chats.js +++ b/public/scripts/group-chats.js @@ -9,7 +9,7 @@ import { saveBase64AsFile, PAGINATION_TEMPLATE, } from './utils.js'; -import { RA_CountCharTokens, humanizedDateTime, dragElement, favsToHotswap } from "./RossAscends-mods.js"; +import { RA_CountCharTokens, humanizedDateTime, dragElement, favsToHotswap, getMessageTimeStamp } from "./RossAscends-mods.js"; import { loadMovingUIState, sortEntitiesList } from './power-user.js'; import { @@ -202,7 +202,7 @@ function getFirstCharacterMessage(character) { mes["is_system"] = false; mes["name"] = character.name; mes["is_name"] = true; - mes["send_date"] = humanizedDateTime(); + mes["send_date"] = getMessageTimeStamp(); mes["original_avatar"] = character.avatar; mes["extra"] = { "gen_id": Date.now() * Math.random() * 1000000 }; mes["mes"] = messageText diff --git a/public/scripts/slash-commands.js b/public/scripts/slash-commands.js index 73aa98e25..eb2c2e15d 100644 --- a/public/scripts/slash-commands.js +++ b/public/scripts/slash-commands.js @@ -22,7 +22,7 @@ import { reloadCurrentChat, sendMessageAsUser, } from "../script.js"; -import { humanizedDateTime } from "./RossAscends-mods.js"; +import { getMessageTimeStamp } from "./RossAscends-mods.js"; import { resetSelectedGroup } from "./group-chats.js"; import { getRegexedString, regex_placement } from "./extensions/regex/engine.js"; import { chat_styles, power_user } from "./power-user.js"; @@ -327,7 +327,7 @@ async function sendMessageAs(_, text) { is_user: false, is_name: true, is_system: isSystem, - send_date: humanizedDateTime(), + send_date: getMessageTimeStamp(), mes: substituteParams(mesText), force_avatar: force_avatar, original_avatar: original_avatar, @@ -358,7 +358,7 @@ async function sendNarratorMessage(_, text) { is_user: false, is_name: false, is_system: isSystem, - 
send_date: humanizedDateTime(), + send_date: getMessageTimeStamp(), mes: substituteParams(text.trim()), force_avatar: system_avatar, extra: { @@ -384,7 +384,7 @@ async function sendCommentMessage(_, text) { is_user: false, is_name: true, is_system: true, - send_date: humanizedDateTime(), + send_date: getMessageTimeStamp(), mes: substituteParams(text.trim()), force_avatar: comment_avatar, extra: { From 042c0b84a14b4b321f07fc8b4a5a9266407c290b Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Tue, 22 Aug 2023 18:32:18 +0300 Subject: [PATCH 22/32] Add a token counter for persona descriptions --- public/index.html | 5 ++++- public/script.js | 17 +++++++++++++++++ public/scripts/RossAscends-mods.js | 2 +- .../extensions/stable-diffusion/index.js | 5 ++--- 4 files changed, 24 insertions(+), 5 deletions(-) diff --git a/public/index.html b/public/index.html index 14327cb34..f2688ac2d 100644 --- a/public/index.html +++ b/public/index.html @@ -2981,7 +2981,10 @@

    Persona Description

    - + +
    + Tokens: 0 +
    From 0e378d93b2b9d9f4a1e58e15c4940e70a46f7b83 Mon Sep 17 00:00:00 2001 From: maver Date: Tue, 22 Aug 2023 20:37:37 +0200 Subject: [PATCH 26/32] Make quick edit fields static --- public/index.html | 19 +++++- public/scripts/PromptManager.js | 103 +++++++++++++++++++------------- 2 files changed, 77 insertions(+), 45 deletions(-) diff --git a/public/index.html b/public/index.html index f2688ac2d..991a3f513 100644 --- a/public/index.html +++ b/public/index.html @@ -1436,9 +1436,22 @@
    -
    -
    - Select a character to show quick edit options. +
    +
    Main
    +
    + +
    +
    +
    +
    NSFW
    +
    + +
    +
    +
    +
    Jailbreak
    +
    +
    diff --git a/public/scripts/PromptManager.js b/public/scripts/PromptManager.js index 9b2e6d99a..af36abb1a 100644 --- a/public/scripts/PromptManager.js +++ b/public/scripts/PromptManager.js @@ -513,14 +513,69 @@ PromptManagerModule.prototype.init = function (moduleConfiguration, serviceSetti }); } + // Fill quick edit fields for the first time + if ('global' === this.configuration.promptOrder.strategy) { + const handleQuickEditSave = (event) => { + const promptId = event.target.dataset.pmPrompt; + const prompt = this.getPromptById(promptId); + + prompt.content = event.target.value; + + // Update edit form if present + // @see https://developer.mozilla.org/en-US/docs/Web/API/HTMLElement/offsetParent + const popupEditFormPrompt = document.getElementById(this.configuration.prefix + 'prompt_manager_popup_entry_form_prompt'); + if (popupEditFormPrompt.offsetParent) { + popupEditFormPrompt.value = prompt.content; + } + + this.log('Saved prompt: ' + promptId); + this.saveServiceSettings().then(() => this.render()); + }; + + const mainPrompt = this.getPromptById('main'); + const mainElementId = this.updateQuickEdit('main', mainPrompt); + document.getElementById(mainElementId).addEventListener('blur', handleQuickEditSave); + + const nsfwPrompt = this.getPromptById('nsfw'); + const nsfwElementId = this.updateQuickEdit('nsfw', nsfwPrompt); + document.getElementById(nsfwElementId).addEventListener('blur', handleQuickEditSave); + + const jailbreakPrompt = this.getPromptById('jailbreak'); + const jailbreakElementId = this.updateQuickEdit('jailbreak', jailbreakPrompt); + document.getElementById(jailbreakElementId).addEventListener('blur', handleQuickEditSave); + } + // Re-render when chat history changes. 
eventSource.on(event_types.MESSAGE_DELETED, () => this.renderDebounced()); eventSource.on(event_types.MESSAGE_EDITED, () => this.renderDebounced()); eventSource.on(event_types.MESSAGE_RECEIVED, () => this.renderDebounced()); // Re-render when chatcompletion settings change - eventSource.on(event_types.CHATCOMPLETION_SOURCE_CHANGED, () => this.renderDebounced()); - eventSource.on(event_types.CHATCOMPLETION_MODEL_CHANGED, () => this.renderDebounced()); + eventSource.on(event_types.CHATCOMPLETION_SOURCE_CHANGED, () => { + this.renderDebounced(); + + const mainPrompt = this.getPromptById('main'); + this.updateQuickEdit('main', mainPrompt); + + const nsfwPrompt = this.getPromptById('nsfw'); + this.updateQuickEdit('nsfw', nsfwPrompt); + + const jailbreakPrompt = this.getPromptById('jailbreak'); + this.updateQuickEdit('jailbreak', jailbreakPrompt); + }); + + eventSource.on(event_types.CHATCOMPLETION_MODEL_CHANGED, () => { + this.renderDebounced(); + + const mainPrompt = this.getPromptById('main'); + this.updateQuickEdit('main', mainPrompt); + + const nsfwPrompt = this.getPromptById('nsfw'); + this.updateQuickEdit('nsfw', nsfwPrompt); + + const jailbreakPrompt = this.getPromptById('jailbreak'); + this.updateQuickEdit('jailbreak', jailbreakPrompt); + }); // Re-render when the character changes. 
eventSource.on('chatLoaded', (event) => { @@ -1016,8 +1071,11 @@ PromptManagerModule.prototype.createQuickEdit = function (identifier, title) { } PromptManagerModule.prototype.updateQuickEdit = function (identifier, prompt) { - const textarea = document.getElementById(`${identifier}_prompt_quick_edit_textarea`); + const elementId = `${identifier}_prompt_quick_edit_textarea`; + const textarea = document.getElementById(elementId); textarea.value = prompt.content; + + return elementId; } /** @@ -1312,48 +1370,9 @@ PromptManagerModule.prototype.renderPromptManager = function () { footerDiv.querySelector('#prompt-manager-export').addEventListener('click', showExportSelection); rangeBlockDiv.querySelector('.export-promptmanager-prompts-full').addEventListener('click', this.handleFullExport); rangeBlockDiv.querySelector('.export-promptmanager-prompts-character')?.addEventListener('click', this.handleCharacterExport); - - const quickEditContainer = document.getElementById('quick-edit-container'); - const heights = this.saveTextAreaHeights(quickEditContainer); - quickEditContainer.innerHTML = ''; - - this.createQuickEdit('jailbreak', 'Jailbreak'); - this.createQuickEdit('nsfw', 'NSFW'); - this.createQuickEdit('main', 'Main'); - - this.restoreTextAreaHeights(quickEditContainer, heights); } }; -/** - * Restores the height of each textarea in the container - * @param container The container to search for textareas - * @param heights An object with textarea ids as keys and heights as values - */ -PromptManagerModule.prototype.restoreTextAreaHeights = function(container, heights) { - if (Object.keys(heights).length === 0) return; - - $(container).find('textarea').each(function () { - const height = heights[this.id]; - if (height > 0) $(this).height(height); - }); -} - -/** - * Saves the current height of each textarea in the container - * @param container The container to search for textareas - * @returns {{}} An object with textarea ids as keys and heights as values - */ 
-PromptManagerModule.prototype.saveTextAreaHeights = function(container) { - const heights = {}; - - $(container).find('textarea').each(function () { - heights[this.id] = $(this).height(); - }); - - return heights; -} - /** * Empties, then re-assembles the prompt list */ From 48992d4f53a618794b260b9a91320eb8c861c1b2 Mon Sep 17 00:00:00 2001 From: maver Date: Tue, 22 Aug 2023 20:39:46 +0200 Subject: [PATCH 27/32] Prevent context profiler from dangling on error --- public/scripts/PromptManager.js | 1 + 1 file changed, 1 insertion(+) diff --git a/public/scripts/PromptManager.js b/public/scripts/PromptManager.js index af36abb1a..fd90dcc4e 100644 --- a/public/scripts/PromptManager.js +++ b/public/scripts/PromptManager.js @@ -664,6 +664,7 @@ PromptManagerModule.prototype.render = function (afterTryGenerate = true) { this.makeDraggable(); this.profileEnd('render'); }).catch(error => { + this.profileEnd('filling context'); this.log('Error caught during render: ' + error); this.renderPromptManager(); this.renderPromptManagerListItems() From 1ea7ad2572fe713e1998c624199831e9247dcaa1 Mon Sep 17 00:00:00 2001 From: maver Date: Tue, 22 Aug 2023 20:49:54 +0200 Subject: [PATCH 28/32] Update quick edit when oai preset changes --- public/scripts/PromptManager.js | 35 +++++++++++---------------------- 1 file changed, 11 insertions(+), 24 deletions(-) diff --git a/public/scripts/PromptManager.js b/public/scripts/PromptManager.js index fd90dcc4e..59909497f 100644 --- a/public/scripts/PromptManager.js +++ b/public/scripts/PromptManager.js @@ -551,31 +551,9 @@ PromptManagerModule.prototype.init = function (moduleConfiguration, serviceSetti eventSource.on(event_types.MESSAGE_RECEIVED, () => this.renderDebounced()); // Re-render when chatcompletion settings change - eventSource.on(event_types.CHATCOMPLETION_SOURCE_CHANGED, () => { - this.renderDebounced(); + eventSource.on(event_types.CHATCOMPLETION_SOURCE_CHANGED, () => this.renderDebounced()); - const mainPrompt = 
this.getPromptById('main'); - this.updateQuickEdit('main', mainPrompt); - - const nsfwPrompt = this.getPromptById('nsfw'); - this.updateQuickEdit('nsfw', nsfwPrompt); - - const jailbreakPrompt = this.getPromptById('jailbreak'); - this.updateQuickEdit('jailbreak', jailbreakPrompt); - }); - - eventSource.on(event_types.CHATCOMPLETION_MODEL_CHANGED, () => { - this.renderDebounced(); - - const mainPrompt = this.getPromptById('main'); - this.updateQuickEdit('main', mainPrompt); - - const nsfwPrompt = this.getPromptById('nsfw'); - this.updateQuickEdit('nsfw', nsfwPrompt); - - const jailbreakPrompt = this.getPromptById('jailbreak'); - this.updateQuickEdit('jailbreak', jailbreakPrompt); - }); + eventSource.on(event_types.CHATCOMPLETION_MODEL_CHANGED, () => this.renderDebounced()); // Re-render when the character changes. eventSource.on('chatLoaded', (event) => { @@ -632,6 +610,15 @@ PromptManagerModule.prototype.init = function (moduleConfiguration, serviceSetti this.hidePopup(); this.clearEditForm(); this.renderDebounced(); + + const mainPrompt = this.getPromptById('main'); + this.updateQuickEdit('main', mainPrompt); + + const nsfwPrompt = this.getPromptById('nsfw'); + this.updateQuickEdit('nsfw', nsfwPrompt); + + const jailbreakPrompt = this.getPromptById('jailbreak'); + this.updateQuickEdit('jailbreak', jailbreakPrompt); }); }); From fd95b79ae91647c84c40553f4ba0611eefa0fe0e Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Tue, 22 Aug 2023 22:45:12 +0300 Subject: [PATCH 29/32] Fix auto-translate plugin. 
Add new event types for post-rendering --- public/script.js | 44 +++++++++++++------- public/scripts/extensions.js | 26 ++++++------ public/scripts/extensions/translate/index.js | 4 +- public/scripts/slash-commands.js | 3 ++ public/scripts/utils.js | 41 +++++++++++++++--- public/scripts/world-info.js | 3 +- 6 files changed, 84 insertions(+), 37 deletions(-) diff --git a/public/script.js b/public/script.js index 4f67580f1..7ffcef682 100644 --- a/public/script.js +++ b/public/script.js @@ -136,7 +136,7 @@ import { PAGINATION_TEMPLATE, } from "./scripts/utils.js"; -import { extension_settings, getContext, loadExtensionSettings, registerExtensionHelper, runGenerationInterceptors, saveMetadataDebounced } from "./scripts/extensions.js"; +import { extension_settings, getContext, loadExtensionSettings, processExtensionHelpers, registerExtensionHelper, runGenerationInterceptors, saveMetadataDebounced } from "./scripts/extensions.js"; import { executeSlashCommands, getSlashCommandsHelp, registerSlashCommand } from "./scripts/slash-commands.js"; import { tag_map, @@ -278,10 +278,20 @@ export const event_types = { OAI_PRESET_CHANGED: 'oai_preset_changed', WORLDINFO_SETTINGS_UPDATED: 'worldinfo_settings_updated', CHARACTER_EDITED: 'character_edited', + USER_MESSAGE_RENDERED: 'user_message_rendered', + CHARACTER_MESSAGE_RENDERED: 'character_message_rendered', } export const eventSource = new EventEmitter(); +// Check for override warnings every 5 seconds... 
+setInterval(displayOverrideWarnings, 5000); +// ...or when the chat changes +eventSource.on(event_types.CHAT_CHANGED, displayOverrideWarnings); +eventSource.on(event_types.CHAT_CHANGED, setChatLockedPersona); +eventSource.on(event_types.MESSAGE_RECEIVED, processExtensionHelpers); +eventSource.on(event_types.MESSAGE_SENT, processExtensionHelpers); + const gpt3 = new GPT3BrowserTokenizer({ type: 'gpt3' }); hljs.addPlugin({ "before:highlightElement": ({ el }) => { el.textContent = el.innerText } }); @@ -2179,14 +2189,17 @@ class StreamingProcessor { async onFinishStreaming(messageId, text) { this.hideMessageButtons(this.messageId); - - const eventType = this.type !== 'impersonate' ? event_types.MESSAGE_RECEIVED : event_types.IMPERSONATE_READY; - const eventData = this.type !== 'impersonate' ? this.messageId : text; - await eventSource.emit(eventType, eventData); - this.onProgressStreaming(messageId, text, true); addCopyToCodeBlocks($(`#chat .mes[mesid="${messageId}"]`)); - saveChatConditional(); + + if (this.type !== 'impersonate') { + await eventSource.emit(event_types.MESSAGE_RECEIVED, this.messageId); + await eventSource.emit(event_types.CHARACTER_MESSAGE_RENDERED, this.messageId); + } else { + await eventSource.emit(event_types.IMPERSONATE_READY, text); + } + + await saveChatConditional(); activateSendButtons(); showSwipeButtons(); setGenerationProgress(0); @@ -3360,6 +3373,7 @@ export async function sendMessageAsUser(textareaText, messageBias) { // Wait for all handlers to finish before continuing with the prompt await eventSource.emit(event_types.MESSAGE_SENT, (chat.length - 1)); addOneMessage(chat[chat.length - 1]); + await eventSource.emit(event_types.USER_MESSAGE_RENDERED, (chat.length - 1)); console.debug('message sent as user'); } @@ -3913,6 +3927,7 @@ async function saveReply(type, getMessage, this_mes_is_name, title) { chat[chat.length - 1]['extra']['model'] = getGeneratingModel(); await eventSource.emit(event_types.MESSAGE_RECEIVED, (chat.length - 1)); 
addOneMessage(chat[chat.length - 1], { type: 'swipe' }); + await eventSource.emit(event_types.CHARACTER_MESSAGE_RENDERED, (chat.length - 1)); } else { chat[chat.length - 1]['mes'] = getMessage; } @@ -3928,6 +3943,7 @@ async function saveReply(type, getMessage, this_mes_is_name, title) { chat[chat.length - 1]["extra"]["model"] = getGeneratingModel(); await eventSource.emit(event_types.MESSAGE_RECEIVED, (chat.length - 1)); addOneMessage(chat[chat.length - 1], { type: 'swipe' }); + await eventSource.emit(event_types.CHARACTER_MESSAGE_RENDERED, (chat.length - 1)); } else if (type === 'appendFinal') { oldMessage = chat[chat.length - 1]['mes']; console.debug("Trying to appendFinal.") @@ -3940,6 +3956,7 @@ async function saveReply(type, getMessage, this_mes_is_name, title) { chat[chat.length - 1]["extra"]["model"] = getGeneratingModel(); await eventSource.emit(event_types.MESSAGE_RECEIVED, (chat.length - 1)); addOneMessage(chat[chat.length - 1], { type: 'swipe' }); + await eventSource.emit(event_types.CHARACTER_MESSAGE_RENDERED, (chat.length - 1)); } else { console.debug('entering chat update routine for non-swipe post'); @@ -3974,6 +3991,7 @@ async function saveReply(type, getMessage, this_mes_is_name, title) { saveImageToMessage(img, chat[chat.length - 1]); await eventSource.emit(event_types.MESSAGE_RECEIVED, (chat.length - 1)); addOneMessage(chat[chat.length - 1]); + await eventSource.emit(event_types.CHARACTER_MESSAGE_RENDERED, (chat.length - 1)); } const item = chat[chat.length - 1]; @@ -4428,6 +4446,7 @@ async function getChatResult() { if (chat.length === 1) { await eventSource.emit(event_types.MESSAGE_RECEIVED, (chat.length - 1)); + await eventSource.emit(event_types.CHARACTER_MESSAGE_RENDERED, (chat.length - 1)); } } @@ -5009,7 +5028,7 @@ function lockUserNameToChat() { updateUserLockIcon(); } -eventSource.on(event_types.CHAT_CHANGED, () => { +function setChatLockedPersona() { // Define a persona for this chat let chatPersona = ''; @@ -5051,7 +5070,7 @@ 
eventSource.on(event_types.CHAT_CHANGED, () => { // Persona avatar found, select it personaAvatar.trigger('click'); updateUserLockIcon(); -}); +} async function doOnboarding(avatarId) { const template = $('#onboarding_template .onboarding'); @@ -6593,6 +6612,7 @@ async function createOrEditCharacter(e) { //console.log('form create submission calling addOneMessage'); await eventSource.emit(event_types.MESSAGE_RECEIVED, (chat.length - 1)); addOneMessage(chat[0]); + await eventSource.emit(event_types.CHARACTER_MESSAGE_RENDERED, (chat.length - 1)); } } $("#create_button").removeAttr("disabled"); @@ -7020,12 +7040,6 @@ function connectAPISlash(_, text) { toastr.info(`API set to ${text}, trying to connect..`); } - -// Check for override warnings every 5 seconds... -setInterval(displayOverrideWarnings, 5000); -// ...or when the chat changes -eventSource.on(event_types.CHAT_CHANGED, displayOverrideWarnings); - function importCharacter(file) { const ext = file.name.match(/\.(\w+)$/); if ( diff --git a/public/scripts/extensions.js b/public/scripts/extensions.js index 70be2dc44..07d16cbc2 100644 --- a/public/scripts/extensions.js +++ b/public/scripts/extensions.js @@ -1,4 +1,4 @@ -import { callPopup, eventSource, event_types, saveSettings, saveSettingsDebounced, getRequestHeaders } from "../script.js"; +import { callPopup, eventSource, event_types, saveSettings, saveSettingsDebounced, getRequestHeaders, substituteParams } from "../script.js"; import { isSubsetOf, debounce, waitUntilCondition } from "./utils.js"; export { getContext, @@ -12,7 +12,7 @@ export { }; let extensionNames = []; -let manifests = []; +let manifests = {}; const defaultUrl = "http://localhost:5100"; export const saveMetadataDebounced = debounce(async () => await getContext().saveMetadata(), 1000); @@ -31,7 +31,7 @@ export function registerExtensionHelper(name, helper) { * Applies handlebars extension helpers to a message. * @param {number} messageId Message index in the chat. 
*/ -function processExtensionHelpers(messageId) { +export function processExtensionHelpers(messageId) { const context = getContext(); const message = context.chat[messageId]; @@ -40,12 +40,12 @@ function processExtensionHelpers(messageId) { } // Don't waste time if there are no mustaches - if (!message.mes.includes('{{')) { + if (!substituteParams(message.mes).includes('{{')) { return; } try { - const template = extensionsHandlebars.compile(message.mes, { noEscape: true }); + const template = extensionsHandlebars.compile(substituteParams(message.mes), { noEscape: true }); message.mes = template({}); } catch { // Ignore @@ -211,7 +211,10 @@ async function getManifests(names) { } else { reject(); } - }).catch(err => reject() && console.log('Could not load manifest.json for ' + name, err)); + }).catch(err => { + reject(); + console.log('Could not load manifest.json for ' + name, err); + }); }); promises.push(promise); @@ -268,9 +271,9 @@ async function activateExtensions() { async function connectClickHandler() { const baseUrl = $("#extensions_url").val(); - extension_settings.apiUrl = baseUrl; + extension_settings.apiUrl = String(baseUrl); const testApiKey = $("#extensions_api_key").val(); - extension_settings.apiKey = testApiKey; + extension_settings.apiKey = String(testApiKey); saveSettingsDebounced(); await connectToApi(baseUrl); } @@ -495,7 +498,7 @@ async function generateExtensionHtml(name, manifest, isActive, isDisabled, isExt * Gets extension data and generates the corresponding HTML for displaying the extension. * * @param {Array} extension - An array where the first element is the extension name and the second element is the extension manifest. - * @return {object} - An object with 'isExternal' indicating whether the extension is external, and 'extensionHtml' for the extension's HTML string. + * @return {Promise} - An object with 'isExternal' indicating whether the extension is external, and 'extensionHtml' for the extension's HTML string. 
*/ async function getExtensionData(extension) { const name = extension[0]; @@ -612,7 +615,7 @@ async function onDeleteClick() { * Fetches the version details of a specific extension. * * @param {string} extensionName - The name of the extension. - * @return {object} - An object containing the extension's version details. + * @return {Promise} - An object containing the extension's version details. * This object includes the currentBranchName, currentCommitHash, isUpToDate, and remoteUrl. * @throws {error} - If there is an error during the fetch operation, it logs the error to the console. */ @@ -669,9 +672,6 @@ jQuery(function () { setTimeout(async function () { addExtensionsButtonAndMenu(); $("#extensionsMenuButton").css("display", "flex"); - await waitUntilCondition(() => eventSource !== undefined, 1000, 100); - eventSource.on(event_types.MESSAGE_RECEIVED, processExtensionHelpers); - eventSource.on(event_types.MESSAGE_SENT, processExtensionHelpers); }, 100) $("#extensions_connect").on('click', connectClickHandler); diff --git a/public/scripts/extensions/translate/index.js b/public/scripts/extensions/translate/index.js index bb8b1ddb7..ac477ffe3 100644 --- a/public/scripts/extensions/translate/index.js +++ b/public/scripts/extensions/translate/index.js @@ -421,9 +421,9 @@ jQuery(() => { loadSettings(); - eventSource.on(event_types.MESSAGE_RECEIVED, handleIncomingMessage); + eventSource.on(event_types.CHARACTER_MESSAGE_RENDERED, handleIncomingMessage); eventSource.on(event_types.MESSAGE_SWIPED, handleIncomingMessage); - eventSource.on(event_types.MESSAGE_SENT, handleOutgoingMessage); + eventSource.on(event_types.USER_MESSAGE_RENDERED, handleOutgoingMessage); eventSource.on(event_types.IMPERSONATE_READY, handleImpersonateReady); eventSource.on(event_types.MESSAGE_EDITED, handleMessageEdit); diff --git a/public/scripts/slash-commands.js b/public/scripts/slash-commands.js index eb2c2e15d..294a61fb9 100644 --- a/public/scripts/slash-commands.js +++ 
b/public/scripts/slash-commands.js @@ -340,6 +340,7 @@ async function sendMessageAs(_, text) { chat.push(message); await eventSource.emit(event_types.MESSAGE_SENT, (chat.length - 1)); addOneMessage(message); + await eventSource.emit(event_types.USER_MESSAGE_RENDERED, (chat.length - 1)); saveChatConditional(); } @@ -371,6 +372,7 @@ async function sendNarratorMessage(_, text) { chat.push(message); await eventSource.emit(event_types.MESSAGE_SENT, (chat.length - 1)); addOneMessage(message); + await eventSource.emit(event_types.USER_MESSAGE_RENDERED, (chat.length - 1)); saveChatConditional(); } @@ -396,6 +398,7 @@ async function sendCommentMessage(_, text) { chat.push(message); await eventSource.emit(event_types.MESSAGE_SENT, (chat.length - 1)); addOneMessage(message); + await eventSource.emit(event_types.USER_MESSAGE_RENDERED, (chat.length - 1)); saveChatConditional(); } diff --git a/public/scripts/utils.js b/public/scripts/utils.js index 874552998..48140cbe6 100644 --- a/public/scripts/utils.js +++ b/public/scripts/utils.js @@ -1,8 +1,25 @@ import { getContext } from "./extensions.js"; import { getRequestHeaders } from "../script.js"; +/** + * Pagination status string template. + * @type {string} + */ export const PAGINATION_TEMPLATE = '<%= rangeStart %>-<%= rangeEnd %> of <%= totalNumber %>'; +/** + * Navigation options for pagination. + * @enum {number} + */ +export const navigation_option = { none: 0, previous: 1, last: 2, }; + +/** + * Determines if a value is unique in an array. + * @param {any} value Current value. + * @param {number} index Current index. + * @param {any} array The array being processed. + * @returns {boolean} True if the value is unique, false otherwise. + */ export function onlyUnique(value, index, array) { return array.indexOf(value) === index; } @@ -19,7 +36,10 @@ export function isDigitsOnly(str) { return /^\d+$/.test(str); } -// Increase delay on touch screens +/** + * Gets a drag delay for sortable elements. 
This is to prevent accidental drags when scrolling. + * @returns {number} The delay in milliseconds. 100ms for desktop, 750ms for mobile. + */ export function getSortableDelay() { return navigator.maxTouchPoints > 0 ? 750 : 100; } @@ -60,12 +80,23 @@ export function download(content, fileName, contentType) { a.click(); } +/** + * Fetches a file by URL and parses its contents as data URI. + * @param {string} url The URL to fetch. + * @param {any} params Fetch parameters. + * @returns {Promise} A promise that resolves to the data URI. + */ export async function urlContentToDataUri(url, params) { const response = await fetch(url, params); const blob = await response.blob(); - return await new Promise(callback => { - let reader = new FileReader(); - reader.onload = function () { callback(this.result); }; + return await new Promise((resolve, reject) => { + const reader = new FileReader(); + reader.onload = function () { + resolve(String(reader.result)); + }; + reader.onerror = function (error) { + reject(error); + }; reader.readAsDataURL(blob); }); } @@ -195,7 +226,7 @@ export function throttle(func, limit = 300) { /** * Checks if an element is in the viewport. - * @param {any[]} el The element to check. + * @param {Element} el The element to check. * @returns {boolean} True if the element is in the viewport, false otherwise. 
*/ export function isElementInViewport(el) { diff --git a/public/scripts/world-info.js b/public/scripts/world-info.js index 9c4118687..630dc735b 100644 --- a/public/scripts/world-info.js +++ b/public/scripts/world-info.js @@ -1,5 +1,5 @@ import { saveSettings, callPopup, substituteParams, getTokenCount, getRequestHeaders, chat_metadata, this_chid, characters, saveCharacterDebounced, menu_type, eventSource, event_types } from "../script.js"; -import { download, debounce, initScrollHeight, resetScrollHeight, parseJsonFile, extractDataFromPng, getFileBuffer, getCharaFilename, deepClone, getSortableDelay, escapeRegex, PAGINATION_TEMPLATE } from "./utils.js"; +import { download, debounce, initScrollHeight, resetScrollHeight, parseJsonFile, extractDataFromPng, getFileBuffer, getCharaFilename, deepClone, getSortableDelay, escapeRegex, PAGINATION_TEMPLATE, navigation_option } from "./utils.js"; import { getContext } from "./extensions.js"; import { NOTE_MODULE_NAME, metadata_keys, shouldWIAddPrompt } from "./authors-note.js"; import { registerSlashCommand } from "./slash-commands.js"; @@ -46,7 +46,6 @@ const saveSettingsDebounced = debounce(() => { saveSettings() }, 1000); const sortFn = (a, b) => b.order - a.order; -const navigation_option = { none: 0, previous: 1, last: 2, }; let updateEditor = (navigation) => { navigation; }; // Do not optimize. updateEditor is a function that is updated by the displayWorldEntries with new data. 
From 07a9e493dc272c97db66e134a0d187f786bc0a34 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Tue, 22 Aug 2023 23:03:55 +0300 Subject: [PATCH 30/32] Fix preset saving stacking for instruct --- public/scripts/preset-manager.js | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/public/scripts/preset-manager.js b/public/scripts/preset-manager.js index 4ebb55418..810ee070f 100644 --- a/public/scripts/preset-manager.js +++ b/public/scripts/preset-manager.js @@ -118,7 +118,7 @@ class PresetManager { async savePresetAs() { const popupText = `

    Preset name:

    -

    Hint: Use a character/group name to bind preset to a specific chat.

    `; + ${!this.isNonGenericApi() ? '

    Hint: Use a character/group name to bind preset to a specific chat.

    ' : ''}`; const name = await callPopup(popupText, "input"); if (!name) { @@ -131,7 +131,8 @@ class PresetManager { } async savePreset(name, settings) { - const preset = settings ?? this.getPresetSettings(); + const preset = settings ?? this.getPresetSettings(name); + const res = await fetch(`/save_preset`, { method: "POST", headers: getRequestHeaders(), @@ -220,7 +221,7 @@ class PresetManager { } } - getPresetSettings() { + getPresetSettings(name) { function getSettingsByApiId(apiId) { switch (apiId) { case "koboldhorde": @@ -232,7 +233,7 @@ class PresetManager { return textgenerationwebui_settings; case "instruct": const preset = deepClone(power_user.instruct); - preset['name'] = power_user.instruct.preset; + preset['name'] = name || power_user.instruct.preset; return preset; default: console.warn(`Unknown API ID ${apiId}`); @@ -346,7 +347,7 @@ jQuery(async () => { const selected = $(presetManager.select).find("option:selected"); const name = selected.text(); - const preset = presetManager.getPresetSettings(); + const preset = presetManager.getPresetSettings(name); const data = JSON.stringify(preset, null, 4); download(data, `${name}.json`, "application/json"); }); From 7250770c5dc97f619bf2520d67db0e46684dce33 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Tue, 22 Aug 2023 23:20:53 +0300 Subject: [PATCH 31/32] Don't reduce Claude token counts by 2 --- public/scripts/openai.js | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/public/scripts/openai.js b/public/scripts/openai.js index 621e094db..c01b1cf26 100644 --- a/public/scripts/openai.js +++ b/public/scripts/openai.js @@ -1437,6 +1437,11 @@ function countTokens(messages, full = false) { for (const message of messages) { const model = getTokenizerModel(); + + if (model === 'claude' || shouldTokenizeAI21) { + full = true; + } + const hash = getStringHash(JSON.stringify(message)); const cacheKey = `${model}-${hash}`; const cachedCount = 
tokenCache[chatId][cacheKey]; From bc5fc6790690697f99378ad5905fd76553455ea0 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Wed, 23 Aug 2023 02:38:43 +0300 Subject: [PATCH 32/32] Put tokenizer functions to a separate file. Cache local models token counts --- public/jsconfig.json | 3 +- public/script.js | 129 +------ public/scripts/RossAscends-mods.js | 4 +- public/scripts/authors-note.js | 2 +- .../extensions/infinity-context/index.js | 3 +- .../scripts/extensions/token-counter/index.js | 2 +- public/scripts/nai-settings.js | 4 +- public/scripts/openai.js | 163 +-------- public/scripts/power-user.js | 13 +- public/scripts/tokenizers.js | 342 ++++++++++++++++++ public/scripts/world-info.js | 3 +- 11 files changed, 360 insertions(+), 308 deletions(-) create mode 100644 public/scripts/tokenizers.js diff --git a/public/jsconfig.json b/public/jsconfig.json index 1fe7ce877..0751d9915 100644 --- a/public/jsconfig.json +++ b/public/jsconfig.json @@ -21,7 +21,8 @@ "showdown-katex", "droll", "handlebars", - "highlight.js" + "highlight.js", + "localforage" ] } } diff --git a/public/script.js b/public/script.js index 7ffcef682..3389cc096 100644 --- a/public/script.js +++ b/public/script.js @@ -1,7 +1,5 @@ import { humanizedDateTime, favsToHotswap, getMessageTimeStamp, dragElement, isMobile, } from "./scripts/RossAscends-mods.js"; import { userStatsHandler, statMesProcess } from './scripts/stats.js'; -import { encode } from "../lib/gpt-2-3-tokenizer/mod.js"; -import { GPT3BrowserTokenizer } from "../lib/gpt-3-tokenizer/gpt3-tokenizer.js"; import { generateKoboldWithStreaming, kai_settings, @@ -65,7 +63,6 @@ import { fixMarkdown, power_user, pygmalion_options, - tokenizers, persona_description_positions, loadMovingUIState, getCustomStoppingStrings, @@ -86,9 +83,7 @@ import { oai_settings, is_get_status_openai, openai_messages_count, - getTokenCountOpenAI, chat_completion_sources, - getTokenizerModel, getChatCompletionModel, } from 
"./scripts/openai.js"; @@ -172,6 +167,7 @@ import { autoSelectInstructPreset, } from "./scripts/instruct-mode.js"; import { applyLocale } from "./scripts/i18n.js"; +import { getTokenCount, getTokenizerModel, saveTokenCache } from "./scripts/tokenizers.js"; //exporting functions and vars for mods export { @@ -208,7 +204,6 @@ export { setGenerationProgress, updateChatMetadata, scrollChatToBottom, - getTokenCount, isStreamingEnabled, getThumbnailUrl, getStoppingStrings, @@ -292,7 +287,6 @@ eventSource.on(event_types.CHAT_CHANGED, setChatLockedPersona); eventSource.on(event_types.MESSAGE_RECEIVED, processExtensionHelpers); eventSource.on(event_types.MESSAGE_SENT, processExtensionHelpers); -const gpt3 = new GPT3BrowserTokenizer({ type: 'gpt3' }); hljs.addPlugin({ "before:highlightElement": ({ el }) => { el.textContent = el.innerText } }); // Markdown converter @@ -535,123 +529,6 @@ async function getClientVersion() { } } -function getTokenizerBestMatch() { - if (main_api === 'novel') { - if (nai_settings.model_novel.includes('krake') || nai_settings.model_novel.includes('euterpe')) { - return tokenizers.CLASSIC; - } - if (nai_settings.model_novel.includes('clio')) { - return tokenizers.NERD; - } - if (nai_settings.model_novel.includes('kayra')) { - return tokenizers.NERD2; - } - } - if (main_api === 'kobold' || main_api === 'textgenerationwebui' || main_api === 'koboldhorde') { - return tokenizers.LLAMA; - } - - return power_user.NONE; -} - -/** - * Gets the token count for a string using the current model tokenizer. - * @param {string} str String to tokenize - * @param {number | undefined} padding Optional padding tokens. Defaults to 0. - * @returns {number} Token count. 
- */ -function getTokenCount(str, padding = undefined) { - if (typeof str !== 'string' || !str?.length) { - return 0; - } - - let tokenizerType = power_user.tokenizer; - - if (main_api === 'openai') { - if (padding === power_user.token_padding) { - // For main "shadow" prompt building - tokenizerType = tokenizers.NONE; - } else { - // For extensions and WI - return getTokenCountOpenAI(str); - } - } - - if (tokenizerType === tokenizers.BEST_MATCH) { - tokenizerType = getTokenizerBestMatch(); - } - - if (padding === undefined) { - padding = 0; - } - - switch (tokenizerType) { - case tokenizers.NONE: - return Math.ceil(str.length / CHARACTERS_PER_TOKEN_RATIO) + padding; - case tokenizers.GPT3: - return gpt3.encode(str).bpe.length + padding; - case tokenizers.CLASSIC: - return encode(str).length + padding; - case tokenizers.LLAMA: - return countTokensRemote('/tokenize_llama', str, padding); - case tokenizers.NERD: - return countTokensRemote('/tokenize_nerdstash', str, padding); - case tokenizers.NERD2: - return countTokensRemote('/tokenize_nerdstash_v2', str, padding); - case tokenizers.API: - return countTokensRemote('/tokenize_via_api', str, padding); - default: - console.warn("Unknown tokenizer type", tokenizerType); - return Math.ceil(str.length / CHARACTERS_PER_TOKEN_RATIO) + padding; - } -} - -function countTokensRemote(endpoint, str, padding) { - let tokenCount = 0; - jQuery.ajax({ - async: false, - type: 'POST', - url: endpoint, - data: JSON.stringify({ text: str }), - dataType: "json", - contentType: "application/json", - success: function (data) { - tokenCount = data.count; - } - }); - return tokenCount + padding; -} - -function getTextTokensRemote(endpoint, str) { - let ids = []; - jQuery.ajax({ - async: false, - type: 'POST', - url: endpoint, - data: JSON.stringify({ text: str }), - dataType: "json", - contentType: "application/json", - success: function (data) { - ids = data.ids; - } - }); - return ids; -} - -export function getTextTokens(tokenizerType, 
str) { - switch (tokenizerType) { - case tokenizers.LLAMA: - return getTextTokensRemote('/tokenize_llama', str); - case tokenizers.NERD: - return getTextTokensRemote('/tokenize_nerdstash', str); - case tokenizers.NERD2: - return getTextTokensRemote('/tokenize_nerdstash_v2', str); - default: - console.warn("Calling getTextTokens with unsupported tokenizer type", tokenizerType); - return []; - } -} - function reloadMarkdownProcessor(render_formulas = false) { if (render_formulas) { converter = new showdown.Converter({ @@ -699,7 +576,6 @@ function getCurrentChatId() { } } -export const CHARACTERS_PER_TOKEN_RATIO = 3.35; const talkativeness_default = 0.5; var is_advanced_char_open = false; @@ -6132,6 +6008,9 @@ export async function saveChatConditional() { else { await saveChat(); } + + // Save token cache to IndexedDB storage + await saveTokenCache(); } async function importCharacterChat(formData) { diff --git a/public/scripts/RossAscends-mods.js b/public/scripts/RossAscends-mods.js index 222451067..1f7cdcfe9 100644 --- a/public/scripts/RossAscends-mods.js +++ b/public/scripts/RossAscends-mods.js @@ -2,15 +2,12 @@ esversion: 6 import { Generate, - this_chid, characters, online_status, main_api, api_server, api_server_textgenerationwebui, is_send_press, - getTokenCount, - menu_type, max_context, saveSettingsDebounced, active_group, @@ -35,6 +32,7 @@ import { } from "./secrets.js"; import { debounce, delay, getStringHash, waitUntilCondition } from "./utils.js"; import { chat_completion_sources, oai_settings } from "./openai.js"; +import { getTokenCount } from "./tokenizers.js"; var RPanelPin = document.getElementById("rm_button_panel_pin"); var LPanelPin = document.getElementById("lm_button_panel_pin"); diff --git a/public/scripts/authors-note.js b/public/scripts/authors-note.js index b58d6f947..03a2d1d1c 100644 --- a/public/scripts/authors-note.js +++ b/public/scripts/authors-note.js @@ -2,7 +2,6 @@ import { chat_metadata, eventSource, event_types, - getTokenCount, 
saveSettingsDebounced, this_chid, } from "../script.js"; @@ -10,6 +9,7 @@ import { selected_group } from "./group-chats.js"; import { extension_settings, getContext, saveMetadataDebounced } from "./extensions.js"; import { registerSlashCommand } from "./slash-commands.js"; import { getCharaFilename, debounce, waitUntilCondition, delay } from "./utils.js"; +import { getTokenCount } from "./tokenizers.js"; export { MODULE_NAME as NOTE_MODULE_NAME }; const MODULE_NAME = '2_floating_prompt'; // <= Deliberate, for sorting lower than memory diff --git a/public/scripts/extensions/infinity-context/index.js b/public/scripts/extensions/infinity-context/index.js index 7f3aba368..b06309e5b 100644 --- a/public/scripts/extensions/infinity-context/index.js +++ b/public/scripts/extensions/infinity-context/index.js @@ -1,6 +1,7 @@ -import { saveSettingsDebounced, getCurrentChatId, system_message_types, extension_prompt_types, eventSource, event_types, getRequestHeaders, CHARACTERS_PER_TOKEN_RATIO, substituteParams, max_context, } from "../../../script.js"; +import { saveSettingsDebounced, getCurrentChatId, system_message_types, extension_prompt_types, eventSource, event_types, getRequestHeaders, substituteParams, } from "../../../script.js"; import { humanizedDateTime } from "../../RossAscends-mods.js"; import { getApiUrl, extension_settings, getContext, doExtrasFetch } from "../../extensions.js"; +import { CHARACTERS_PER_TOKEN_RATIO } from "../../tokenizers.js"; import { getFileText, onlyUnique, splitRecursive } from "../../utils.js"; export { MODULE_NAME }; diff --git a/public/scripts/extensions/token-counter/index.js b/public/scripts/extensions/token-counter/index.js index 430fb5771..bdc8e19f0 100644 --- a/public/scripts/extensions/token-counter/index.js +++ b/public/scripts/extensions/token-counter/index.js @@ -1,6 +1,6 @@ import { callPopup, main_api } from "../../../script.js"; import { getContext } from "../../extensions.js"; -import { getTokenizerModel } from 
"../../openai.js"; +import { getTokenizerModel } from "../../tokenizers.js"; async function doTokenCounter() { const selectedTokenizer = main_api == 'openai' diff --git a/public/scripts/nai-settings.js b/public/scripts/nai-settings.js index 6d5a9d0bc..41fc87791 100644 --- a/public/scripts/nai-settings.js +++ b/public/scripts/nai-settings.js @@ -1,14 +1,14 @@ import { getRequestHeaders, getStoppingStrings, - getTextTokens, max_context, novelai_setting_names, saveSettingsDebounced, setGenerationParamsFromPreset } from "../script.js"; import { getCfgPrompt } from "./extensions/cfg/util.js"; -import { MAX_CONTEXT_DEFAULT, tokenizers } from "./power-user.js"; +import { MAX_CONTEXT_DEFAULT } from "./power-user.js"; +import { getTextTokens, tokenizers } from "./tokenizers.js"; import { getSortableDelay, getStringHash, diff --git a/public/scripts/openai.js b/public/scripts/openai.js index c01b1cf26..aef41beb8 100644 --- a/public/scripts/openai.js +++ b/public/scripts/openai.js @@ -48,10 +48,10 @@ import { delay, download, getFileText, getSortableDelay, - getStringHash, parseJsonFile, stringFormat, } from "./utils.js"; +import { countTokensOpenAI } from "./tokenizers.js"; export { is_get_status_openai, @@ -67,7 +67,6 @@ export { sendOpenAIRequest, setOpenAIOnlineStatus, getChatCompletionModel, - countTokens, TokenHandler, IdentifierNotFoundError, Message, @@ -124,40 +123,6 @@ const openrouter_website_model = 'OR_Website'; let biasCache = undefined; let model_list = []; -const objectStore = new localforage.createInstance({ name: "SillyTavern_ChatCompletions" }); - -let tokenCache = {}; - -async function loadTokenCache() { - try { - console.debug('Chat Completions: loading token cache') - tokenCache = await objectStore.getItem('tokenCache') || {}; - } catch (e) { - console.log('Chat Completions: unable to load token cache, using default value', e); - tokenCache = {}; - } -} - -async function saveTokenCache() { - try { - console.debug('Chat Completions: saving token cache') - 
await objectStore.setItem('tokenCache', tokenCache); - } catch (e) { - console.log('Chat Completions: unable to save token cache', e); - } -} - -async function resetTokenCache() { - try { - console.debug('Chat Completions: resetting token cache'); - Object.keys(tokenCache).forEach(key => delete tokenCache[key]); - await objectStore.removeItem('tokenCache'); - } catch (e) { - console.log('Chat Completions: unable to reset token cache', e); - } -} - -window['resetTokenCache'] = resetTokenCache; export const chat_completion_sources = { OPENAI: 'openai', @@ -268,10 +233,6 @@ const oai_settings = { let openai_setting_names; let openai_settings; -export function getTokenCountOpenAI(text) { - const message = { role: 'system', content: text }; - return countTokens(message, true); -} let promptManager = null; @@ -871,8 +832,6 @@ function prepareOpenAIMessages({ const chat = chatCompletion.getChat(); openai_messages_count = chat.filter(x => x?.role === "user" || x?.role === "assistant")?.length || 0; - // Save token cache to IndexedDB storage (async, no need to await) - saveTokenCache(); return [chat, promptManager.tokenHandler.counts]; } @@ -1410,68 +1369,8 @@ class TokenHandler { } } -function countTokens(messages, full = false) { - let shouldTokenizeAI21 = oai_settings.chat_completion_source === chat_completion_sources.AI21 && oai_settings.use_ai21_tokenizer; - let chatId = 'undefined'; - try { - if (selected_group) { - chatId = groups.find(x => x.id == selected_group)?.chat_id; - } - else if (this_chid) { - chatId = characters[this_chid].chat; - } - } catch { - console.log('No character / group selected. 
Using default cache item'); - } - - if (typeof tokenCache[chatId] !== 'object') { - tokenCache[chatId] = {}; - } - - if (!Array.isArray(messages)) { - messages = [messages]; - } - - let token_count = -1; - - for (const message of messages) { - const model = getTokenizerModel(); - - if (model === 'claude' || shouldTokenizeAI21) { - full = true; - } - - const hash = getStringHash(JSON.stringify(message)); - const cacheKey = `${model}-${hash}`; - const cachedCount = tokenCache[chatId][cacheKey]; - - if (typeof cachedCount === 'number') { - token_count += cachedCount; - } - - else { - jQuery.ajax({ - async: false, - type: 'POST', // - url: shouldTokenizeAI21 ? '/tokenize_ai21' : `/tokenize_openai?model=${model}`, - data: JSON.stringify([message]), - dataType: "json", - contentType: "application/json", - success: function (data) { - token_count += Number(data.token_count); - tokenCache[chatId][cacheKey] = Number(data.token_count); - } - }); - } - } - - if (!full) token_count -= 2; - - return token_count; -} - -const tokenHandler = new TokenHandler(countTokens); +const tokenHandler = new TokenHandler(countTokensOpenAI); // Thrown by ChatCompletion when a requested prompt couldn't be found. class IdentifierNotFoundError extends Error { @@ -1908,62 +1807,6 @@ class ChatCompletion { } } -export function getTokenizerModel() { - // OpenAI models always provide their own tokenizer - if (oai_settings.chat_completion_source == chat_completion_sources.OPENAI) { - return oai_settings.openai_model; - } - - const turboTokenizer = 'gpt-3.5-turbo'; - const gpt4Tokenizer = 'gpt-4'; - const gpt2Tokenizer = 'gpt2'; - const claudeTokenizer = 'claude'; - - // Assuming no one would use it for different models.. right? 
- if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) { - return gpt4Tokenizer; - } - - // Select correct tokenizer for WindowAI proxies - if (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI && oai_settings.windowai_model) { - if (oai_settings.windowai_model.includes('gpt-4')) { - return gpt4Tokenizer; - } - else if (oai_settings.windowai_model.includes('gpt-3.5-turbo')) { - return turboTokenizer; - } - else if (oai_settings.windowai_model.includes('claude')) { - return claudeTokenizer; - } - else if (oai_settings.windowai_model.includes('GPT-NeoXT')) { - return gpt2Tokenizer; - } - } - - // And for OpenRouter (if not a site model, then it's impossible to determine the tokenizer) - if (oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER && oai_settings.openrouter_model) { - if (oai_settings.openrouter_model.includes('gpt-4')) { - return gpt4Tokenizer; - } - else if (oai_settings.openrouter_model.includes('gpt-3.5-turbo')) { - return turboTokenizer; - } - else if (oai_settings.openrouter_model.includes('claude')) { - return claudeTokenizer; - } - else if (oai_settings.openrouter_model.includes('GPT-NeoXT')) { - return gpt2Tokenizer; - } - } - - if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) { - return claudeTokenizer; - } - - // Default to Turbo 3.5 - return turboTokenizer; -} - function loadOpenAISettings(data, settings) { openai_setting_names = data.openai_setting_names; openai_settings = data.openai_settings; @@ -3036,8 +2879,6 @@ function updateScaleForm() { } $(document).ready(async function () { - await loadTokenCache(); - $('#test_api_button').on('click', testApiConnection); $('#scale-alt').on('change', function () { diff --git a/public/scripts/power-user.js b/public/scripts/power-user.js index 9ccbd7db9..a0365cc0f 100644 --- a/public/scripts/power-user.js +++ b/public/scripts/power-user.js @@ -23,6 +23,7 @@ import { import { loadInstructMode } from 
"./instruct-mode.js"; import { registerSlashCommand } from "./slash-commands.js"; +import { tokenizers } from "./tokenizers.js"; import { delay } from "./utils.js"; @@ -35,7 +36,6 @@ export { fixMarkdown, power_user, pygmalion_options, - tokenizers, send_on_enter_options, }; @@ -63,17 +63,6 @@ const pygmalion_options = { ENABLED: 1, } -const tokenizers = { - NONE: 0, - GPT3: 1, - CLASSIC: 2, - LLAMA: 3, - NERD: 4, - NERD2: 5, - API: 6, - BEST_MATCH: 99, -} - const send_on_enter_options = { DISABLED: -1, AUTO: 0, diff --git a/public/scripts/tokenizers.js b/public/scripts/tokenizers.js new file mode 100644 index 000000000..2714c12ef --- /dev/null +++ b/public/scripts/tokenizers.js @@ -0,0 +1,342 @@ +import { characters, main_api, nai_settings, this_chid } from "../script.js"; +import { power_user } from "./power-user.js"; +import { encode } from "../lib/gpt-2-3-tokenizer/mod.js"; +import { GPT3BrowserTokenizer } from "../lib/gpt-3-tokenizer/gpt3-tokenizer.js"; +import { chat_completion_sources, oai_settings } from "./openai.js"; +import { groups, selected_group } from "./group-chats.js"; +import { getStringHash } from "./utils.js"; + +export const CHARACTERS_PER_TOKEN_RATIO = 3.35; + +export const tokenizers = { + NONE: 0, + GPT3: 1, + CLASSIC: 2, + LLAMA: 3, + NERD: 4, + NERD2: 5, + API: 6, + BEST_MATCH: 99, +}; + +const objectStore = new localforage.createInstance({ name: "SillyTavern_ChatCompletions" }); +const gpt3 = new GPT3BrowserTokenizer({ type: 'gpt3' }); + +let tokenCache = {}; + +async function loadTokenCache() { + try { + console.debug('Chat Completions: loading token cache') + tokenCache = await objectStore.getItem('tokenCache') || {}; + } catch (e) { + console.log('Chat Completions: unable to load token cache, using default value', e); + tokenCache = {}; + } +} + +export async function saveTokenCache() { + try { + console.debug('Chat Completions: saving token cache') + await objectStore.setItem('tokenCache', tokenCache); + } catch (e) { + 
console.log('Chat Completions: unable to save token cache', e); + } +} + +async function resetTokenCache() { + try { + console.debug('Chat Completions: resetting token cache'); + Object.keys(tokenCache).forEach(key => delete tokenCache[key]); + await objectStore.removeItem('tokenCache'); + } catch (e) { + console.log('Chat Completions: unable to reset token cache', e); + } +} + +window['resetTokenCache'] = resetTokenCache; + +function getTokenizerBestMatch() { + if (main_api === 'novel') { + if (nai_settings.model_novel.includes('krake') || nai_settings.model_novel.includes('euterpe')) { + return tokenizers.CLASSIC; + } + if (nai_settings.model_novel.includes('clio')) { + return tokenizers.NERD; + } + if (nai_settings.model_novel.includes('kayra')) { + return tokenizers.NERD2; + } + } + if (main_api === 'kobold' || main_api === 'textgenerationwebui' || main_api === 'koboldhorde') { + return tokenizers.LLAMA; + } + + return tokenizers.NONE; +} + +/** + * Gets the token count for a string using the current model tokenizer. + * @param {string} str String to tokenize + * @param {number | undefined} padding Optional padding tokens. Defaults to 0. + * @returns {number} Token count. + */ +export function getTokenCount(str, padding = undefined) { + /** + * Calculates the token count for a string. + * @param {number} [type] Tokenizer type. + * @returns {number} Token count. 
+ */ + function calculate(type) { + switch (type) { + case tokenizers.NONE: + return Math.ceil(str.length / CHARACTERS_PER_TOKEN_RATIO) + padding; + case tokenizers.GPT3: + return gpt3.encode(str).bpe.length + padding; + case tokenizers.CLASSIC: + return encode(str).length + padding; + case tokenizers.LLAMA: + return countTokensRemote('/tokenize_llama', str, padding); + case tokenizers.NERD: + return countTokensRemote('/tokenize_nerdstash', str, padding); + case tokenizers.NERD2: + return countTokensRemote('/tokenize_nerdstash_v2', str, padding); + case tokenizers.API: + return countTokensRemote('/tokenize_via_api', str, padding); + default: + console.warn("Unknown tokenizer type", type); + return calculate(tokenizers.NONE); + } + } + + if (typeof str !== 'string' || !str?.length) { + return 0; + } + + let tokenizerType = power_user.tokenizer; + + if (main_api === 'openai') { + if (padding === power_user.token_padding) { + // For main "shadow" prompt building + tokenizerType = tokenizers.NONE; + } else { + // For extensions and WI + return counterWrapperOpenAI(str); + } + } + + if (tokenizerType === tokenizers.BEST_MATCH) { + tokenizerType = getTokenizerBestMatch(); + } + + if (padding === undefined) { + padding = 0; + } + + const cacheObject = getTokenCacheObject(); + const hash = getStringHash(str); + const cacheKey = `${tokenizerType}-${hash}`; + + if (typeof cacheObject[cacheKey] === 'number') { + return cacheObject[cacheKey]; + } + + const result = calculate(tokenizerType); + + if (isNaN(result)) { + console.warn("Token count calculation returned NaN"); + return 0; + } + + cacheObject[cacheKey] = result; + return result; +} + +/** + * Gets the token count for a string using the OpenAI tokenizer. + * @param {string} text Text to tokenize. + * @returns {number} Token count. 
+ */ +function counterWrapperOpenAI(text) { + const message = { role: 'system', content: text }; + return countTokensOpenAI(message, true); +} + +export function getTokenizerModel() { + // OpenAI models always provide their own tokenizer + if (oai_settings.chat_completion_source == chat_completion_sources.OPENAI) { + return oai_settings.openai_model; + } + + const turboTokenizer = 'gpt-3.5-turbo'; + const gpt4Tokenizer = 'gpt-4'; + const gpt2Tokenizer = 'gpt2'; + const claudeTokenizer = 'claude'; + + // Assuming no one would use it for different models.. right? + if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) { + return gpt4Tokenizer; + } + + // Select correct tokenizer for WindowAI proxies + if (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI && oai_settings.windowai_model) { + if (oai_settings.windowai_model.includes('gpt-4')) { + return gpt4Tokenizer; + } + else if (oai_settings.windowai_model.includes('gpt-3.5-turbo')) { + return turboTokenizer; + } + else if (oai_settings.windowai_model.includes('claude')) { + return claudeTokenizer; + } + else if (oai_settings.windowai_model.includes('GPT-NeoXT')) { + return gpt2Tokenizer; + } + } + + // And for OpenRouter (if not a site model, then it's impossible to determine the tokenizer) + if (oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER && oai_settings.openrouter_model) { + if (oai_settings.openrouter_model.includes('gpt-4')) { + return gpt4Tokenizer; + } + else if (oai_settings.openrouter_model.includes('gpt-3.5-turbo')) { + return turboTokenizer; + } + else if (oai_settings.openrouter_model.includes('claude')) { + return claudeTokenizer; + } + else if (oai_settings.openrouter_model.includes('GPT-NeoXT')) { + return gpt2Tokenizer; + } + } + + if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) { + return claudeTokenizer; + } + + // Default to Turbo 3.5 + return turboTokenizer; +} + +/** + * @param {any[] | Object} 
messages + */ +export function countTokensOpenAI(messages, full = false) { + const shouldTokenizeAI21 = oai_settings.chat_completion_source === chat_completion_sources.AI21 && oai_settings.use_ai21_tokenizer; + const cacheObject = getTokenCacheObject(); + + if (!Array.isArray(messages)) { + messages = [messages]; + } + + let token_count = -1; + + for (const message of messages) { + const model = getTokenizerModel(); + + if (model === 'claude' || shouldTokenizeAI21) { + full = true; + } + + const hash = getStringHash(JSON.stringify(message)); + const cacheKey = `${model}-${hash}`; + const cachedCount = cacheObject[cacheKey]; + + if (typeof cachedCount === 'number') { + token_count += cachedCount; + } + + else { + jQuery.ajax({ + async: false, + type: 'POST', // + url: shouldTokenizeAI21 ? '/tokenize_ai21' : `/tokenize_openai?model=${model}`, + data: JSON.stringify([message]), + dataType: "json", + contentType: "application/json", + success: function (data) { + token_count += Number(data.token_count); + cacheObject[cacheKey] = Number(data.token_count); + } + }); + } + } + + if (!full) token_count -= 2; + + return token_count; +} + +/** + * Gets the token cache object for the current chat. + * @returns {Object} Token cache object for the current chat. + */ +function getTokenCacheObject() { + let chatId = 'undefined'; + + try { + if (selected_group) { + chatId = groups.find(x => x.id == selected_group)?.chat_id; + } + else if (this_chid !== undefined) { + chatId = characters[this_chid].chat; + } + } catch { + console.log('No character / group selected. 
Using default cache item'); + } + + if (typeof tokenCache[chatId] !== 'object') { + tokenCache[chatId] = {}; + } + + return tokenCache[String(chatId)]; +} + +function countTokensRemote(endpoint, str, padding) { + let tokenCount = 0; + jQuery.ajax({ + async: false, + type: 'POST', + url: endpoint, + data: JSON.stringify({ text: str }), + dataType: "json", + contentType: "application/json", + success: function (data) { + tokenCount = data.count; + } + }); + return tokenCount + padding; +} + +function getTextTokensRemote(endpoint, str) { + let ids = []; + jQuery.ajax({ + async: false, + type: 'POST', + url: endpoint, + data: JSON.stringify({ text: str }), + dataType: "json", + contentType: "application/json", + success: function (data) { + ids = data.ids; + } + }); + return ids; +} + +export function getTextTokens(tokenizerType, str) { + switch (tokenizerType) { + case tokenizers.LLAMA: + return getTextTokensRemote('/tokenize_llama', str); + case tokenizers.NERD: + return getTextTokensRemote('/tokenize_nerdstash', str); + case tokenizers.NERD2: + return getTextTokensRemote('/tokenize_nerdstash_v2', str); + default: + console.warn("Calling getTextTokens with unsupported tokenizer type", tokenizerType); + return []; + } +} + +jQuery(async () => { + await loadTokenCache(); +}); diff --git a/public/scripts/world-info.js b/public/scripts/world-info.js index 630dc735b..9674914f6 100644 --- a/public/scripts/world-info.js +++ b/public/scripts/world-info.js @@ -1,10 +1,11 @@ -import { saveSettings, callPopup, substituteParams, getTokenCount, getRequestHeaders, chat_metadata, this_chid, characters, saveCharacterDebounced, menu_type, eventSource, event_types } from "../script.js"; +import { saveSettings, callPopup, substituteParams, getRequestHeaders, chat_metadata, this_chid, characters, saveCharacterDebounced, menu_type, eventSource, event_types } from "../script.js"; import { download, debounce, initScrollHeight, resetScrollHeight, parseJsonFile, extractDataFromPng, 
getFileBuffer, getCharaFilename, deepClone, getSortableDelay, escapeRegex, PAGINATION_TEMPLATE, navigation_option } from "./utils.js"; import { getContext } from "./extensions.js"; import { NOTE_MODULE_NAME, metadata_keys, shouldWIAddPrompt } from "./authors-note.js"; import { registerSlashCommand } from "./slash-commands.js"; import { deviceInfo } from "./RossAscends-mods.js"; import { FILTER_TYPES, FilterHelper } from "./filters.js"; +import { getTokenCount } from "./tokenizers.js"; export { world_info,