From 9daac1ce4d1ce49aa98610a9b146be3ce072f5f2 Mon Sep 17 00:00:00 2001 From: RossAscends <124905043+RossAscends@users.noreply.github.com> Date: Sat, 13 May 2023 23:29:20 +0900 Subject: [PATCH 01/64] per-message prompt itemization display --- public/index.html | 1 + public/script.js | 244 ++++++++++++++++++++++++++++++++-------------- public/style.css | 16 ++- 3 files changed, 189 insertions(+), 72 deletions(-) diff --git a/public/index.html b/public/index.html index fe3b2d347..cf8cfc2a2 100644 --- a/public/index.html +++ b/public/index.html @@ -2327,6 +2327,7 @@ ${characterName}
+
diff --git a/public/script.js b/public/script.js index 579f7c830..8b3116cbb 100644 --- a/public/script.js +++ b/public/script.js @@ -203,6 +203,9 @@ hljs.addPlugin({ "before:highlightElement": ({ el }) => { el.textContent = el.in let converter; reloadMarkdownProcessor(); +// array for prompt token calculations +let itemizedPrompts = []; + /* let bg_menu_toggle = false; */ export const systemUserName = "SillyTavern System"; let default_user_name = "You"; @@ -1126,6 +1129,27 @@ function addOneMessage(mes, { type = "normal", insertAfter = null, scroll = true if (isSystem) { newMessage.find(".mes_edit").hide(); + newMessage.find(".mes_prompt").hide(); //dont'd need prompt display for sys messages + } + + // don't need prompt butons for user messages + if (params.isUser === true) { + newMessage.find(".mes_prompt").hide(); + } + + //shows or hides the Prompt display button + let mesIdToFind = Number(newMessage.attr('mesId')); + if (itemizedPrompts.length !== 0) { + for (var i = 0; i < itemizedPrompts.length; i++) { + if (itemizedPrompts[i].mesId === mesIdToFind) { + newMessage.find(".mes_prompt").show(); + } else { + console.log('no cache found for mesID, hiding prompt button and continuing search'); + newMessage.find(".mes_prompt").hide(); + } + } + } else { //hide all when prompt cache is empty + $(".mes_prompt").hide(); } newMessage.find('.avatar img').on('error', function () { @@ -2073,7 +2097,8 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, mesSendString = '\n' + mesSendString; //mesSendString = mesSendString; //This edit simply removes the first "" that is prepended to all context prompts } - let finalPromt = worldInfoBefore + + let finalPromt = + worldInfoBefore + storyString + worldInfoAfter + afterScenarioAnchor + @@ -2082,55 +2107,30 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, generatedPromtCache + promptBias; - /* let finalPromptTokens = getTokenCount(finalPromt); - let allAnchorsTokens = getTokenCount(allAnchors); - let afterScenarioAnchorTokens = getTokenCount(afterScenarioAnchor); - let zeroDepthAnchorTokens = getTokenCount(afterScenarioAnchor); - let worldInfoStringTokens = getTokenCount(worldInfoString); - let storyStringTokens = getTokenCount(storyString); - let examplesStringTokens = getTokenCount(examplesString); - let charPersonalityTokens = getTokenCount(charPersonality); - let charDescriptionTokens = getTokenCount(charDescription); - let scenarioTextTokens = getTokenCount(scenarioText); - let promptBiasTokens = getTokenCount(promptBias); - let mesSendStringTokens = getTokenCount(mesSendString) - let ActualChatHistoryTokens = mesSendStringTokens - allAnchorsTokens + power_user.token_padding; - - let totalTokensInPrompt = - allAnchorsTokens + // AN and/or legacy anchors - //afterScenarioAnchorTokens + //only counts if AN is set to 'after scenario' - //zeroDepthAnchorTokens + //same as above, even if AN not on 0 depth - worldInfoStringTokens + - storyStringTokens + //chardefs total - promptBiasTokens + //{{}} - ActualChatHistoryTokens + //chat history - power_user.token_padding; - - console.log( - ` - Prompt Itemization - ------------------- - Extension Add-ins AN: ${allAnchorsTokens} - - World Info: ${worldInfoStringTokens} - - Character Definitions: ${storyStringTokens} - -- Description: ${charDescriptionTokens} - -- Example Messages: ${examplesStringTokens} - -- Character Personality: ${charPersonalityTokens} - -- Character Scenario: ${scenarioTextTokens} - - Chat History: 
${ActualChatHistoryTokens} - {{}} Bias: ${promptBiasTokens} - Padding: ${power_user.token_padding} - ------------------- - Total Tokens in Prompt: ${totalTokensInPrompt} - vs - finalPrompt: ${finalPromptTokens} - Max Context: ${this_max_context} - - ` - ); */ + //set array object for prompt token itemization of this message + let thisPromptBits = { + mesId: count_view_mes, + worldInfoBefore: worldInfoBefore, + allAnchors: allAnchors, + worldInfoString: worldInfoString, + storyString: storyString, + worldInfoAfter: worldInfoAfter, + afterScenarioAnchor: afterScenarioAnchor, + examplesString: examplesString, + mesSendString: mesSendString, + generatedPromtCache: generatedPromtCache, + promptBias: promptBias, + finalPromt: finalPromt, + charDescription: charDescription, + charPersonality: charPersonality, + scenarioText: scenarioText, + promptBias: promptBias, + storyString: storyString, + this_max_context: this_max_context, + padding: power_user.token_padding + } + + itemizedPrompts.push(thisPromptBits); if (zeroDepthAnchor && zeroDepthAnchor.length) { if (!isMultigenEnabled() || tokens_already_generated == 0) { @@ -2407,6 +2407,122 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, //console.log('generate ending'); } //generate ends +function promptItemize(itemizedPrompts, requestedMesId) { + let incomingMesId = Number(requestedMesId); + let thisPromptSet = undefined; + + for (var i = 0; i < itemizedPrompts.length; i++) { + if (itemizedPrompts[i].mesId === incomingMesId) { + thisPromptSet = i; + } + } + + if (thisPromptSet === undefined) { + console.log(`couldnt find the right mesId. looked for ${incomingMesId}`); + console.log(itemizedPrompts); + return null; + } + + let finalPromptTokens = getTokenCount(itemizedPrompts[thisPromptSet].finalPromt); + let allAnchorsTokens = getTokenCount(itemizedPrompts[thisPromptSet].allAnchors); + let afterScenarioAnchorTokens = getTokenCount(itemizedPrompts[thisPromptSet].afterScenarioAnchor); + let zeroDepthAnchorTokens = getTokenCount(itemizedPrompts[thisPromptSet].afterScenarioAnchor); + let worldInfoStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].worldInfoString); + let storyStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].storyString); + let examplesStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].examplesString); + let charPersonalityTokens = getTokenCount(itemizedPrompts[thisPromptSet].charPersonality); + let charDescriptionTokens = getTokenCount(itemizedPrompts[thisPromptSet].charDescription); + let scenarioTextTokens = getTokenCount(itemizedPrompts[thisPromptSet].scenarioText); + let promptBiasTokens = getTokenCount(itemizedPrompts[thisPromptSet].promptBias); + let mesSendStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].mesSendString) + let ActualChatHistoryTokens = mesSendStringTokens - allAnchorsTokens + power_user.token_padding; + let thisPrompt_max_context = itemizedPrompts[thisPromptSet].this_max_context; + let thisPrompt_padding = itemizedPrompts[thisPromptSet].padding; + + let totalTokensInPrompt = + storyStringTokens + //chardefs total + worldInfoStringTokens + + ActualChatHistoryTokens + //chat history + allAnchorsTokens + // AN and/or legacy anchors + //afterScenarioAnchorTokens + //only counts if AN is set to 'after scenario' + //zeroDepthAnchorTokens + //same as above, even if AN not on 0 depth + promptBiasTokens + //{{}} + - thisPrompt_padding; //not sure this way of calculating is correct, but the math results in same value as 'finalPromt' + + let 
storyStringTokensPercentage = ((storyStringTokens / (totalTokensInPrompt + thisPrompt_padding)) * 100).toFixed(2); + let ActualChatHistoryTokensPercentage = ((ActualChatHistoryTokens / (totalTokensInPrompt + thisPrompt_padding)) * 100).toFixed(2); + let promptBiasTokensPercentage = ((promptBiasTokens / (totalTokensInPrompt + thisPrompt_padding)) * 100).toFixed(2); + let worldInfoStringTokensPercentage = ((worldInfoStringTokens / (totalTokensInPrompt + thisPrompt_padding)) * 100).toFixed(2); + let allAnchorsTokensPercentage = ((allAnchorsTokens / (totalTokensInPrompt + thisPrompt_padding)) * 100).toFixed(2); + + callPopup( + ` +

Prompt Itemization

+
+
+
+
+
+
+
+
+
+
+
+
+
+
Character Definitions:
${storyStringTokens}
+
+
+
-- Description:
${charDescriptionTokens}
+
+
+
-- Personality:
${charPersonalityTokens}
+
+
+
-- Scenario:
${scenarioTextTokens}
+
+
+
-- Examples:
${examplesStringTokens}
+
+
+
+
World Info:
${worldInfoStringTokens}
+
+
+
Chat History:
${ActualChatHistoryTokens}
+
+
+
Author's Note:
${allAnchorsTokens}
+
+
+
{{}} Bias:
${promptBiasTokens}
+
+
+ +
+
+
+
+
Total Tokens in Prompt:
${totalTokensInPrompt}
+
+ +
+
Max Context:
${thisPrompt_max_context}
+
+
+
- Padding:
${thisPrompt_padding}
+
+
+
Actual Max Context Allowed:
${thisPrompt_max_context - thisPrompt_padding}
+
+
+
+
+ `, 'text' + ); +} + function setInContextMessages(lastmsg, type) { $("#chat .mes").removeClass('lastInContext'); @@ -5348,27 +5464,6 @@ $(document).ready(function () { saveSettingsDebounced(); }); - /* $("#donation").click(function () { - $("#shadow_tips_popup").css("display", "block"); - $("#shadow_tips_popup").transition({ - opacity: 1.0, - duration: 100, - easing: animation_easing, - complete: function () { }, - }); - }); */ - - /* $("#tips_cross").click(function () { - $("#shadow_tips_popup").transition({ - opacity: 0.0, - duration: 100, - easing: animation_easing, - complete: function () { - $("#shadow_tips_popup").css("display", "none"); - }, - }); - }); */ - $("#select_chat_cross").click(function () { $("#shadow_select_chat_popup").transition({ opacity: 0, @@ -5412,6 +5507,13 @@ $(document).ready(function () { }); } + $(document).on("pointerup", ".mes_prompt", function () { + let mesIdForItemization = $(this).closest('.mes').attr('mesId'); + if (itemizedPrompts.length !== undefined && itemizedPrompts.length !== 0) { + promptItemize(itemizedPrompts, mesIdForItemization); + } + }) + //******************** //***Message Editor*** diff --git a/public/style.css b/public/style.css index 0d415f11d..e3f0a1442 100644 --- a/public/style.css +++ b/public/style.css @@ -132,6 +132,18 @@ table.responsiveTable { border-top: 2px solid grey; } +.tokenItemizingSubclass { + font-size: calc(var(--mainFontSize) * 0.8); + color: var(--SmartThemeEmColor); +} + +.tokenGraph { + border-radius: 10px; + border: 1px solid var(--white30a); + max-height: 100%; + overflow: hidden; +} + .fa-solid::before, .fa-regular::before { vertical-align: middle; @@ -2130,6 +2142,7 @@ input[type="range"]::-webkit-slider-thumb { right: 0px; } +.mes_prompt, .mes_copy, .mes_edit { cursor: pointer; @@ -2146,7 +2159,8 @@ input[type="range"]::-webkit-slider-thumb { opacity: 1; } -.last_mes .mes_copy { +.last_mes .mes_copy, +.last_mes .mes_prompt { grid-row-start: 1; position: relative; right: -30px; From 0f131e799e8077fd58ae28b32dc5e0d87e3c47e2 Mon Sep 17 00:00:00 2001 From: RossAscends <124905043+RossAscends@users.noreply.github.com> Date: Sat, 13 May 2023 23:58:22 +0900 Subject: [PATCH 02/64] add tokenizer name to prompt itemization --- public/script.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/public/script.js b/public/script.js index 8b3116cbb..d8ebff5a2 100644 --- a/public/script.js +++ b/public/script.js @@ -2454,10 +2454,11 @@ function promptItemize(itemizedPrompts, requestedMesId) { let promptBiasTokensPercentage = ((promptBiasTokens / (totalTokensInPrompt + thisPrompt_padding)) * 100).toFixed(2); let worldInfoStringTokensPercentage = ((worldInfoStringTokens / (totalTokensInPrompt + thisPrompt_padding)) * 100).toFixed(2); let allAnchorsTokensPercentage = ((allAnchorsTokens / (totalTokensInPrompt + thisPrompt_padding)) * 100).toFixed(2); - + let selectedTokenizer = $("#tokenizer").find(':selected').text(); callPopup( `

Prompt Itemization

+ Tokenizer: ${selectedTokenizer}
From 84e48adc69f516f18acdeaf634a6683a9ed4105c Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Sat, 13 May 2023 18:42:08 +0300 Subject: [PATCH 03/64] #295 Sanitize get/save chat names --- server.js | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/server.js b/server.js index 7aee9bb00..fabcd07ba 100644 --- a/server.js +++ b/server.js @@ -528,7 +528,7 @@ app.post("/savechat", jsonParser, function (request, response) { var dir_name = String(request.body.avatar_url).replace('.png', ''); let chat_data = request.body.chat; let jsonlData = chat_data.map(JSON.stringify).join('\n'); - fs.writeFile(chatsPath + dir_name + "/" + request.body.file_name + '.jsonl', jsonlData, 'utf8', function (err) { + fs.writeFile(`${chatsPath + dir_name}/${sanitize(request.body.file_name)}.jsonl`, jsonlData, 'utf8', function (err) { if (err) { response.send(err); return console.log(err); @@ -552,11 +552,10 @@ app.post("/getchat", jsonParser, function (request, response) { if (err === null) { //if there is a dir, then read the requested file from the JSON call - fs.stat(chatsPath + dir_name + "/" + request.body.file_name + ".jsonl", function (err, stat) { - + fs.stat(`${chatsPath + dir_name}/${sanitize(request.body.file_name)}.jsonl`, function (err, stat) { if (err === null) { //if no error (the file exists), read the file if (stat !== undefined) { - fs.readFile(chatsPath + dir_name + "/" + request.body.file_name + ".jsonl", 'utf8', (err, data) => { + fs.readFile(`${chatsPath + dir_name}/${sanitize(request.body.file_name)}.jsonl`, 'utf8', (err, data) => { if (err) { console.error(err); response.send(err); @@ -585,9 +584,8 @@ app.post("/getchat", jsonParser, function (request, response) { } } }); - - }); + app.post("/getstatus", jsonParser, async function (request, response_getstatus = response) { if (!request.body) return response_getstatus.sendStatus(400); api_server = request.body.api_server; @@ -1281,7 +1279,7 @@ app.post('/getsettings', jsonParser, (request, response) => { //Wintermute's cod .filter(x => path.parse(x).ext == '.json') .sort(); - instructFiles.forEach(item => { + instructFiles.forEach(item => { const file = fs.readFileSync( path.join(directories.instruct, item), 'utf-8', @@ -1642,7 +1640,7 @@ app.post("/importcharacter", urlencodedParser, async function (request, response } catch { console.error('WEBP image conversion failed. Using the default character image.'); - uploadPath = defaultAvatarPath; + uploadPath = defaultAvatarPath; } } @@ -2847,7 +2845,7 @@ app.post('/writesecret', jsonParser, (request, response) => { const key = request.body.key; const value = request.body.value; - writeSecret(key,value); + writeSecret(key, value); return response.send('ok'); }); From 835c17b40c48a4282afbda2e8dd7a4fa7dd06295 Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Sat, 13 May 2023 18:48:51 +0300 Subject: [PATCH 04/64] #292 Add button titles --- public/index.html | 2 +- public/scripts/extensions/caption/index.js | 1 + public/scripts/extensions/dice/index.js | 2 +- public/scripts/extensions/stable-diffusion/index.js | 2 +- public/scripts/extensions/tts/index.js | 2 +- 5 files changed, 5 insertions(+), 4 deletions(-) diff --git a/public/index.html b/public/index.html index cf8cfc2a2..78422fd89 100644 --- a/public/index.html +++ b/public/index.html @@ -2433,7 +2433,7 @@
-
+
diff --git a/public/scripts/extensions/caption/index.js b/public/scripts/extensions/caption/index.js index 67d3a527a..992413213 100644 --- a/public/scripts/extensions/caption/index.js +++ b/public/scripts/extensions/caption/index.js @@ -96,6 +96,7 @@ $(document).ready(function () { function addSendPictureButton() { const sendButton = document.createElement('div'); sendButton.id = 'send_picture'; + sendButton.title = 'Send a picture to chat'; sendButton.classList.add('fa-solid'); $(sendButton).hide(); $(sendButton).on('click', () => $('#img_file').click()); diff --git a/public/scripts/extensions/dice/index.js b/public/scripts/extensions/dice/index.js index 851b229ba..35f791140 100644 --- a/public/scripts/extensions/dice/index.js +++ b/public/scripts/extensions/dice/index.js @@ -29,7 +29,7 @@ async function doDiceRoll() { function addDiceRollButton() { const buttonHtml = ` -
+
`; const dropdownHtml = `
diff --git a/public/scripts/extensions/stable-diffusion/index.js b/public/scripts/extensions/stable-diffusion/index.js index f8392d136..c944a7375 100644 --- a/public/scripts/extensions/stable-diffusion/index.js +++ b/public/scripts/extensions/stable-diffusion/index.js @@ -326,7 +326,7 @@ async function sendMessage(prompt, image) { function addSDGenButtons() { const buttonHtml = ` -
+
`; const waitButtonHtml = ` diff --git a/public/scripts/extensions/tts/index.js b/public/scripts/extensions/tts/index.js index ae4818ef6..975289a45 100644 --- a/public/scripts/extensions/tts/index.js +++ b/public/scripts/extensions/tts/index.js @@ -165,7 +165,7 @@ function onAudioControlClicked() { function addAudioControl() { $('#send_but_sheld').prepend('
') - $('#tts_media_control').on('click', onAudioControlClicked) + $('#tts_media_control').attr('title', 'TTS play/pause').on('click', onAudioControlClicked) audioControl = document.getElementById('tts_media_control') updateUiAudioPlayState() } From 1749a669237a282e48a53b8547037c55350e6c51 Mon Sep 17 00:00:00 2001 From: RossAscends <124905043+RossAscends@users.noreply.github.com> Date: Sun, 14 May 2023 05:07:48 +0900 Subject: [PATCH 05/64] parse memory & AN in prompt itemizer --- public/script.js | 56 +++++++++++++++++++++++++++++++++++------------- 1 file changed, 41 insertions(+), 15 deletions(-) diff --git a/public/script.js b/public/script.js index d8ebff5a2..56e81c954 100644 --- a/public/script.js +++ b/public/script.js @@ -107,7 +107,7 @@ import { } from "./scripts/poe.js"; import { debounce, delay, restoreCaretPosition, saveCaretPosition } from "./scripts/utils.js"; -import { extension_settings, loadExtensionSettings } from "./scripts/extensions.js"; +import { extension_settings, getContext, loadExtensionSettings } from "./scripts/extensions.js"; import { executeSlashCommands, getSlashCommandsHelp, registerSlashCommand } from "./scripts/slash-commands.js"; import { tag_map, @@ -2112,6 +2112,8 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, mesId: count_view_mes, worldInfoBefore: worldInfoBefore, allAnchors: allAnchors, + summarizeString: extension_prompts['1_memory'].value, + authorsNoteString: extension_prompts['2_floating_prompt'].value, worldInfoString: worldInfoString, storyString: storyString, worldInfoAfter: worldInfoAfter, @@ -2425,6 +2427,8 @@ function promptItemize(itemizedPrompts, requestedMesId) { let finalPromptTokens = getTokenCount(itemizedPrompts[thisPromptSet].finalPromt); let allAnchorsTokens = getTokenCount(itemizedPrompts[thisPromptSet].allAnchors); + let summarizeStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].summarizeString); + let authorsNoteStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].authorsNoteString); let afterScenarioAnchorTokens = getTokenCount(itemizedPrompts[thisPromptSet].afterScenarioAnchor); let zeroDepthAnchorTokens = getTokenCount(itemizedPrompts[thisPromptSet].afterScenarioAnchor); let worldInfoStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].worldInfoString); @@ -2458,7 +2462,11 @@ function promptItemize(itemizedPrompts, requestedMesId) { callPopup( `

Prompt Itemization

- Tokenizer: ${selectedTokenizer} + Tokenizer: ${selectedTokenizer}
+ + Only the white numbers really matter. All numbers are estimates. + Grey color items may not have been included in the context due to certain prompt format settings. +
@@ -2472,29 +2480,47 @@ function promptItemize(itemizedPrompts, requestedMesId) {
-
Character Definitions:
${storyStringTokens}
+
Character Definitions:
+
${storyStringTokens}
-
-
-- Description:
${charDescriptionTokens}
+
+
-- Description:
+
${charDescriptionTokens}
-
-
-- Personality:
${charPersonalityTokens}
+
+
-- Personality:
+
${charPersonalityTokens}
-
-
-- Scenario:
${scenarioTextTokens}
+
+
-- Scenario:
+
${scenarioTextTokens}
-
-
-- Examples:
${examplesStringTokens}
+
+
-- Examples:
+
${examplesStringTokens}
-
World Info:
${worldInfoStringTokens}
+
World Info:
+
${worldInfoStringTokens}
-
Chat History:
${ActualChatHistoryTokens}
+
Chat History:
+
${ActualChatHistoryTokens}
-
-
Author's Note:
${allAnchorsTokens}
+
+
+
Extensions:
+
${allAnchorsTokens}
+
+
+
-- Summarize:
+
${summarizeStringTokens}
+
+
+
-- Author's Note:
+
${authorsNoteStringTokens}
+
{{}} Bias:
${promptBiasTokens}
From cace7fa96d5f01b0a5f57ad260de1d38e2d0311e Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Sun, 14 May 2023 00:33:10 +0300 Subject: [PATCH 06/64] Fix itemize without extensions running --- public/script.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/public/script.js b/public/script.js index 2382bbe36..a80630f79 100644 --- a/public/script.js +++ b/public/script.js @@ -2128,8 +2128,8 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, mesId: count_view_mes, worldInfoBefore: worldInfoBefore, allAnchors: allAnchors, - summarizeString: extension_prompts['1_memory'].value, - authorsNoteString: extension_prompts['2_floating_prompt'].value, + summarizeString: (extension_prompts['1_memory']?.value || ''), + authorsNoteString: (extension_prompts['2_floating_prompt']?.value || ''), worldInfoString: worldInfoString, storyString: storyString, worldInfoAfter: worldInfoAfter, From 06a745ebf8e7832f9379d00801d4ceaabc9acb4c Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Sun, 14 May 2023 00:50:49 +0300 Subject: [PATCH 07/64] Better sentence separation for TTS --- public/scripts/extensions/tts/elevenlabs.js | 1 + public/scripts/extensions/tts/index.js | 3 ++- public/scripts/extensions/tts/silerotts.js | 1 + public/scripts/extensions/tts/system.js | 1 + 4 files changed, 5 insertions(+), 1 deletion(-) diff --git a/public/scripts/extensions/tts/elevenlabs.js b/public/scripts/extensions/tts/elevenlabs.js index 0429f5ea8..65c3cf302 100644 --- a/public/scripts/extensions/tts/elevenlabs.js +++ b/public/scripts/extensions/tts/elevenlabs.js @@ -7,6 +7,7 @@ class ElevenLabsTtsProvider { settings voices = [] + separator = ' ... ... ... ' get settings() { return this.settings diff --git a/public/scripts/extensions/tts/index.js b/public/scripts/extensions/tts/index.js index 696a18c05..4eeded46c 100644 --- a/public/scripts/extensions/tts/index.js +++ b/public/scripts/extensions/tts/index.js @@ -247,7 +247,8 @@ async function processTtsQueue() { const special_quotes = /[“”]/g; // Extend this regex to include other special quotes text = text.replace(special_quotes, '"'); const matches = text.match(/".*?"/g); // Matches text inside double quotes, non-greedily - text = matches ? matches.join(' ... ... ... ') : text; + const partJoiner = (ttsProvider?.separator || ' ... '); + text = matches ? matches.join(partJoiner) : text; } console.log(`TTS: ${text}`) const char = currentTtsJob.name diff --git a/public/scripts/extensions/tts/silerotts.js b/public/scripts/extensions/tts/silerotts.js index 7b00e78e1..97e6db528 100644 --- a/public/scripts/extensions/tts/silerotts.js +++ b/public/scripts/extensions/tts/silerotts.js @@ -9,6 +9,7 @@ class SileroTtsProvider { settings voices = [] + separator = ' ... ' defaultSettings = { provider_endpoint: "http://localhost:8001/tts", diff --git a/public/scripts/extensions/tts/system.js b/public/scripts/extensions/tts/system.js index e07a32843..dd8c324fb 100644 --- a/public/scripts/extensions/tts/system.js +++ b/public/scripts/extensions/tts/system.js @@ -21,6 +21,7 @@ class SystemTtsProvider { fallbackPreview = 'Neque porro quisquam est qui dolorem ipsum quia dolor sit amet' settings voices = [] + separator = ' ... 
' defaultSettings = { voiceMap: {}, From 0774196f985ebfcc3048cc4217d4b067d0d44b2a Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Sun, 14 May 2023 00:58:01 +0300 Subject: [PATCH 08/64] Silero ellipsis adjust --- public/scripts/extensions/tts/silerotts.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/scripts/extensions/tts/silerotts.js b/public/scripts/extensions/tts/silerotts.js index 97e6db528..862ab5d0e 100644 --- a/public/scripts/extensions/tts/silerotts.js +++ b/public/scripts/extensions/tts/silerotts.js @@ -9,7 +9,7 @@ class SileroTtsProvider { settings voices = [] - separator = ' ... ' + separator = ' .. ' defaultSettings = { provider_endpoint: "http://localhost:8001/tts", From 1b2e113a348b741d5c93e8af623e337b671dd4dd Mon Sep 17 00:00:00 2001 From: Aisu Wata Date: Sat, 13 May 2023 22:15:47 -0300 Subject: [PATCH 09/64] Feature: Auto Swipe --- public/index.html | 14 ++ public/script.js | 348 +++++++++++++++++++---------------- public/scripts/openai.js | 4 +- public/scripts/power-user.js | 42 +++++ 4 files changed, 248 insertions(+), 160 deletions(-) diff --git a/public/index.html b/public/index.html index b8b486660..1a62dbbf3 100644 --- a/public/index.html +++ b/public/index.html @@ -1661,6 +1661,20 @@
+ +

Auto-swipe

+ +
Minimum generated message length
+ +
Blacklisted words
+
+ +
Blacklisted word count to swipe
+ +
diff --git a/public/script.js b/public/script.js index cff9b4a37..6770d9899 100644 --- a/public/script.js +++ b/public/script.js @@ -1184,7 +1184,6 @@ function substituteParams(content, _name1, _name2) { _name1 = _name1 ?? name1; _name2 = _name2 ?? name2; if (!content) { - console.warn("No content on substituteParams") return '' } @@ -1469,13 +1468,46 @@ class StreamingProcessor { this.hideStopButton(this.messageId); this.onProgressStreaming(messageId, text); addCopyToCodeBlocks($(`#chat .mes[mesid="${messageId}"]`)); - playMessageSound(); saveChatConditional(); activateSendButtons(); showSwipeButtons(); setGenerationProgress(0); $('.mes_buttons:last').show(); generatedPromtCache = ''; + + console.log("Generated text size:", text.length, text) + + if (power_user.auto_swipe) { + function containsBlacklistedWords(str, blacklist, threshold) { + const regex = new RegExp(`\\b(${blacklist.join('|')})\\b`, 'gi'); + const matches = str.match(regex) || []; + return matches.length >= threshold; + } + + const generatedTextFiltered = (text) => { + if (text) { + if (power_user.auto_swipe_minimum_length) { + if (text.length < power_user.auto_swipe_minimum_length && text.length !== 0) { + console.log("Generated text size too small") + return true + } + } + if (power_user.auto_swipe_blacklist_threshold) { + if (containsBlacklistedWords(text, power_user.auto_swipe_blacklist, power_user.auto_swipe_blacklist_threshold)) { + console.log("Generated text has blacklisted words") + return true + } + } + } + return false + } + + if (generatedTextFiltered(text)) { + swipe_right() + return + } + } + playMessageSound(); } onErrorStreaming() { @@ -1630,8 +1662,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, let anchorTop = ''; let anchorBottom = ''; if (!is_pygmalion) { - console.log('saw not pyg'); - let postAnchorChar = character_anchor ? name2 + " Elaborate speaker" : ""; let postAnchorStyle = style_anchor ? 
"Writing style: very long messages" : ""; if (anchor_order === 0) { @@ -4005,7 +4035,162 @@ window["SillyTavern"].getContext = function () { }; }; +// when we click swipe right button +const swipe_right = () => { + if (chat.length - 1 === Number(this_edit_mes_id)) { + closeMessageEditor(); + } + if (isHordeGenerationNotAllowed()) { + return; + } + + const swipe_duration = 200; + const swipe_range = 700; + //console.log(swipe_range); + let run_generate = false; + let run_swipe_right = false; + if (chat[chat.length - 1]['swipe_id'] === undefined) { // if there is no swipe-message in the last spot of the chat array + chat[chat.length - 1]['swipe_id'] = 0; // set it to id 0 + chat[chat.length - 1]['swipes'] = []; // empty the array + chat[chat.length - 1]['swipes'][0] = chat[chat.length - 1]['mes']; //assign swipe array with last message from chat + } + chat[chat.length - 1]['swipe_id']++; //make new slot in array + // if message has memory attached - remove it to allow regen + if (chat[chat.length - 1].extra && chat[chat.length - 1].extra.memory) { + delete chat[chat.length - 1].extra.memory; + } + //console.log(chat[chat.length-1]['swipes']); + if (parseInt(chat[chat.length - 1]['swipe_id']) === chat[chat.length - 1]['swipes'].length) { //if swipe id of last message is the same as the length of the 'swipes' array + delete chat[chat.length - 1].gen_started; + delete chat[chat.length - 1].gen_finished; + run_generate = true; + } else if (parseInt(chat[chat.length - 1]['swipe_id']) < chat[chat.length - 1]['swipes'].length) { //otherwise, if the id is less than the number of swipes + chat[chat.length - 1]['mes'] = chat[chat.length - 1]['swipes'][chat[chat.length - 1]['swipe_id']]; //load the last mes box with the latest generation + run_swipe_right = true; //then prepare to do normal right swipe to show next message + } + + const currentMessage = $("#chat").children().filter(`[mesid="${count_view_mes - 1}"]`); + let this_div = currentMessage.children('.swipe_right'); + let this_mes_div = this_div.parent(); + + if (chat[chat.length - 1]['swipe_id'] > chat[chat.length - 1]['swipes'].length) { //if we swipe right while generating (the swipe ID is greater than what we are viewing now) + chat[chat.length - 1]['swipe_id'] = chat[chat.length - 1]['swipes'].length; //show that message slot (will be '...' 
while generating) + } + if (run_generate) { //hide swipe arrows while generating + this_div.css('display', 'none'); + } + // handles animated transitions when swipe right, specifically height transitions between messages + if (run_generate || run_swipe_right) { + let this_mes_block = this_mes_div.children('.mes_block').children('.mes_text'); + const this_mes_div_height = this_mes_div[0].scrollHeight; + const this_mes_block_height = this_mes_block[0].scrollHeight; + + this_mes_div.children('.swipe_left').css('display', 'flex'); + this_mes_div.children('.mes_block').transition({ // this moves the div back and forth + x: '-' + swipe_range, + duration: swipe_duration, + easing: animation_easing, + queue: false, + complete: function () { + /*if (!selected_group) { + var typingIndicator = $("#typing_indicator_template .typing_indicator").clone(); + typingIndicator.find(".typing_indicator_name").text(characters[this_chid].name); + } */ + /* $("#chat").append(typingIndicator); */ + const is_animation_scroll = ($('#chat').scrollTop() >= ($('#chat').prop("scrollHeight") - $('#chat').outerHeight()) - 10); + //console.log(parseInt(chat[chat.length-1]['swipe_id'])); + //console.log(chat[chat.length-1]['swipes'].length); + if (run_generate && parseInt(chat[chat.length - 1]['swipe_id']) === chat[chat.length - 1]['swipes'].length) { + //console.log('showing ""..."'); + /* if (!selected_group) { + } else { */ + $("#chat") + .find('[mesid="' + (count_view_mes - 1) + '"]') + .find('.mes_text') + .html('...'); //shows "..." while generating + $("#chat") + .find('[mesid="' + (count_view_mes - 1) + '"]') + .find('.mes_timer') + .html(''); // resets the timer + /* } */ + } else { + //console.log('showing previously generated swipe candidate, or "..."'); + //console.log('onclick right swipe calling addOneMessage'); + addOneMessage(chat[chat.length - 1], { type: 'swipe' }); + } + let new_height = this_mes_div_height - (this_mes_block_height - this_mes_block[0].scrollHeight); + if (new_height < 103) new_height = 103; + + + this_mes_div.animate({ height: new_height + 'px' }, { + duration: 0, //used to be 100 + queue: false, + progress: function () { + // Scroll the chat down as the message expands + if (is_animation_scroll) $("#chat").scrollTop($("#chat")[0].scrollHeight); + }, + complete: function () { + this_mes_div.css('height', 'auto'); + // Scroll the chat down to the bottom once the animation is complete + if (is_animation_scroll) $("#chat").scrollTop($("#chat")[0].scrollHeight); + } + }); + this_mes_div.children('.mes_block').transition({ + x: swipe_range, + duration: 0, + easing: animation_easing, + queue: false, + complete: function () { + this_mes_div.children('.mes_block').transition({ + x: '0px', + duration: swipe_duration, + easing: animation_easing, + queue: false, + complete: function () { + if (run_generate && !is_send_press && parseInt(chat[chat.length - 1]['swipe_id']) === chat[chat.length - 1]['swipes'].length) { + console.log('caught here 2'); + is_send_press = true; + $('.mes_buttons:last').hide(); + Generate('swipe'); + } else { + if (parseInt(chat[chat.length - 1]['swipe_id']) !== chat[chat.length - 1]['swipes'].length) { + saveChatConditional(); + } + } + } + }); + } + }); + } + }); + this_mes_div.children('.avatar').transition({ // moves avatar along with swipe + x: '-' + swipe_range, + duration: swipe_duration, + easing: animation_easing, + queue: false, + complete: function () { + this_mes_div.children('.avatar').transition({ + x: swipe_range, + duration: 0, + easing: animation_easing, + 
queue: false, + complete: function () { + this_mes_div.children('.avatar').transition({ + x: '0px', + duration: swipe_duration, + easing: animation_easing, + queue: false, + complete: function () { + + } + }); + } + }); + } + }); + } +} $(document).ready(function () { @@ -4052,160 +4237,7 @@ $(document).ready(function () { ///// SWIPE BUTTON CLICKS /////// - $(document).on('click', '.swipe_right', function () { //when we click swipe right button - if (chat.length - 1 === Number(this_edit_mes_id)) { - closeMessageEditor(); - } - - if (isHordeGenerationNotAllowed()) { - return; - } - - const swipe_duration = 200; - const swipe_range = 700; - //console.log(swipe_range); - let run_generate = false; - let run_swipe_right = false; - if (chat[chat.length - 1]['swipe_id'] === undefined) { // if there is no swipe-message in the last spot of the chat array - chat[chat.length - 1]['swipe_id'] = 0; // set it to id 0 - chat[chat.length - 1]['swipes'] = []; // empty the array - chat[chat.length - 1]['swipes'][0] = chat[chat.length - 1]['mes']; //assign swipe array with last message from chat - } - chat[chat.length - 1]['swipe_id']++; //make new slot in array - // if message has memory attached - remove it to allow regen - if (chat[chat.length - 1].extra && chat[chat.length - 1].extra.memory) { - delete chat[chat.length - 1].extra.memory; - } - //console.log(chat[chat.length-1]['swipes']); - if (parseInt(chat[chat.length - 1]['swipe_id']) === chat[chat.length - 1]['swipes'].length) { //if swipe id of last message is the same as the length of the 'swipes' array - delete chat[chat.length - 1].gen_started; - delete chat[chat.length - 1].gen_finished; - run_generate = true; - } else if (parseInt(chat[chat.length - 1]['swipe_id']) < chat[chat.length - 1]['swipes'].length) { //otherwise, if the id is less than the number of swipes - chat[chat.length - 1]['mes'] = chat[chat.length - 1]['swipes'][chat[chat.length - 1]['swipe_id']]; //load the last mes box with the latest generation - run_swipe_right = true; //then prepare to do normal right swipe to show next message - } - - if (chat[chat.length - 1]['swipe_id'] > chat[chat.length - 1]['swipes'].length) { //if we swipe right while generating (the swipe ID is greater than what we are viewing now) - chat[chat.length - 1]['swipe_id'] = chat[chat.length - 1]['swipes'].length; //show that message slot (will be '...' 
while generating) - } - if (run_generate) { //hide swipe arrows while generating - $(this).css('display', 'none'); - } - if (run_generate || run_swipe_right) { // handles animated transitions when swipe right, specifically height transitions between messages - - let this_mes_div = $(this).parent(); - let this_mes_block = $(this).parent().children('.mes_block').children('.mes_text'); - const this_mes_div_height = this_mes_div[0].scrollHeight; - const this_mes_block_height = this_mes_block[0].scrollHeight; - - this_mes_div.children('.swipe_left').css('display', 'flex'); - this_mes_div.children('.mes_block').transition({ // this moves the div back and forth - x: '-' + swipe_range, - duration: swipe_duration, - easing: animation_easing, - queue: false, - complete: function () { - /*if (!selected_group) { - var typingIndicator = $("#typing_indicator_template .typing_indicator").clone(); - typingIndicator.find(".typing_indicator_name").text(characters[this_chid].name); - } */ - /* $("#chat").append(typingIndicator); */ - const is_animation_scroll = ($('#chat').scrollTop() >= ($('#chat').prop("scrollHeight") - $('#chat').outerHeight()) - 10); - //console.log(parseInt(chat[chat.length-1]['swipe_id'])); - //console.log(chat[chat.length-1]['swipes'].length); - if (run_generate && parseInt(chat[chat.length - 1]['swipe_id']) === chat[chat.length - 1]['swipes'].length) { - //console.log('showing ""..."'); - /* if (!selected_group) { - } else { */ - $("#chat") - .find('[mesid="' + (count_view_mes - 1) + '"]') - .find('.mes_text') - .html('...'); //shows "..." while generating - $("#chat") - .find('[mesid="' + (count_view_mes - 1) + '"]') - .find('.mes_timer') - .html(''); // resets the timer - /* } */ - } else { - //console.log('showing previously generated swipe candidate, or "..."'); - //console.log('onclick right swipe calling addOneMessage'); - addOneMessage(chat[chat.length - 1], { type: 'swipe' }); - } - let new_height = this_mes_div_height - (this_mes_block_height - this_mes_block[0].scrollHeight); - if (new_height < 103) new_height = 103; - - - this_mes_div.animate({ height: new_height + 'px' }, { - duration: 0, //used to be 100 - queue: false, - progress: function () { - // Scroll the chat down as the message expands - if (is_animation_scroll) $("#chat").scrollTop($("#chat")[0].scrollHeight); - }, - complete: function () { - this_mes_div.css('height', 'auto'); - // Scroll the chat down to the bottom once the animation is complete - if (is_animation_scroll) $("#chat").scrollTop($("#chat")[0].scrollHeight); - } - }); - this_mes_div.children('.mes_block').transition({ - x: swipe_range, - duration: 0, - easing: animation_easing, - queue: false, - complete: function () { - this_mes_div.children('.mes_block').transition({ - x: '0px', - duration: swipe_duration, - easing: animation_easing, - queue: false, - complete: function () { - if (run_generate && !is_send_press && parseInt(chat[chat.length - 1]['swipe_id']) === chat[chat.length - 1]['swipes'].length) { - console.log('caught here 2'); - is_send_press = true; - $('.mes_buttons:last').hide(); - Generate('swipe'); - } else { - if (parseInt(chat[chat.length - 1]['swipe_id']) !== chat[chat.length - 1]['swipes'].length) { - saveChatConditional(); - } - } - } - }); - } - }); - } - }); - - $(this).parent().children('.avatar').transition({ // moves avatar aong with swipe - x: '-' + swipe_range, - duration: swipe_duration, - easing: animation_easing, - queue: false, - complete: function () { - $(this).parent().children('.avatar').transition({ - x: 
swipe_range, - duration: 0, - easing: animation_easing, - queue: false, - complete: function () { - $(this).parent().children('.avatar').transition({ - x: '0px', - duration: swipe_duration, - easing: animation_easing, - queue: false, - complete: function () { - - } - }); - } - }); - } - }); - } - - }); + $(document).on('click', '.swipe_right', swipe_right); $(document).on('click', '.swipe_left', function () { // when we swipe left..but no generation. if (chat.length - 1 === Number(this_edit_mes_id)) { diff --git a/public/scripts/openai.js b/public/scripts/openai.js index e8053b44a..8721cc0af 100644 --- a/public/scripts/openai.js +++ b/public/scripts/openai.js @@ -310,7 +310,7 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI let whole_prompt = getSystemPrompt(nsfw_toggle_prompt, enhance_definitions_prompt, wiBefore, storyString, wiAfter, extensionPrompt, isImpersonate); // Join by a space and replace placeholders with real user/char names - storyString = substituteParams(whole_prompt.join(" ")).replace(/\r/gm, '').trim(); + storyString = substituteParams(whole_prompt.join("\n")).replace(/\r/gm, '').trim(); let prompt_msg = { "role": "system", "content": storyString } let examples_tosend = []; @@ -469,7 +469,7 @@ function getSystemPrompt(nsfw_toggle_prompt, enhance_definitions_prompt, wiBefor whole_prompt = [nsfw_toggle_prompt, oai_settings.main_prompt, enhance_definitions_prompt + "\n\n" + wiBefore, storyString, wiAfter, extensionPrompt]; } else { - whole_prompt = [oai_settings.main_prompt, nsfw_toggle_prompt, enhance_definitions_prompt + "\n\n" + wiBefore, storyString, wiAfter, extensionPrompt]; + whole_prompt = [oai_settings.main_prompt, nsfw_toggle_prompt, enhance_definitions_prompt, "\n", wiBefore, storyString, wiAfter, extensionPrompt].filter(elem => elem); } } return whole_prompt; diff --git a/public/scripts/power-user.js b/public/scripts/power-user.js index 19931c12b..947115b6f 100644 --- a/public/scripts/power-user.js +++ b/public/scripts/power-user.js @@ -106,6 +106,10 @@ let power_user = { noShadows: false, theme: 'Default (Dark)', + auto_swipe: false, + auto_swipe_minimum_length: 0, + auto_swipe_blacklist: ["ethical", "guidelines", "harmful", "illegal", "comfortable", "generating"], + auto_swipe_blacklist_threshold: 2, auto_scroll_chat_to_bottom: true, auto_fix_generated_markdown: true, send_on_enter: send_on_enter_options.AUTO, @@ -476,6 +480,11 @@ function loadPowerUserSettings(settings, data) { power_user.font_scale = Number(localStorage.getItem(storage_keys.font_scale) ?? 1); power_user.blur_strength = Number(localStorage.getItem(storage_keys.blur_strength) ?? 
10); + $('#auto_swipe').prop("checked", power_user.auto_swipe); + $('#auto_swipe_minimum_length').val(power_user.auto_swipe_minimum_length); + $('#auto_swipe_blacklist').val(power_user.auto_swipe_blacklist.join(", ")); + $('#auto_swipe_blacklist_threshold').val(power_user.auto_swipe_blacklist_threshold); + $('#auto_fix_generated_markdown').prop("checked", power_user.auto_fix_generated_markdown); $('#auto_scroll_chat_to_bottom').prop("checked", power_user.auto_scroll_chat_to_bottom); $(`#tokenizer option[value="${power_user.tokenizer}"]`).attr('selected', true); @@ -999,6 +1008,39 @@ $(document).ready(() => { saveSettingsDebounced(); }); + $('#auto_swipe').on('input', function () { + power_user.auto_swipe = !!$(this).prop('checked'); + console.log("power_user.auto_swipe", power_user.auto_swipe) + saveSettingsDebounced(); + }); + + $('#auto_swipe_blacklist').on('input', function () { + power_user.auto_swipe_blacklist = $(this).val() + .split(",") + .map(str => str.trim()) + .filter(str => str); + console.log("power_user.auto_swipe_blacklist", power_user.auto_swipe_blacklist) + saveSettingsDebounced(); + }); + + $('#auto_swipe_minimum_length').on('input', function () { + const number = parseInt($(this).val()); + if (!isNaN(number)) { + power_user.auto_swipe_minimum_length = number; + console.log("power_user.auto_swipe_minimum_length", power_user.auto_swipe_minimum_length) + saveSettingsDebounced(); + } + }); + + $('#auto_swipe_blacklist_threshold').on('input', function () { + const number = parseInt($(this).val()); + if (!isNaN(number)) { + power_user.auto_swipe_blacklist_threshold = number; + console.log("power_user.auto_swipe_blacklist_threshold", power_user.auto_swipe_blacklist_threshold) + saveSettingsDebounced(); + } + }); + $('#auto_fix_generated_markdown').on('input', function () { power_user.auto_fix_generated_markdown = !!$(this).prop('checked'); reloadCurrentChat(); From 4ba712e5b04145c44fa58b05c2b38d66e6f7c18e Mon Sep 17 00:00:00 2001 From: Aisu Wata Date: Sat, 13 May 2023 22:23:39 -0300 Subject: [PATCH 10/64] Auto Swipe: changed default and placeholder --- public/index.html | 2 +- public/scripts/power-user.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/public/index.html b/public/index.html index 9e5f31670..b9214dd96 100644 --- a/public/index.html +++ b/public/index.html @@ -1690,7 +1690,7 @@
Blacklisted words
- +
Blacklisted word count to swipe
diff --git a/public/scripts/power-user.js b/public/scripts/power-user.js index 053cf63dc..8a7e021a3 100644 --- a/public/scripts/power-user.js +++ b/public/scripts/power-user.js @@ -109,7 +109,7 @@ let power_user = { auto_swipe: false, auto_swipe_minimum_length: 0, - auto_swipe_blacklist: ["ethical", "guidelines", "harmful", "illegal", "comfortable", "generating"], + auto_swipe_blacklist: [], auto_swipe_blacklist_threshold: 2, auto_scroll_chat_to_bottom: true, auto_fix_generated_markdown: true, From 3b0d0b580a80832362abbc774b44e0a8d52ea4a9 Mon Sep 17 00:00:00 2001 From: RossAscends <124905043+RossAscends@users.noreply.github.com> Date: Sun, 14 May 2023 23:24:26 +0900 Subject: [PATCH 11/64] Update readme.md Remote Connections Instructions --- readme.md | 59 +++++++++++++++++++++++++++++++++---------------------- 1 file changed, 36 insertions(+), 23 deletions(-) diff --git a/readme.md b/readme.md index 093341471..2a4b3ac3b 100644 --- a/readme.md +++ b/readme.md @@ -168,45 +168,58 @@ In order to enable viewing your keys by clicking a button in the API block: ## Remote connections -Most often this is for people who want to use SillyTavern on their mobile phones while at home. -If you want to enable other devices to connect to your TAI server, open 'config.conf' in a text editor, and change: +Most often this is for people who want to use SillyTavern on their mobile phones while their PC runs the ST server on the same wifi network. -``` -const whitelistMode = true; -``` +However, it can be used to allow remote connections from anywhere as well. -to +**IMPORTANT: SillyTavern is a single-user program, so anyone who logs in will be able to see all characters and chats, and be able to change any settings inside the UI.** -``` -const whitelistMode = false; -``` +### 1. Managing whitelisted IPs -Save the file. -Restart your TAI server. - -You will now be able to connect from other devices. - -### Managing whitelisted IPs - -You can add or remove whitelisted IPs by editing the `whitelist` array in `config.conf`. You can also provide a `whitelist.txt` file in the same directory as `config.conf` with one IP address per line like: +* Create a new text file inside your SillyTavern base install folder called `whitelist.txt`. +* Open the file in a text editor, add a list of IPs you want to be allowed to connect. +*IP ranges are not accepted. Each IP must be listed individually like this:* ```txt 192.168.0.1 192.168.0.2 +192.168.0.3 +192.168.0.4 ``` +* Save the `whitelist.txt` file. +* Restart your TAI server. -The `whitelist` array in `config.conf` will be ignored if `whitelist.txt` exists. +Now devices which have the IP specified in the file will be able to connect. -***Disclaimer: Anyone else who knows your IP address and TAI port number will be able to connect as well*** +*Note: `config.conf` also has a `whitelist` array, which you can use in the same way, but this array will be ignored if `whitelist.txt` exists.* -To connect over wifi you'll need your PC's local wifi IP address +### 2. Connecting to ST from a remote device -* (For Windows: windows button > type 'cmd.exe' in the search bar> type 'ipconfig' in the console, hit Enter > "IPv4" listing) -if you want other people on the internet to connect, check [here](https://whatismyipaddress.com/) for 'IPv4' +After the whitelist has been setup, to connect over wifi you'll need the IP of the ST-hosting device. 
+ +If the ST-hosting device is on the same wifi network, you will point your remote device's browser to the ST-host's internal wifi IP: + +* For Windows: windows button > type `cmd.exe` in the search bar > type `ipconfig` in the console, hit Enter > look for `IPv4` listing. + +If you (or someone else) wants to connect to your hosted ST while not being on the same network, you will need the public IP of your ST-hosting device. + +While using the ST-hosting device, access [this page](https://whatismyipaddress.com/) and look for for `IPv4`. This is what you would use to connect from the remote device. + +### Opening your ST to all IPs + +We do not reccomend doing this, but you can open `config.conf` and change `whitelist` to `false`. + +You must remove (or rename) `whitelist.txt` in the SillyTavern base install folder, if it exists. + +This is usually an insecure practice, so we require you to set a username and password when you do this. + +The username and password are set in `config.conf`. + +After restarting your ST server, any device will be able to connect to it, regardless of their IP as long as they know the username and password. ### Still Unable To Connect? -- Create an inbound/outbound firewall rule for the port found in `config.conf`. Do NOT mistake this for portforwarding on your router, otherwise someone could find your chat logs and that's a big no-no. +* Create an inbound/outbound firewall rule for the port found in `config.conf`. Do NOT mistake this for portforwarding on your router, otherwise someone could find your chat logs and that's a big no-no. * Enable the Private Network profile type in Settings > Network and Internet > Ethernet. This is VERY important for Windows 11, otherwise you would be unable to connect even with the aforementioned firewall rules. ## Performance issues? From 30a43f96de2f48b67a0e9ae9c8a45cdd991ba7ff Mon Sep 17 00:00:00 2001 From: RossAscends <124905043+RossAscends@users.noreply.github.com> Date: Mon, 15 May 2023 01:08:45 +0900 Subject: [PATCH 12/64] OAI token itemization WIP (integrate PR299) --- public/index.html | 2 +- public/script.js | 350 +++++++++++++++++++++++++++++++-------- public/scripts/openai.js | 125 ++++++++++---- 3 files changed, 374 insertions(+), 103 deletions(-) diff --git a/public/index.html b/public/index.html index 78422fd89..e74b768d6 100644 --- a/public/index.html +++ b/public/index.html @@ -2431,7 +2431,7 @@
-
+
diff --git a/public/script.js b/public/script.js index a80630f79..e3a1fad3d 100644 --- a/public/script.js +++ b/public/script.js @@ -125,6 +125,7 @@ import { secret_state, writeSecret } from "./scripts/secrets.js"; +import uniqolor from "./scripts/uniqolor.js"; //exporting functions and vars for mods export { @@ -204,6 +205,7 @@ let converter; reloadMarkdownProcessor(); // array for prompt token calculations +console.log('initializing Prompt Itemization Array on Startup'); let itemizedPrompts = []; /* let bg_menu_toggle = false; */ @@ -1129,28 +1131,34 @@ function addOneMessage(mes, { type = "normal", insertAfter = null, scroll = true if (isSystem) { newMessage.find(".mes_edit").hide(); - newMessage.find(".mes_prompt").hide(); //dont'd need prompt display for sys messages + newMessage.find(".mes_prompt").hide(); //don't need prompt button for sys } - // don't need prompt butons for user messages + // don't need prompt button for user if (params.isUser === true) { newMessage.find(".mes_prompt").hide(); + console.log(`hiding prompt for user mesID ${params.mesId}`); } //shows or hides the Prompt display button let mesIdToFind = Number(newMessage.attr('mesId')); if (itemizedPrompts.length !== 0) { + console.log(`itemizedPrompt.length = ${itemizedPrompts.length}`) for (var i = 0; i < itemizedPrompts.length; i++) { if (itemizedPrompts[i].mesId === mesIdToFind) { newMessage.find(".mes_prompt").show(); + console.log(`showing prompt for mesID ${params.mesId} from ${params.characterName}`); } else { - console.log('no cache found for mesID, hiding prompt button and continuing search'); + console.log(`no cache obj for mesID ${mesIdToFind}, hiding prompt button and continuing search`); newMessage.find(".mes_prompt").hide(); + console.log(itemizedPrompts); } } - } else { //hide all when prompt cache is empty + } else if (params.isUser !== true) { //hide all when prompt cache is empty + console.log('saw empty prompt cache, hiding all prompt buttons'); $(".mes_prompt").hide(); - } + console.log(itemizedPrompts); + } else { console.log('skipping prompt data for User Message'); } newMessage.find('.avatar img').on('error', function () { $(this).hide(); @@ -1594,6 +1602,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, const isImpersonate = type == "impersonate"; const isInstruct = power_user.instruct.enabled; + message_already_generated = isImpersonate ? `${name1}: ` : `${name2}: `; // Name for the multigen prefix const magName = isImpersonate ? (is_pygmalion ? 
'You' : name1) : name2; @@ -2123,32 +2132,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, generatedPromtCache + promptBias; - //set array object for prompt token itemization of this message - let thisPromptBits = { - mesId: count_view_mes, - worldInfoBefore: worldInfoBefore, - allAnchors: allAnchors, - summarizeString: (extension_prompts['1_memory']?.value || ''), - authorsNoteString: (extension_prompts['2_floating_prompt']?.value || ''), - worldInfoString: worldInfoString, - storyString: storyString, - worldInfoAfter: worldInfoAfter, - afterScenarioAnchor: afterScenarioAnchor, - examplesString: examplesString, - mesSendString: mesSendString, - generatedPromtCache: generatedPromtCache, - promptBias: promptBias, - finalPromt: finalPromt, - charDescription: charDescription, - charPersonality: charPersonality, - scenarioText: scenarioText, - promptBias: promptBias, - storyString: storyString, - this_max_context: this_max_context, - padding: power_user.token_padding - } - itemizedPrompts.push(thisPromptBits); if (zeroDepthAnchor && zeroDepthAnchor.length) { if (!isMultigenEnabled() || tokens_already_generated == 0) { @@ -2167,6 +2151,11 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, } } + // Add quiet generation prompt at depth 0 + if (quiet_prompt && quiet_prompt.length) { + finalPromt += `\n${quiet_prompt}`; + } + finalPromt = finalPromt.replace(/\r/gm, ''); if (power_user.collapse_newlines) { @@ -2202,6 +2191,8 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, } } + let thisPromptBits = []; + if (main_api == 'kobold' && horde_settings.use_horde && horde_settings.auto_adjust_response_length) { this_amount_gen = Math.min(this_amount_gen, adjustedParams.maxLength); this_amount_gen = Math.max(this_amount_gen, MIN_AMOUNT_GEN); // prevent validation errors @@ -2237,7 +2228,50 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, console.log('rungenerate calling API'); if (main_api == 'openai') { - let prompt = await prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldInfoAfter, afterScenarioAnchor, promptBias, type); + let [prompt, counts] = await prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldInfoAfter, afterScenarioAnchor, promptBias, type); + + + // counts will return false if the user has not enabled the token breakdown feature + if (counts) { + + //$('#token_breakdown').css('display', 'flex'); + const breakdown_bar = $('#token_breakdown div:first-child'); + breakdown_bar.empty(); + + const total = Object.values(counts).reduce((acc, val) => acc + val, 0); + + thisPromptBits.push({ + oaiStartTokens: Object.entries(counts)[0][1], + oaiPromptTokens: Object.entries(counts)[1][1], + oaiBiasTokens: Object.entries(counts)[2][1], + oaiNudgeTokens: Object.entries(counts)[3][1], + oaiJailbreakTokens: Object.entries(counts)[4][1], + oaiImpersonateTokens: Object.entries(counts)[5][1], + oaiExamplesTokens: Object.entries(counts)[6][1], + oaiConversationTokens: Object.entries(counts)[7][1], + oaiTotalTokens: total, + }) + + + console.log(`added OAI prompt bits to array`); + + Object.entries(counts).forEach(([type, value]) => { + if (value === 0) { + return; + } + const percent_value = (value / total) * 100; + const color = uniqolor(type, { saturation: 50, lightness: 75, }).color; + const bar = document.createElement('div'); + bar.style.width = `${percent_value}%`; + bar.classList.add('token_breakdown_segment'); + bar.style.backgroundColor = 
color + 'AA'; + bar.style.borderColor = color + 'FF'; + bar.innerText = value; + bar.title = `${type}: ${percent_value.toFixed(2)}%`; + breakdown_bar.append(bar); + }); + } + setInContextMessages(openai_messages_count, type); if (isStreamingEnabled() && type !== 'quiet') { @@ -2277,6 +2311,41 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, }); //end of "if not data error" } + //set array object for prompt token itemization of this message + let currentArrayEntry = Number(thisPromptBits.length - 1); + let additionalPromptStuff = { + ...thisPromptBits[currentArrayEntry], + mesId: Number(count_view_mes), + worldInfoBefore: worldInfoBefore, + allAnchors: allAnchors, + summarizeString: (extension_prompts['1_memory']?.value || ''), + authorsNoteString: (extension_prompts['2_floating_prompt']?.value || ''), + worldInfoString: worldInfoString, + storyString: storyString, + worldInfoAfter: worldInfoAfter, + afterScenarioAnchor: afterScenarioAnchor, + examplesString: examplesString, + mesSendString: mesSendString, + generatedPromtCache: generatedPromtCache, + promptBias: promptBias, + finalPromt: finalPromt, + charDescription: charDescription, + charPersonality: charPersonality, + scenarioText: scenarioText, + this_max_context: this_max_context, + padding: power_user.token_padding, + main_api: main_api, + }; + + thisPromptBits = additionalPromptStuff; + + //console.log(thisPromptBits); + + itemizedPrompts.push(thisPromptBits); + //console.log(`pushed prompt bits to itemizedPrompts array. Length is now: ${itemizedPrompts.length}`); + + + if (isStreamingEnabled() && type !== 'quiet') { hideSwipeButtons(); let getMessage = await streamingProcessor.generate(); @@ -2285,7 +2354,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, // I wasn't able to get multigen working with real streaming // consistently without screwing the interim prompting if (isMultigenEnabled()) { - tokens_already_generated += this_amount_gen; + tokens_already_generated += this_amount_gen; // add new gen amt to any prev gen counter.. 
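                    // accumulate the partial output; the next multigen pass is prompted with this text appended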
message_already_generated += getMessage; promptBias = ''; if (!streamingProcessor.isStopped && shouldContinueMultigen(getMessage, isImpersonate)) { @@ -2432,8 +2501,9 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, } //generate ends function promptItemize(itemizedPrompts, requestedMesId) { - let incomingMesId = Number(requestedMesId); - let thisPromptSet = undefined; + var incomingMesId = Number(requestedMesId); + console.log(`looking for MesId ${incomingMesId}`); + var thisPromptSet = undefined; for (var i = 0; i < itemizedPrompts.length; i++) { if (itemizedPrompts[i].mesId === incomingMesId) { @@ -2447,44 +2517,183 @@ function promptItemize(itemizedPrompts, requestedMesId) { return null; } - let finalPromptTokens = getTokenCount(itemizedPrompts[thisPromptSet].finalPromt); - let allAnchorsTokens = getTokenCount(itemizedPrompts[thisPromptSet].allAnchors); - let summarizeStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].summarizeString); - let authorsNoteStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].authorsNoteString); - let afterScenarioAnchorTokens = getTokenCount(itemizedPrompts[thisPromptSet].afterScenarioAnchor); - let zeroDepthAnchorTokens = getTokenCount(itemizedPrompts[thisPromptSet].afterScenarioAnchor); - let worldInfoStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].worldInfoString); - let storyStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].storyString); - let examplesStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].examplesString); - let charPersonalityTokens = getTokenCount(itemizedPrompts[thisPromptSet].charPersonality); - let charDescriptionTokens = getTokenCount(itemizedPrompts[thisPromptSet].charDescription); - let scenarioTextTokens = getTokenCount(itemizedPrompts[thisPromptSet].scenarioText); - let promptBiasTokens = getTokenCount(itemizedPrompts[thisPromptSet].promptBias); - let mesSendStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].mesSendString) - let ActualChatHistoryTokens = mesSendStringTokens - allAnchorsTokens + power_user.token_padding; - let thisPrompt_max_context = itemizedPrompts[thisPromptSet].this_max_context; - let thisPrompt_padding = itemizedPrompts[thisPromptSet].padding; + //these happen regardless of API + var charPersonalityTokens = getTokenCount(itemizedPrompts[thisPromptSet].charPersonality); + var charDescriptionTokens = getTokenCount(itemizedPrompts[thisPromptSet].charDescription); + var scenarioTextTokens = getTokenCount(itemizedPrompts[thisPromptSet].scenarioText); + var allAnchorsTokens = getTokenCount(itemizedPrompts[thisPromptSet].allAnchors); + var summarizeStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].summarizeString); + var authorsNoteStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].authorsNoteString); + var afterScenarioAnchorTokens = getTokenCount(itemizedPrompts[thisPromptSet].afterScenarioAnchor); + var zeroDepthAnchorTokens = getTokenCount(itemizedPrompts[thisPromptSet].afterScenarioAnchor); + var worldInfoStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].worldInfoString); + var thisPrompt_max_context = itemizedPrompts[thisPromptSet].this_max_context; + var thisPrompt_padding = itemizedPrompts[thisPromptSet].padding; + console.log(`"${itemizedPrompts[thisPromptSet].promptBias}"`); + var promptBiasTokens = getTokenCount(itemizedPrompts[thisPromptSet].promptBias); + var this_main_api = itemizedPrompts[thisPromptSet].main_api; - let totalTokensInPrompt = - storyStringTokens + //chardefs total - 
worldInfoStringTokens + - ActualChatHistoryTokens + //chat history - allAnchorsTokens + // AN and/or legacy anchors - //afterScenarioAnchorTokens + //only counts if AN is set to 'after scenario' - //zeroDepthAnchorTokens + //same as above, even if AN not on 0 depth - promptBiasTokens + //{{}} - - thisPrompt_padding; //not sure this way of calculating is correct, but the math results in same value as 'finalPromt' + if (this_main_api == 'openai') { + //for OAI API + //console.log('-- Counting OAI Tokens'); + var finalPromptTokens = itemizedPrompts[thisPromptSet].oaiTotalTokens; + var oaiStartTokens = itemizedPrompts[thisPromptSet].oaiStartTokens; + var oaiPromptTokens = itemizedPrompts[thisPromptSet].oaiPromptTokens; + var ActualChatHistoryTokens = itemizedPrompts[thisPromptSet].oaiConversationTokens; + var examplesStringTokens = itemizedPrompts[thisPromptSet].oaiExamplesTokens; + var oaiBiasTokens = itemizedPrompts[thisPromptSet].oaiBiasTokens; + var oaiJailbreakTokens = itemizedPrompts[thisPromptSet].oaiJailbreakTokens; + var oaiStartTokens = itemizedPrompts[thisPromptSet].oaiStartTokens; + var oaiNudgeTokens = itemizedPrompts[thisPromptSet].oaiNudgeTokens; + var oaiImpersonateTokens = itemizedPrompts[thisPromptSet].oaiImpersonateTokens; - let storyStringTokensPercentage = ((storyStringTokens / (totalTokensInPrompt + thisPrompt_padding)) * 100).toFixed(2); - let ActualChatHistoryTokensPercentage = ((ActualChatHistoryTokens / (totalTokensInPrompt + thisPrompt_padding)) * 100).toFixed(2); - let promptBiasTokensPercentage = ((promptBiasTokens / (totalTokensInPrompt + thisPrompt_padding)) * 100).toFixed(2); - let worldInfoStringTokensPercentage = ((worldInfoStringTokens / (totalTokensInPrompt + thisPrompt_padding)) * 100).toFixed(2); - let allAnchorsTokensPercentage = ((allAnchorsTokens / (totalTokensInPrompt + thisPrompt_padding)) * 100).toFixed(2); - let selectedTokenizer = $("#tokenizer").find(':selected').text(); - callPopup( - ` + + } else { + //for non-OAI APIs + //console.log('-- Counting non-OAI Tokens'); + var finalPromptTokens = getTokenCount(itemizedPrompts[thisPromptSet].finalPromt); + var storyStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].storyString); + var examplesStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].examplesString); + var mesSendStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].mesSendString) + var ActualChatHistoryTokens = mesSendStringTokens - allAnchorsTokens + power_user.token_padding; + + var totalTokensInPrompt = + storyStringTokens + //chardefs total + worldInfoStringTokens + + ActualChatHistoryTokens + //chat history + allAnchorsTokens + // AN and/or legacy anchors + //afterScenarioAnchorTokens + //only counts if AN is set to 'after scenario' + //zeroDepthAnchorTokens + //same as above, even if AN not on 0 depth + promptBiasTokens; //{{}} + //- thisPrompt_padding; //not sure this way of calculating is correct, but the math results in same value as 'finalPromt' + } + + if (this_main_api == 'openai') { + //console.log('-- applying % on OAI tokens'); + var oaiStartTokensPercentage = ((oaiStartTokens / (finalPromptTokens)) * 100).toFixed(2); + var storyStringTokensPercentage = ((oaiPromptTokens / (finalPromptTokens)) * 100).toFixed(2); + var ActualChatHistoryTokensPercentage = ((ActualChatHistoryTokens / (finalPromptTokens)) * 100).toFixed(2); + var promptBiasTokensPercentage = ((oaiBiasTokens / (finalPromptTokens)) * 100).toFixed(2); + var worldInfoStringTokensPercentage = ((worldInfoStringTokens / (finalPromptTokens)) * 
100).toFixed(2); + var allAnchorsTokensPercentage = ((allAnchorsTokens / (finalPromptTokens)) * 100).toFixed(2); + var selectedTokenizer = $("#tokenizer").find(':selected').text(); + + } else { + //console.log('-- applying % on non-OAI tokens'); + var storyStringTokensPercentage = ((storyStringTokens / (totalTokensInPrompt)) * 100).toFixed(2); + var ActualChatHistoryTokensPercentage = ((ActualChatHistoryTokens / (totalTokensInPrompt)) * 100).toFixed(2); + var promptBiasTokensPercentage = ((promptBiasTokens / (totalTokensInPrompt)) * 100).toFixed(2); + var worldInfoStringTokensPercentage = ((worldInfoStringTokens / (totalTokensInPrompt)) * 100).toFixed(2); + var allAnchorsTokensPercentage = ((allAnchorsTokens / (totalTokensInPrompt)) * 100).toFixed(2); + var selectedTokenizer = $("#tokenizer").find(':selected').text(); + } + + if (this_main_api == 'openai') { + //console.log('-- calling popup for OAI tokens'); + callPopup( + `

Prompt Itemization

Tokenizer: ${selectedTokenizer}
+ API Used: ${this_main_api}
+
+ Only the white numbers really matter. All numbers are estimates.
+ Grey-colored items may not have been included in the context due to certain prompt format settings.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Chat Startup:
+
${oaiStartTokens}
+
+
+
+
+
Prompt Tokens:
+
${oaiPromptTokens}
+
+
+
-- Description:
+
${charDescriptionTokens}
+
+
+
-- Personality:
+
${charPersonalityTokens}
+
+
+
-- Scenario:
+
${scenarioTextTokens}
+
+
+
-- Examples:
+
${examplesStringTokens}
+
+
+
+
World Info:
+
${worldInfoStringTokens}
+
+
+
Chat History:
+
${ActualChatHistoryTokens}
+
+
+
+
Extensions:
+
${allAnchorsTokens}
+
+
+
-- Summarize:
+
${summarizeStringTokens}
+
+
+
-- Author's Note:
+
${authorsNoteStringTokens}
+
+
+
+
{{}} Bias:
${oaiBiasTokens}
+
+
+ +
+
+
+
+
Total Tokens in Prompt:
${finalPromptTokens}
+
+
+
Max Context:
${thisPrompt_max_context}
+
+
+
- Padding:
${thisPrompt_padding}
+
+
+
Actual Max Context Allowed:
${thisPrompt_max_context - thisPrompt_padding}
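// Worked example of how the totals above relate (hypothetical values only):
// the usable prompt budget is the max context minus the reserved token padding.
const exampleTotalTokens = 3326;      // assumed "Total Tokens in Prompt"
const exampleMaxContext = 4096;       // assumed "Max Context"
const examplePadding = 64;            // assumed "- Padding"
const exampleAllowed = exampleMaxContext - examplePadding;   // 4032, "Actual Max Context Allowed"
console.log(exampleTotalTokens <= exampleAllowed);           // true -> the prompt fits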
+
+
+
+
+ `, 'text'
+ );
+
+ } else {
+ //console.log('-- calling popup for non-OAI tokens');
+ callPopup(
+ `
+

Prompt Itemization

+ Tokenizer: ${selectedTokenizer}
+ API Used: ${this_main_api}
Only the white numbers really matter. All numbers are estimates.
Grey-colored items may not have been included in the context due to certain prompt format settings.
@@ -2569,7 +2778,8 @@ function promptItemize(itemizedPrompts, requestedMesId) {

`, 'text' - ); + ); + } } function setInContextMessages(lastmsg, type) { @@ -3295,8 +3505,10 @@ function changeMainAPI() { // Hide common settings for OpenAI if (selectedVal == "openai") { $("#common-gen-settings-block").css("display", "none"); + $("#token_breakdown").css("display", "flex"); } else { $("#common-gen-settings-block").css("display", "block"); + $("#token_breakdown").css("display", "none"); } // Hide amount gen for poe if (selectedVal == "poe") { diff --git a/public/scripts/openai.js b/public/scripts/openai.js index e8053b44a..88ad3d25f 100644 --- a/public/scripts/openai.js +++ b/public/scripts/openai.js @@ -101,6 +101,7 @@ const default_settings = { openai_model: 'gpt-3.5-turbo', jailbreak_system: false, reverse_proxy: '', + oai_breakdown: false, }; const oai_settings = { @@ -125,6 +126,7 @@ const oai_settings = { openai_model: 'gpt-3.5-turbo', jailbreak_system: false, reverse_proxy: '', + oai_breakdown: false, }; let openai_setting_names; @@ -317,16 +319,18 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI let openai_msgs_tosend = []; // todo: static value, maybe include in the initial context calculation + const handler_instance = new TokenHandler(countTokens); + let new_chat_msg = { "role": "system", "content": "[Start a new chat]" }; - let start_chat_count = countTokens([new_chat_msg], true); + let start_chat_count = handler_instance.count([new_chat_msg], true, 'start_chat'); await delay(1); - let total_count = countTokens([prompt_msg], true) + start_chat_count; + let total_count = handler_instance.count([prompt_msg], true, 'prompt') + start_chat_count; await delay(1); if (bias && bias.trim().length) { let bias_msg = { "role": "system", "content": bias.trim() }; openai_msgs.push(bias_msg); - total_count += countTokens([bias_msg], true); + total_count += handler_instance.count([bias_msg], true, 'bias'); await delay(1); } @@ -343,13 +347,14 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI openai_msgs.push(group_nudge); // add a group nudge count - let group_nudge_count = countTokens([group_nudge], true); + let group_nudge_count = handler_instance.count([group_nudge], true, 'nudge'); await delay(1); total_count += group_nudge_count; // recount tokens for new start message total_count -= start_chat_count - start_chat_count = countTokens([new_chat_msg], true); + handler_instance.uncount(start_chat_count, 'start_chat'); + start_chat_count = handler_instance.count([new_chat_msg], true); await delay(1); total_count += start_chat_count; } @@ -358,7 +363,7 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI const jailbreakMessage = { "role": "system", "content": substituteParams(oai_settings.jailbreak_prompt) }; openai_msgs.push(jailbreakMessage); - total_count += countTokens([jailbreakMessage], true); + total_count += handler_instance.count([jailbreakMessage], true, 'jailbreak'); await delay(1); } @@ -366,7 +371,7 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI const impersonateMessage = { "role": "system", "content": substituteParams(oai_settings.impersonation_prompt) }; openai_msgs.push(impersonateMessage); - total_count += countTokens([impersonateMessage], true); + total_count += handler_instance.count([impersonateMessage], true, 'impersonate'); await delay(1); } @@ -389,12 +394,12 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI examples_tosend.push(example); } } - total_count += countTokens(examples_tosend, 
true); + total_count += handler_instance.count(examples_tosend, true, 'examples'); await delay(1); // go from newest message to oldest, because we want to delete the older ones from the context for (let j = openai_msgs.length - 1; j >= 0; j--) { let item = openai_msgs[j]; - let item_count = countTokens(item, true); + let item_count = handler_instance.count(item, true, 'conversation'); await delay(1); // If we have enough space for this message, also account for the max assistant reply size if ((total_count + item_count) < (this_max_context - oai_settings.openai_max_tokens)) { @@ -403,13 +408,14 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI } else { // early break since if we still have more messages, they just won't fit anyway + handler_instance.uncount(item_count, 'conversation'); break; } } } else { for (let j = openai_msgs.length - 1; j >= 0; j--) { let item = openai_msgs[j]; - let item_count = countTokens(item, true); + let item_count = handler_instance.count(item, true, 'conversation'); await delay(1); // If we have enough space for this message, also account for the max assistant reply size if ((total_count + item_count) < (this_max_context - oai_settings.openai_max_tokens)) { @@ -418,11 +424,12 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI } else { // early break since if we still have more messages, they just won't fit anyway + handler_instance.uncount(item_count, 'conversation'); break; } } - console.log(total_count); + //console.log(total_count); // each example block contains multiple user/bot messages for (let example_block of openai_msgs_example) { @@ -432,7 +439,7 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI example_block = [new_chat_msg, ...example_block]; // add the block only if there is enough space for all its messages - const example_count = countTokens(example_block, true); + const example_count = handler_instance.count(example_block, true, 'examples'); await delay(1); if ((total_count + example_count) < (this_max_context - oai_settings.openai_max_tokens)) { examples_tosend.push(...example_block) @@ -440,6 +447,7 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI } else { // early break since more examples probably won't fit anyway + handler_instance.uncount(example_count, 'examples'); break; } } @@ -451,10 +459,14 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI openai_msgs_tosend.reverse(); openai_msgs_tosend = [prompt_msg, ...examples_tosend, new_chat_msg, ...openai_msgs_tosend] - console.log("We're sending this:") - console.log(openai_msgs_tosend); - console.log(`Calculated the total context to be ${total_count} tokens`); - return openai_msgs_tosend; + //console.log("We're sending this:") + //console.log(openai_msgs_tosend); + //console.log(`Calculated the total context to be ${total_count} tokens`); + handler_instance.log(); + return [ + openai_msgs_tosend, + oai_settings.oai_breakdown ? 
handler_instance.counts : false, + ]; } function getSystemPrompt(nsfw_toggle_prompt, enhance_definitions_prompt, wiBefore, storyString, wiAfter, extensionPrompt, isImpersonate) { @@ -616,9 +628,42 @@ async function calculateLogitBias() { } } +class TokenHandler { + constructor(countTokenFn) { + this.countTokenFn = countTokenFn; + this.counts = { + 'start_chat': 0, + 'prompt': 0, + 'bias': 0, + 'nudge': 0, + 'jailbreak': 0, + 'impersonate': 0, + 'examples': 0, + 'conversation': 0, + }; + } + + uncount(value, type) { + this.counts[type] -= value; + } + + count(messages, full, type) { + console.log(messages); + const token_count = this.countTokenFn(messages, full); + this.counts[type] += token_count; + + return token_count; + } + + log() { + const total = Object.values(this.counts).reduce((a, b) => a + b); + console.table({ ...this.counts, 'total': total }); + } +} + function countTokens(messages, full = false) { let chatId = 'undefined'; - + try { if (selected_group) { chatId = groups.find(x => x.id == selected_group)?.chat_id; @@ -705,6 +750,7 @@ function loadOpenAISettings(data, settings) { if (settings.nsfw_first !== undefined) oai_settings.nsfw_first = !!settings.nsfw_first; if (settings.openai_model !== undefined) oai_settings.openai_model = settings.openai_model; if (settings.jailbreak_system !== undefined) oai_settings.jailbreak_system = !!settings.jailbreak_system; + if (settings.oai_breakdown !== undefined) oai_settings.oai_breakdown = !!settings.oai_breakdown; $('#stream_toggle').prop('checked', oai_settings.stream_openai); @@ -720,6 +766,7 @@ function loadOpenAISettings(data, settings) { $('#wrap_in_quotes').prop('checked', oai_settings.wrap_in_quotes); $('#nsfw_first').prop('checked', oai_settings.nsfw_first); $('#jailbreak_system').prop('checked', oai_settings.jailbreak_system); + $('#oai_breakdown').prop('checked', oai_settings.oai_breakdown); if (settings.main_prompt !== undefined) oai_settings.main_prompt = settings.main_prompt; if (settings.nsfw_prompt !== undefined) oai_settings.nsfw_prompt = settings.nsfw_prompt; @@ -839,6 +886,7 @@ async function saveOpenAIPreset(name, settings) { jailbreak_system: settings.jailbreak_system, impersonation_prompt: settings.impersonation_prompt, bias_preset_selected: settings.bias_preset_selected, + oai_breakdown: settings.oai_breakdown, }; const savePresetSettings = await fetch(`/savepreset_openai?name=${name}`, { @@ -1046,7 +1094,7 @@ async function onDeletePresetClick() { const response = await fetch('/deletepreset_openai', { method: 'POST', headers: getRequestHeaders(), - body: JSON.stringify({name: nameToDelete}), + body: JSON.stringify({ name: nameToDelete }), }); if (!response.ok) { @@ -1097,6 +1145,7 @@ function onSettingsPresetChange() { wrap_in_quotes: ['#wrap_in_quotes', 'wrap_in_quotes', true], nsfw_first: ['#nsfw_first', 'nsfw_first', true], jailbreak_system: ['#jailbreak_system', 'jailbreak_system', true], + oai_breakdown: ['#oai_breakdown', 'oai_breakdown', true], main_prompt: ['#main_prompt_textarea', 'main_prompt', false], nsfw_prompt: ['#nsfw_prompt_textarea', 'nsfw_prompt', false], jailbreak_prompt: ['#jailbreak_prompt_textarea', 'jailbreak_prompt', false], @@ -1163,7 +1212,7 @@ function onReverseProxyInput() { async function onConnectButtonClick(e) { e.stopPropagation(); const api_key_openai = $('#api_key_openai').val().trim(); - + if (api_key_openai.length) { await writeSecret(SECRET_KEYS.OPENAI, api_key_openai); } @@ -1269,6 +1318,16 @@ $(document).ready(function () { saveSettingsDebounced(); }); + 
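// Minimal usage sketch for the TokenHandler class added above (message content and
// category are hypothetical; countTokens is the helper defined in this file).
const handler = new TokenHandler(countTokens);
const exampleMsg = { "role": "user", "content": "Hello there" };
const exampleCost = handler.count([exampleMsg], true, 'conversation'); // adds to counts.conversation
// if the message is later dropped for not fitting the context window, remove its
// contribution again so the breakdown stays accurate:
handler.uncount(exampleCost, 'conversation');
handler.log(); // console.table of every category plus the running total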
$("#oai_breakdown").on('change', function () { + oai_settings.oai_breakdown = !!$(this).prop("checked"); + if (!oai_settings.oai_breakdown) { + $("#token_breakdown").css('display', 'none'); + } else { + $("#token_breakdown").css('display', 'flex'); + } + saveSettingsDebounced(); + }); + // auto-select a preset based on character/group name $(document).on("click", ".character_select", function () { const chid = $(this).attr('chid'); @@ -1322,18 +1381,18 @@ $(document).ready(function () { saveSettingsDebounced(); }); - $("#api_button_openai").on('click', onConnectButtonClick); - $("#openai_reverse_proxy").on('input', onReverseProxyInput); - $("#model_openai_select").on('change', onModelChange); - $("#settings_perset_openai").on('change', onSettingsPresetChange); - $("#new_oai_preset").on('click', onNewPresetClick); - $("#delete_oai_preset").on('click', onDeletePresetClick); - $("#openai_api_usage").on('click', showApiKeyUsage); - $('#openai_logit_bias_preset').on('change', onLogitBiasPresetChange); - $('#openai_logit_bias_new_preset').on('click', createNewLogitBiasPreset); - $('#openai_logit_bias_new_entry').on('click', createNewLogitBiasEntry); - $('#openai_logit_bias_import_file').on('input', onLogitBiasPresetImportFileChange); - $('#openai_logit_bias_import_preset').on('click', onLogitBiasPresetImportClick); - $('#openai_logit_bias_export_preset').on('click', onLogitBiasPresetExportClick); - $('#openai_logit_bias_delete_preset').on('click', onLogitBiasPresetDeleteClick); + $("#api_button_openai").on("click", onConnectButtonClick); + $("#openai_reverse_proxy").on("input", onReverseProxyInput); + $("#model_openai_select").on("change", onModelChange); + $("#settings_perset_openai").on("change", onSettingsPresetChange); + $("#new_oai_preset").on("click", onNewPresetClick); + $("#delete_oai_preset").on("click", onDeletePresetClick); + $("#openai_api_usage").on("click", showApiKeyUsage); + $("#openai_logit_bias_preset").on("change", onLogitBiasPresetChange); + $("#openai_logit_bias_new_preset").on("click", createNewLogitBiasPreset); + $("#openai_logit_bias_new_entry").on("click", createNewLogitBiasEntry); + $("#openai_logit_bias_import_file").on("input", onLogitBiasPresetImportFileChange); + $("#openai_logit_bias_import_preset").on("click", onLogitBiasPresetImportClick); + $("#openai_logit_bias_export_preset").on("click", onLogitBiasPresetExportClick); + $("#openai_logit_bias_delete_preset").on("click", onLogitBiasPresetDeleteClick); }); From e69cbe9a11fa53842fcf7bdecd0872e10e416928 Mon Sep 17 00:00:00 2001 From: RossAscends <124905043+RossAscends@users.noreply.github.com> Date: Mon, 15 May 2023 01:13:32 +0900 Subject: [PATCH 13/64] forgot requirements for OAI itemization --- package-lock.json | 6 ++++++ package.json | 1 + 2 files changed, 7 insertions(+) diff --git a/package-lock.json b/package-lock.json index af472b031..f8daa5cf4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -34,6 +34,7 @@ "png-chunks-extract": "^1.0.0", "rimraf": "^3.0.2", "sanitize-filename": "^1.6.3", + "uniqolor": "^1.1.0", "webp-converter": "2.3.2", "ws": "^8.13.0", "yargs": "^17.7.1" @@ -1935,6 +1936,11 @@ "version": "0.0.6", "license": "MIT" }, + "node_modules/uniqolor": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/uniqolor/-/uniqolor-1.1.0.tgz", + "integrity": "sha512-j2XyokF24fsj+L5u6fbu4rM3RQc6VWJuAngYM2k0ZdG3yiVxt0smLkps2GmQIYqK8VkELGdM9vFU/HfOkK/zoQ==" + }, "node_modules/unpipe": { "version": "1.0.0", "license": "MIT", diff --git a/package.json b/package.json index 
b46c5a721..9457c7972 100644 --- a/package.json +++ b/package.json @@ -25,6 +25,7 @@ "png-chunks-extract": "^1.0.0", "rimraf": "^3.0.2", "sanitize-filename": "^1.6.3", + "uniqolor": "^1.1.0", "webp-converter": "2.3.2", "ws": "^8.13.0", "yargs": "^17.7.1" From d765e9bad98b064d5760232cecc29eaa37b22821 Mon Sep 17 00:00:00 2001 From: RossAscends <124905043+RossAscends@users.noreply.github.com> Date: Mon, 15 May 2023 01:14:46 +0900 Subject: [PATCH 14/64] add uniqolor.js --- public/scripts/uniqolor.js | 303 +++++++++++++++++++++++++++++++++++++ 1 file changed, 303 insertions(+) create mode 100644 public/scripts/uniqolor.js diff --git a/public/scripts/uniqolor.js b/public/scripts/uniqolor.js new file mode 100644 index 000000000..f2ceebd5c --- /dev/null +++ b/public/scripts/uniqolor.js @@ -0,0 +1,303 @@ +const SATURATION_BOUND = [0, 100]; +const LIGHTNESS_BOUND = [0, 100]; + +const pad2 = str => `${str.length === 1 ? '0' : ''}${str}`; + +const clamp = (num, min, max) => Math.max(Math.min(num, max), min); + +const random = (min, max) => Math.floor(Math.random() * ((max - min) + 1)) + min; + +const randomExclude = (min, max, exclude) => { + const r = random(min, max); + + for (let i = 0; i < exclude?.length; i++) { + const value = exclude[i]; + + if (value?.length === 2 && r >= value[0] && r <= value[1]) { + return randomExclude(min, max, exclude); + } + } + + return r; +}; + +/** + * Generate hashCode + * @param {string} str + * @return {number} + */ +const hashCode = str => { + const len = str.length; + let hash = 0; + + for (let i = 0; i < len; i++) { + hash = ((hash << 5) - hash) + str.charCodeAt(i); + hash &= hash; // Convert to 32bit integer + } + + return hash; +}; + +/** +* Clamps `num` within the inclusive `range` bounds +* @param {number} num +* @param {number|Array} range +* @return {number} +*/ +const boundHashCode = (num, range) => { + if (typeof range === 'number') { + return range; + } + + return (num % Math.abs(range[1] - range[0])) + range[0]; +}; + +/** + * Sanitizing the `range` + * @param {number|Array} range + * @param {Array} bound + * @return {number|Array} + */ +const sanitizeRange = (range, bound) => { + if (typeof range === 'number') { + return clamp(Math.abs(range), ...bound); + } + + if (range.length === 1 || range[0] === range[1]) { + return clamp(Math.abs(range[0]), ...bound); + } + + return [ + Math.abs(clamp(range[0], ...bound)), + clamp(Math.abs(range[1]), ...bound), + ]; +}; + +/** + * @param {number} p + * @param {number} q + * @param {number} t + * @return {number} + */ +const hueToRgb = (p, q, t) => { + if (t < 0) { + t += 1; + } else if (t > 1) { + t -= 1; + } + + if (t < 1 / 6) { + return p + ((q - p) * 6 * t); + } + + if (t < 1 / 2) { + return q; + } + + if (t < 2 / 3) { + return p + ((q - p) * ((2 / 3) - t) * 6); + } + + return p; +}; + +/** + * Converts an HSL color to RGB + * @param {number} h Hue + * @param {number} s Saturation + * @param {number} l Lightness + * @return {Array} + */ +const hslToRgb = (h, s, l) => { + let r; + let g; + let b; + + h /= 360; + s /= 100; + l /= 100; + + if (s === 0) { + // achromatic + r = g = b = l; + } else { + const q = l < 0.5 + ? 
l * (1 + s) + : (l + s) - (l * s); + const p = (2 * l) - q; + + r = hueToRgb(p, q, h + (1 / 3)); + g = hueToRgb(p, q, h); + b = hueToRgb(p, q, h - (1 / 3)); + } + + return [ + Math.round(r * 255), + Math.round(g * 255), + Math.round(b * 255), + ]; +}; + +/** + * Determines whether the RGB color is light or not + * http://www.w3.org/TR/AERT#color-contrast + * @param {number} r Red + * @param {number} g Green + * @param {number} b Blue + * @param {number} differencePoint + * @return {boolean} + */ +const rgbIsLight = (r, g, b, differencePoint) => ((r * 299) + (g * 587) + (b * 114)) / 1000 >= differencePoint; // eslint-disable-line max-len + +/** + * Converts an HSL color to string format + * @param {number} h Hue + * @param {number} s Saturation + * @param {number} l Lightness + * @return {string} + */ +const hslToString = (h, s, l) => `hsl(${h}, ${s}%, ${l}%)`; + +/** + * Converts RGB color to string format + * @param {number} r Red + * @param {number} g Green + * @param {number} b Blue + * @param {string} format Color format + * @return {string} + */ +const rgbFormat = (r, g, b, format) => { + switch (format) { + case 'rgb': + return `rgb(${r}, ${g}, ${b})`; + case 'hex': + default: + return `#${pad2(r.toString(16))}${pad2(g.toString(16))}${pad2(b.toString(16))}`; + } +}; + +/** + * Generate unique color from `value` + * @param {string|number} value + * @param {Object} [options={}] + * @param {string} [options.format='hex'] + * The color format, it can be one of `hex`, `rgb` or `hsl` + * @param {number|Array} [options.saturation=[50, 55]] + * Determines the color saturation, it can be a number or a range between 0 and 100 + * @param {number|Array} [options.lightness=[50, 60]] + * Determines the color lightness, it can be a number or a range between 0 and 100 + * @param {number} [options.differencePoint=130] + * Determines the color brightness difference point. We use it to obtain the `isLight` value + * in the output, it can be a number between 0 and 255 + * @return {Object} + * @example + * + * ```js + * uniqolor('Hello world!') + * // { color: "#5cc653", isLight: true } + * + * uniqolor('Hello world!', { format: 'rgb' }) + * // { color: "rgb(92, 198, 83)", isLight: true } + * + * uniqolor('Hello world!', { + * saturation: 30, + * lightness: [70, 80], + * }) + * // { color: "#afd2ac", isLight: true } + * + * uniqolor('Hello world!', { + * saturation: 30, + * lightness: [70, 80], + * differencePoint: 200, + * }) + * // { color: "#afd2ac", isLight: false } + * ``` + */ +const uniqolor = (value, { + format = 'hex', + saturation = [50, 55], + lightness = [50, 60], + differencePoint = 130, +} = {}) => { + const hash = Math.abs(hashCode(String(value))); + const h = boundHashCode(hash, [0, 360]); + const s = boundHashCode(hash, sanitizeRange(saturation, SATURATION_BOUND)); + const l = boundHashCode(hash, sanitizeRange(lightness, LIGHTNESS_BOUND)); + const [r, g, b] = hslToRgb(h, s, l); + + return { + color: format === 'hsl' + ? 
hslToString(h, s, l) + : rgbFormat(r, g, b, format), + isLight: rgbIsLight(r, g, b, differencePoint), + }; +}; + +/** + * Generate random color + * @param {Object} [options={}] + * @param {string} [options.format='hex'] + * The color format, it can be one of `hex`, `rgb` or `hsl` + * @param {number|Array} [options.saturation=[50, 55]] + * Determines the color saturation, it can be a number or a range between 0 and 100 + * @param {number|Array} [options.lightness=[50, 60]] + * Determines the color lightness, it can be a number or a range between 0 and 100 + * @param {number} [options.differencePoint=130] + * Determines the color brightness difference point. We use it to obtain the `isLight` value + * in the output, it can be a number between 0 and 255 + * @param {Array} [options.excludeHue] + * Exclude certain hue ranges. For example to exclude red color range: `[[0, 20], [325, 359]]` + * @return {Object} + * @example + * + * ```js + * // Generate random color + * uniqolor.random() + * // { color: "#644cc8", isLight: false } + * + * // Generate a random color with HSL format + * uniqolor.random({ format: 'hsl' }) + * // { color: "hsl(89, 55%, 60%)", isLight: true } + * + * // Generate a random color in specific saturation and lightness + * uniqolor.random({ + * saturation: 80, + * lightness: [70, 80], + * }) + * // { color: "#c7b9da", isLight: true } + * + * // Generate a random color but exclude red color range + * uniqolor.random({ + * excludeHue: [[0, 20], [325, 359]], + * }) + * // {color: '#53caab', isLight: true} + * ``` + */ +uniqolor.random = ({ + format = 'hex', + saturation = [50, 55], + lightness = [50, 60], + differencePoint = 130, + excludeHue, +} = {}) => { + saturation = sanitizeRange(saturation, SATURATION_BOUND); + lightness = sanitizeRange(lightness, LIGHTNESS_BOUND); + + const h = excludeHue ? randomExclude(0, 359, excludeHue) : random(0, 359); + const s = typeof saturation === 'number' + ? saturation + : random(...saturation); + const l = typeof lightness === 'number' + ? lightness + : random(...lightness); + const [r, g, b] = hslToRgb(h, s, l); + + return { + color: format === 'hsl' + ? hslToString(h, s, l) + : rgbFormat(r, g, b, format), + isLight: rgbIsLight(r, g, b, differencePoint), + }; +}; + +export default uniqolor; From 133caa58d2b3ab43f3f54946158eefdfae9a3aa4 Mon Sep 17 00:00:00 2001 From: RossAscends <124905043+RossAscends@users.noreply.github.com> Date: Mon, 15 May 2023 01:45:36 +0900 Subject: [PATCH 15/64] add in process files for OAI tokenization merge --- public/index.html | 20 +++++++++++++++++--- public/script.js | 30 ++++++++++++++++++++++++------ public/scripts/openai.js | 2 +- public/style.css | 12 ++++++++++++ 4 files changed, 54 insertions(+), 10 deletions(-) diff --git a/public/index.html b/public/index.html index e74b768d6..a120d3c67 100644 --- a/public/index.html +++ b/public/index.html @@ -366,6 +366,15 @@
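// Small sketch of the uniqolor helper added above (results not re-computed here):
// every field of the output is a pure function of hashCode(value), so the same input
// string always yields the same color - randomness only enters via uniqolor.random().
import uniqolor from './uniqolor.js';   // path assumed for illustration

const firstPick = uniqolor('world_info');
const secondPick = uniqolor('world_info');
console.log(firstPick.color === secondPick.color); // true - stable legend colors
console.log(firstPick.isLight);                    // contrast hint from rgbIsLight()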
+
+ +
+ Display a breakdown of the tokens used in the request. +
+
Context Size (tokens) @@ -1176,7 +1185,7 @@
-
+

Advanced Formatting @@ -2022,7 +2031,7 @@

-

- Advanced Defininitions +

- Advanced Definitions
@@ -2047,7 +2056,7 @@

Talkativeness

-
How often the chracter speaks in  group chats! +
How often the character speaks in  group chats!
@@ -2422,6 +2431,11 @@
+
diff --git a/public/script.js b/public/script.js index fd13ea4d7..3e925dcab 100644 --- a/public/script.js +++ b/public/script.js @@ -1157,7 +1157,7 @@ function addOneMessage(mes, { type = "normal", insertAfter = null, scroll = true } else if (params.isUser !== true) { //hide all when prompt cache is empty console.log('saw empty prompt cache, hiding all prompt buttons'); $(".mes_prompt").hide(); - console.log(itemizedPrompts); + //console.log(itemizedPrompts); } else { console.log('skipping prompt data for User Message'); } newMessage.find('.avatar img').on('error', function () { @@ -2250,6 +2250,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, breakdown_bar.empty(); const total = Object.values(counts).reduce((acc, val) => acc + val, 0); + console.log(`oai start tokens: ${Object.entries(counts)[0][1]}`); thisPromptBits.push({ oaiStartTokens: Object.entries(counts)[0][1], @@ -2350,7 +2351,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, thisPromptBits = additionalPromptStuff; - //console.log(thisPromptBits); + console.log(thisPromptBits); itemizedPrompts.push(thisPromptBits); //console.log(`pushed prompt bits to itemizedPrompts array. Length is now: ${itemizedPrompts.length}`); @@ -2361,6 +2362,23 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, hideSwipeButtons(); let getMessage = await streamingProcessor.generate(); + // Cohee: Basically a dead-end code... (disabled by isStreamingEnabled) + // I wasn't able to get multigen working with real streaming + // consistently without screwing the interim prompting + if (isMultigenEnabled()) { + tokens_already_generated += this_amount_gen; // add new gen amt to any prev gen counter.. + message_already_generated += getMessage; + promptBias = ''; + if (!streamingProcessor.isStopped && shouldContinueMultigen(getMessage, isImpersonate)) { + streamingProcessor.isFinished = false; + runGenerate(getMessage); + console.log('returning to make generate again'); + return; + } + + getMessage = message_already_generated; + } + if (streamingProcessor && !streamingProcessor.isStopped && streamingProcessor.isFinished) { streamingProcessor.onFinishStreaming(streamingProcessor.messageId, getMessage); streamingProcessor = null; @@ -2524,7 +2542,6 @@ function promptItemize(itemizedPrompts, requestedMesId) { var worldInfoStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].worldInfoString); var thisPrompt_max_context = itemizedPrompts[thisPromptSet].this_max_context; var thisPrompt_padding = itemizedPrompts[thisPromptSet].padding; - console.log(`"${itemizedPrompts[thisPromptSet].promptBias}"`); var promptBiasTokens = getTokenCount(itemizedPrompts[thisPromptSet].promptBias); var this_main_api = itemizedPrompts[thisPromptSet].main_api; @@ -2533,12 +2550,12 @@ function promptItemize(itemizedPrompts, requestedMesId) { //console.log('-- Counting OAI Tokens'); var finalPromptTokens = itemizedPrompts[thisPromptSet].oaiTotalTokens; var oaiStartTokens = itemizedPrompts[thisPromptSet].oaiStartTokens; + console.log(oaiStartTokens); var oaiPromptTokens = itemizedPrompts[thisPromptSet].oaiPromptTokens; var ActualChatHistoryTokens = itemizedPrompts[thisPromptSet].oaiConversationTokens; var examplesStringTokens = itemizedPrompts[thisPromptSet].oaiExamplesTokens; var oaiBiasTokens = itemizedPrompts[thisPromptSet].oaiBiasTokens; var oaiJailbreakTokens = itemizedPrompts[thisPromptSet].oaiJailbreakTokens; - var oaiStartTokens = itemizedPrompts[thisPromptSet].oaiStartTokens; 
var oaiNudgeTokens = itemizedPrompts[thisPromptSet].oaiNudgeTokens; var oaiImpersonateTokens = itemizedPrompts[thisPromptSet].oaiImpersonateTokens; @@ -2566,6 +2583,7 @@ function promptItemize(itemizedPrompts, requestedMesId) { if (this_main_api == 'openai') { //console.log('-- applying % on OAI tokens'); var oaiStartTokensPercentage = ((oaiStartTokens / (finalPromptTokens)) * 100).toFixed(2); + console.log(oaiStartTokensPercentage); var storyStringTokensPercentage = ((oaiPromptTokens / (finalPromptTokens)) * 100).toFixed(2); var ActualChatHistoryTokensPercentage = ((ActualChatHistoryTokens / (finalPromptTokens)) * 100).toFixed(2); var promptBiasTokensPercentage = ((oaiBiasTokens / (finalPromptTokens)) * 100).toFixed(2); @@ -3504,10 +3522,10 @@ function changeMainAPI() { // Hide common settings for OpenAI if (selectedVal == "openai") { $("#common-gen-settings-block").css("display", "none"); - $("#token_breakdown").css("display", "flex"); + //$("#token_breakdown").css("display", "flex"); } else { $("#common-gen-settings-block").css("display", "block"); - $("#token_breakdown").css("display", "none"); + //$("#token_breakdown").css("display", "none"); } // Hide amount gen for poe if (selectedVal == "poe") { diff --git a/public/scripts/openai.js b/public/scripts/openai.js index 88ad3d25f..d6b91840f 100644 --- a/public/scripts/openai.js +++ b/public/scripts/openai.js @@ -648,7 +648,7 @@ class TokenHandler { } count(messages, full, type) { - console.log(messages); + //console.log(messages); const token_count = this.countTokenFn(messages, full); this.counts[type] += token_count; diff --git a/public/style.css b/public/style.css index e3f0a1442..ab0fa88bd 100644 --- a/public/style.css +++ b/public/style.css @@ -385,6 +385,18 @@ code { justify-content: center; } +#token_breakdown div { + display: flex; + width: 100%; + justify-content: center; +} + +.token_breakdown_segment { + min-width: 40px !important; + border: solid 2px; + border-radius: 5px; +} + #loading_mes { display: none; From 6a94bb5063a90ed6b68eac545fb91a4a5b5b8208 Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Sun, 14 May 2023 19:47:32 +0300 Subject: [PATCH 16/64] Old anchors removed --- public/index.html | 43 +++++++------------ public/notes/content.md | 24 +++-------- public/script.js | 93 ++-------------------------------------- public/scripts/openai.js | 16 +------ 4 files changed, 27 insertions(+), 149 deletions(-) diff --git a/public/index.html b/public/index.html index 78422fd89..2d8aab041 100644 --- a/public/index.html +++ b/public/index.html @@ -1348,23 +1348,12 @@

- Anchors Order - - ? - + Anchors

- -
- - -
+

+ This feature is obsolete and has been removed. + Something else is coming soon in its place! +

@@ -1925,6 +1914,17 @@
+
+
+ Current Members +
+
+
+
+
+
+
+
Add Members @@ -1940,17 +1940,6 @@
-
-
- Current Members -
-
-
-
-
-
-
-
diff --git a/public/notes/content.md b/public/notes/content.md index edbf848d2..f0920c276 100644 --- a/public/notes/content.md +++ b/public/notes/content.md @@ -393,26 +393,9 @@ _Lost API keys can't be restored! Make sure to keep it safe!_ ## Anchors -Anchors are used to increase the length of messages. -There are two types of anchors: _Character Anchor_ and _Style Anchor_. +This feature is considered obsolete and has been removed. -_Character Anchor_ - affects the character played by the AI by motivating it to write longer messages. - -Looks like: `[Elaborate speaker]` - -_Style Anchor_ - affects the entire AI model, motivating the AI to write longer messages even when it is not acting as the character. - -Looks like: `[Writing style: very long messages]` - -*** - -Anchors Order sets the location of anchors in the prompt, the first anchor in the order is much further back in the context and thus has less influence than second. - -The second anchor is only turned on after 8-12 messages, because when the chat still only has a few messages, the first anchor creates enough effect on its own. - -Sometimes an AI model may not perceive anchors correctly or the AI model already generates sufficiently long messages. For these cases, you can disable the anchors by unchecking their respective boxes. - -_When using Pygmalion models these anchors are automatically disabled, since Pygmalion already generates long enough messages._ +The use of the Author's Note extension is now a preferred way to add prompt injections of variable depth. ## Instruct Mode @@ -594,6 +577,8 @@ Characters are drafted based on the order they are presented in group members li ## Multigen +*This feature provides a pseudo-streaming functionality which conflicts with token streaming. When Multigen is enabled and generation API supports streaming, only Multigen streaming will be used.* + SillyTavern tries to create faster and longer responses by chaining the generation using smaller batches. ### Default settings: @@ -614,6 +599,7 @@ Next batches = 30 tokens 2. Character starts speaking for You. 3. <|endoftext|> token reached. 4. No text generated. +5. Stop sequence generated. (Instruct mode only) ## User Settings diff --git a/public/script.js b/public/script.js index 64bdfa4a9..f864a300f 100644 --- a/public/script.js +++ b/public/script.js @@ -531,10 +531,6 @@ var message_already_generated = ""; var cycle_count_generation = 0; var swipes = true; - -let anchor_order = 0; -let style_anchor = true; -let character_anchor = true; let extension_prompts = {}; var main_api;// = "kobold"; @@ -1683,29 +1679,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, // bias from the latest message is top priority// promptBias = messageBias ?? promptBias ?? ''; - // Compute anchors - const topAnchorDepth = 8; - const bottomAnchorThreshold = 8; - let anchorTop = ''; - let anchorBottom = ''; - if (!is_pygmalion) { - console.log('saw not pyg'); - - let postAnchorChar = character_anchor ? name2 + " Elaborate speaker" : ""; - let postAnchorStyle = style_anchor ? 
"Writing style: very long messages" : ""; - if (anchor_order === 0) { - anchorTop = postAnchorChar; - anchorBottom = postAnchorStyle; - } else { // anchor_order === 1 - anchorTop = postAnchorStyle; - anchorBottom = postAnchorChar; - } - - if (anchorBottom) { - anchorBottom = "[" + anchorBottom + "]"; - } - } - //********************************* //PRE FORMATING STRING //********************************* @@ -1761,6 +1734,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, console.log(`Core/all messages: ${coreChat.length}/${chat.length}`); if (main_api === 'openai') { + message_already_generated = ''; // OpenAI doesn't have multigen setOpenAIMessages(coreChat, quiet_prompt); setOpenAIMessageExamples(mesExamplesArray); } @@ -1773,11 +1747,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, storyString += appendToStoryString(Scenario, power_user.disable_scenario_formatting ? '' : 'Scenario: '); } else { storyString += appendToStoryString(charDescription, ''); - - if (coreChat.length < topAnchorDepth) { - storyString += appendToStoryString(charPersonality, power_user.disable_personality_formatting ? '' : name2 + "'s personality: "); - } - + storyString += appendToStoryString(charPersonality, power_user.disable_personality_formatting ? '' : name2 + "'s personality: "); storyString += appendToStoryString(Scenario, power_user.disable_scenario_formatting ? '' : 'Circumstances and context of the dialogue: '); } @@ -1886,9 +1856,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, storyString, examplesString, chatString, - anchorTop, - anchorBottom, - charPersonality, promptBias, allAnchors, quiet_prompt, @@ -1950,7 +1917,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, generatedPromtCache += cycleGenerationPromt; if (generatedPromtCache.length == 0) { if (main_api === 'openai') { - generateOpenAIPromptCache(charPersonality, topAnchorDepth, anchorTop, bottomAnchorThreshold, anchorBottom); + generateOpenAIPromptCache(); } console.log('generating prompt'); @@ -1973,21 +1940,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, //item = item.substr(0, item.length - 1); } } - if (i === arrMes.length - topAnchorDepth && !is_pygmalion) { - //chatString = chatString.substr(0,chatString.length-1); - //anchorAndPersonality = "[Genre: roleplay chat][Tone: very long messages with descriptions]"; - let personalityAndAnchor = [charPersonality, anchorTop].filter(x => x).join(' '); - if (personalityAndAnchor) { - item += "[" + personalityAndAnchor + "]\n"; - } - } - if (i === arrMes.length - 1 && coreChat.length > bottomAnchorThreshold && item.trim().startsWith(name1 + ":") && !is_pygmalion) {//For add anchor in end - //chatString+=postAnchor+"\n";//"[Writing style: very long messages]\n"; - if (anchorBottom) { - item = item.replace(/\n$/, " "); - item += anchorBottom + "\n"; - } - } if (is_pygmalion && !isInstruct) { if (i === arrMes.length - 1 && item.trim().startsWith(name1 + ":")) {//for add name2 when user sent item = item + name2 + ":"; @@ -2075,9 +2027,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, storyString, mesExmString, mesSendString, - anchorTop, - anchorBottom, - charPersonality, generatedPromtCache, promptBias, allAnchors, @@ -3459,30 +3408,15 @@ async function getSettings(type) { `#settings_perset_novel option[value=${novelai_setting_names[nai_settings.preset_settings_novel]}]` 
).attr("selected", "true"); - //Load AI model config settings (temp, context length, anchors, and anchor order) + //Load AI model config settings amount_gen = settings.amount_gen; if (settings.max_context !== undefined) max_context = parseInt(settings.max_context); - if (settings.anchor_order !== undefined) - anchor_order = parseInt(settings.anchor_order); - if (settings.style_anchor !== undefined) - style_anchor = !!settings.style_anchor; - if (settings.character_anchor !== undefined) - character_anchor = !!settings.character_anchor; - - $("#style_anchor").prop("checked", style_anchor); - $("#character_anchor").prop("checked", character_anchor); - $("#anchor_order option[value=" + anchor_order + "]").attr( - "selected", - "true" - ); swipes = settings.swipes !== undefined ? !!settings.swipes : true; // enable swipes by default $('#swipes-checkbox').prop('checked', swipes); /// swipecode - //console.log('getSettings -- swipes = ' + swipes + '. toggling box'); hideSwipeButtons(); - //console.log('getsettings calling showswipebtns'); showSwipeButtons(); // Kobold @@ -3580,9 +3514,6 @@ async function saveSettings(type) { user_avatar: user_avatar, amount_gen: amount_gen, max_context: max_context, - anchor_order: anchor_order, - style_anchor: style_anchor, - character_anchor: character_anchor, main_api: main_api, world_info: world_info, world_info_depth: world_info_depth, @@ -5519,17 +5450,6 @@ $(document).ready(function () { ////////////////////////////////////////////////////////////// - - $("#style_anchor").change(function () { - style_anchor = !!$("#style_anchor").prop("checked"); - saveSettingsDebounced(); - }); - - $("#character_anchor").change(function () { - character_anchor = !!$("#character_anchor").prop("checked"); - saveSettingsDebounced(); - }); - $("#select_chat_cross").click(function () { $("#shadow_select_chat_popup").transition({ opacity: 0, @@ -5809,11 +5729,6 @@ $(document).ready(function () { is_api_button_press_novel = true; }); - $("#anchor_order").change(function () { - anchor_order = parseInt($("#anchor_order").find(":selected").val()); - saveSettingsDebounced(); - }); - //**************************CHARACTER IMPORT EXPORT*************************// $("#character_import_button").click(function () { $("#character_import_file").click(); diff --git a/public/scripts/openai.js b/public/scripts/openai.js index e8053b44a..d4d206456 100644 --- a/public/scripts/openai.js +++ b/public/scripts/openai.js @@ -205,22 +205,10 @@ function setOpenAIMessageExamples(mesExamplesArray) { } } -function generateOpenAIPromptCache(charPersonality, topAnchorDepth, anchorTop, bottomAnchorThreshold, anchorBottom) { +function generateOpenAIPromptCache() { openai_msgs = openai_msgs.reverse(); - openai_msgs.forEach(function (msg, i, arr) {//For added anchors and others + openai_msgs.forEach(function (msg, i, arr) { let item = msg["content"]; - if (i === openai_msgs.length - topAnchorDepth) { - let personalityAndAnchor = [charPersonality, anchorTop].filter(x => x).join(' '); - if (personalityAndAnchor) { - item = `[${name2} is ${personalityAndAnchor}]\n${item}`; - } - } - if (i === openai_msgs.length - 1 && openai_msgs.length > bottomAnchorThreshold && msg.role === "user") {//For add anchor in end - if (anchorBottom) { - item = anchorBottom + "\n" + item; - } - } - msg["content"] = item; openai_msgs[i] = msg; }); From afd2e810a8abac433fc5f14253ef9812cfdbf95c Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Sun, 14 May 2023 20:17:14 +0300 Subject: [PATCH 17/64] Fix OAI tokenization --- public/script.js | 
24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/public/script.js b/public/script.js index ac35d41b2..82b2e5de9 100644 --- a/public/script.js +++ b/public/script.js @@ -408,18 +408,21 @@ async function getClientVersion() { } } -function getTokenCount(str, padding = 0) { +function getTokenCount(str, padding = undefined) { let tokenizerType = power_user.tokenizer; if (main_api === 'openai') { - // For main prompt building - if (padding == power_user.token_padding) { + if (padding === power_user.token_padding) { + // For main "shadow" prompt building tokenizerType = tokenizers.NONE; - // For extensions and WI } else { + // For extensions and WI return getTokenCountOpenAI(str); } + } + if (padding === undefined) { + padding = 0; } switch (tokenizerType) { @@ -2190,7 +2193,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, if (main_api == 'openai') { let [prompt, counts] = await prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldInfoAfter, afterScenarioAnchor, promptBias, type); - // counts will return false if the user has not enabled the token breakdown feature if (counts) { @@ -2198,7 +2200,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, const breakdown_bar = $('#token_breakdown div:first-child'); breakdown_bar.empty(); - const total = Object.values(counts).reduce((acc, val) => acc + val, 0); + const total = Object.values(counts).filter(x => !Number.isNaN(x)).reduce((acc, val) => acc + val, 0); console.log(`oai start tokens: ${Object.entries(counts)[0][1]}`); thisPromptBits.push({ @@ -2507,8 +2509,10 @@ function promptItemize(itemizedPrompts, requestedMesId) { var oaiJailbreakTokens = itemizedPrompts[thisPromptSet].oaiJailbreakTokens; var oaiNudgeTokens = itemizedPrompts[thisPromptSet].oaiNudgeTokens; var oaiImpersonateTokens = itemizedPrompts[thisPromptSet].oaiImpersonateTokens; - - + // OAI doesn't use padding + thisPrompt_padding = 0; + // Max context size - max completion tokens + thisPrompt_max_context = (oai_settings.openai_max_context - oai_settings.openai_max_tokens); } else { //for non-OAI APIs //console.log('-- Counting non-OAI Tokens'); @@ -2538,7 +2542,7 @@ function promptItemize(itemizedPrompts, requestedMesId) { var promptBiasTokensPercentage = ((oaiBiasTokens / (finalPromptTokens)) * 100).toFixed(2); var worldInfoStringTokensPercentage = ((worldInfoStringTokens / (finalPromptTokens)) * 100).toFixed(2); var allAnchorsTokensPercentage = ((allAnchorsTokens / (finalPromptTokens)) * 100).toFixed(2); - var selectedTokenizer = $("#tokenizer").find(':selected').text(); + var selectedTokenizer = `tiktoken (${oai_settings.openai_model})`; } else { //console.log('-- applying % on non-OAI tokens'); @@ -2635,7 +2639,7 @@ function promptItemize(itemizedPrompts, requestedMesId) {
Total Tokens in Prompt:
${finalPromptTokens}
-
Max Context:
${thisPrompt_max_context}
+
Max Context (Context Size - Response Length):
${thisPrompt_max_context}
- Padding:
${thisPrompt_padding}
From 291e23e2f54bd172d47cf6a2ad2bfe6c3385b954 Mon Sep 17 00:00:00 2001 From: RossAscends <124905043+RossAscends@users.noreply.github.com> Date: Mon, 15 May 2023 02:28:17 +0900 Subject: [PATCH 18/64] update OAI itemization --- public/script.js | 57 +++++++++++++++++++++++++++++++++++------------- 1 file changed, 42 insertions(+), 15 deletions(-) diff --git a/public/script.js b/public/script.js index ac35d41b2..baff9a7a4 100644 --- a/public/script.js +++ b/public/script.js @@ -1139,7 +1139,7 @@ function addOneMessage(mes, { type = "normal", insertAfter = null, scroll = true //shows or hides the Prompt display button let mesIdToFind = Number(newMessage.attr('mesId')); if (itemizedPrompts.length !== 0) { - console.log(`itemizedPrompt.length = ${itemizedPrompts.length}`) + //console.log(`itemizedPrompt.length = ${itemizedPrompts.length}`) for (var i = 0; i < itemizedPrompts.length; i++) { if (itemizedPrompts[i].mesId === mesIdToFind) { newMessage.find(".mes_prompt").show(); @@ -2300,7 +2300,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, thisPromptBits = additionalPromptStuff; - console.log(thisPromptBits); + //console.log(thisPromptBits); itemizedPrompts.push(thisPromptBits); //console.log(`pushed prompt bits to itemizedPrompts array. Length is now: ${itemizedPrompts.length}`); @@ -2497,9 +2497,9 @@ function promptItemize(itemizedPrompts, requestedMesId) { if (this_main_api == 'openai') { //for OAI API //console.log('-- Counting OAI Tokens'); - var finalPromptTokens = itemizedPrompts[thisPromptSet].oaiTotalTokens; + + //var finalPromptTokens = itemizedPrompts[thisPromptSet].oaiTotalTokens; var oaiStartTokens = itemizedPrompts[thisPromptSet].oaiStartTokens; - console.log(oaiStartTokens); var oaiPromptTokens = itemizedPrompts[thisPromptSet].oaiPromptTokens; var ActualChatHistoryTokens = itemizedPrompts[thisPromptSet].oaiConversationTokens; var examplesStringTokens = itemizedPrompts[thisPromptSet].oaiExamplesTokens; @@ -2507,6 +2507,18 @@ function promptItemize(itemizedPrompts, requestedMesId) { var oaiJailbreakTokens = itemizedPrompts[thisPromptSet].oaiJailbreakTokens; var oaiNudgeTokens = itemizedPrompts[thisPromptSet].oaiNudgeTokens; var oaiImpersonateTokens = itemizedPrompts[thisPromptSet].oaiImpersonateTokens; + var finalPromptTokens = + oaiBiasTokens + + oaiImpersonateTokens + + oaiJailbreakTokens + + oaiNudgeTokens + + oaiPromptTokens + + ActualChatHistoryTokens + + charDescriptionTokens + + charPersonalityTokens + + allAnchorsTokens + + worldInfoStringTokens + + examplesStringTokens; } else { @@ -2532,13 +2544,14 @@ function promptItemize(itemizedPrompts, requestedMesId) { if (this_main_api == 'openai') { //console.log('-- applying % on OAI tokens'); var oaiStartTokensPercentage = ((oaiStartTokens / (finalPromptTokens)) * 100).toFixed(2); - console.log(oaiStartTokensPercentage); var storyStringTokensPercentage = ((oaiPromptTokens / (finalPromptTokens)) * 100).toFixed(2); var ActualChatHistoryTokensPercentage = ((ActualChatHistoryTokens / (finalPromptTokens)) * 100).toFixed(2); var promptBiasTokensPercentage = ((oaiBiasTokens / (finalPromptTokens)) * 100).toFixed(2); var worldInfoStringTokensPercentage = ((worldInfoStringTokens / (finalPromptTokens)) * 100).toFixed(2); var allAnchorsTokensPercentage = ((allAnchorsTokens / (finalPromptTokens)) * 100).toFixed(2); var selectedTokenizer = $("#tokenizer").find(':selected').text(); + var oaiSystemTokens = oaiStartTokens + oaiImpersonateTokens + oaiNudgeTokens + oaiJailbreakTokens; + var 
oaiSystemTokensPercentage = ((oaiSystemTokens / (finalPromptTokens)) * 100).toFixed(2); } else { //console.log('-- applying % on non-OAI tokens'); @@ -2555,7 +2568,7 @@ function promptItemize(itemizedPrompts, requestedMesId) { callPopup( `

Prompt Itemization

- Tokenizer: ${selectedTokenizer}
+ Tokenizer: TikToken
API Used: ${this_main_api}
Only the white numbers really matter. All numbers are estimates. @@ -2565,7 +2578,7 @@ function promptItemize(itemizedPrompts, requestedMesId) {
-
+
@@ -2575,8 +2588,28 @@ function promptItemize(itemizedPrompts, requestedMesId) {
-
Chat Startup:
-
${oaiStartTokens}
+
System Info:
+
${oaiSystemTokens}
+
+
+
-- Chat Start:
+
${oaiStartTokens}
+
+
+
-- Jailbreak:
+
${oaiJailbreakTokens}
+
+
+
-- NSFW:
+
${oaiSystemTokens}
+
+
+
-- Nudge:
+
${oaiNudgeTokens}
+
+
+
-- Impersonate:
+
${oaiImpersonateTokens}
@@ -2637,12 +2670,6 @@ function promptItemize(itemizedPrompts, requestedMesId) {
Max Context:
${thisPrompt_max_context}
-
-
- Padding:
${thisPrompt_padding}
-
-
-
Actual Max Context Allowed:
${thisPrompt_max_context - thisPrompt_padding}
-

From e8eb1ac36b0472d22a5b10cb15aaa6aa04346e1a Mon Sep 17 00:00:00 2001 From: RossAscends <124905043+RossAscends@users.noreply.github.com> Date: Mon, 15 May 2023 02:36:52 +0900 Subject: [PATCH 19/64] fix items lost on merge --- public/script.js | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/public/script.js b/public/script.js index 270c8de39..7d15cd23c 100644 --- a/public/script.js +++ b/public/script.js @@ -2201,7 +2201,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, breakdown_bar.empty(); const total = Object.values(counts).filter(x => !Number.isNaN(x)).reduce((acc, val) => acc + val, 0); - console.log(`oai start tokens: ${Object.entries(counts)[0][1]}`); thisPromptBits.push({ oaiStartTokens: Object.entries(counts)[0][1], @@ -2215,9 +2214,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, oaiTotalTokens: total, }) - - console.log(`added OAI prompt bits to array`); - Object.entries(counts).forEach(([type, value]) => { if (value === 0) { return; @@ -2554,6 +2550,8 @@ function promptItemize(itemizedPrompts, requestedMesId) { var worldInfoStringTokensPercentage = ((worldInfoStringTokens / (finalPromptTokens)) * 100).toFixed(2); var allAnchorsTokensPercentage = ((allAnchorsTokens / (finalPromptTokens)) * 100).toFixed(2); var selectedTokenizer = `tiktoken (${oai_settings.openai_model})`; + var oaiSystemTokens = oaiImpersonateTokens + oaiJailbreakTokens + oaiNudgeTokens + oaiStartTokens; + var oaiSystemTokensPercentage = ((oaiSystemTokens / (finalPromptTokens)) * 100).toFixed(2); } else { //console.log('-- applying % on non-OAI tokens'); @@ -2603,7 +2601,7 @@ function promptItemize(itemizedPrompts, requestedMesId) {
                -- NSFW:
-                   ${oaiSystemTokens}
+                   ??
                -- Nudge:

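Both addOneMessage and promptItemize locate the cached entry for a message by walking itemizedPrompts with an index loop and comparing mesId. An equivalent lookup written with Array.prototype.find — a sketch of the idea, not the code these patches ship:

function findItemizedPrompt(itemizedPrompts, mesId) {
    // entries are assumed to carry a numeric mesId, as in the patches above
    return itemizedPrompts.find(entry => entry.mesId === Number(mesId)) ?? null;
}

// const prompts = [{ mesId: 3 }, { mesId: 5 }];
// findItemizedPrompt(prompts, '5') -> { mesId: 5 }
// findItemizedPrompt(prompts, 7)   -> null
// The .mes_prompt button would then be shown only when the lookup returns an entry.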
From dbab6fc26aa019424968981aa89c631e4687fac8 Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Sun, 14 May 2023 22:37:02 +0300 Subject: [PATCH 20/64] Add a button to narrate only one message with TTS --- public/index.html | 1 + public/scripts/extensions/tts/index.js | 44 ++++++++++++++++++++++++-- public/style.css | 13 ++++++++ 3 files changed, 56 insertions(+), 2 deletions(-) diff --git a/public/index.html b/public/index.html index 2a37d88c7..e68c9fa14 100644 --- a/public/index.html +++ b/public/index.html @@ -2325,6 +2325,7 @@ ${characterName}
+
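The tts/index.js changes below introduce moduleWorkerWrapper, a busy-flag guard that drops a tick if the previous worker run has not finished. A generic sketch of that pattern with placeholder names:

function makeNonOverlapping(worker) {
    let busy = false;
    return async function wrapped() {
        if (busy) {
            return; // previous run still in flight; skip this tick
        }
        try {
            busy = true;
            await worker();
        } finally {
            busy = false;
        }
    };
}

// Usage sketch: poll on an interval without ever overlapping two runs.
// setInterval(makeNonOverlapping(async () => { /* scan chat, queue new TTS jobs */ }), 1000);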
diff --git a/public/scripts/extensions/tts/index.js b/public/scripts/extensions/tts/index.js index 4eeded46c..31c23ce8b 100644 --- a/public/scripts/extensions/tts/index.js +++ b/public/scripts/extensions/tts/index.js @@ -1,4 +1,4 @@ -import { callPopup, isMultigenEnabled, is_send_press, saveSettingsDebounced } from '../../../script.js' +import { callPopup, cancelTtsPlay, isMultigenEnabled, is_send_press, saveSettingsDebounced } from '../../../script.js' import { extension_settings, getContext } from '../../extensions.js' import { getStringHash } from '../../utils.js' import { ElevenLabsTtsProvider } from './elevenlabs.js' @@ -24,9 +24,47 @@ let ttsProviders = { let ttsProvider let ttsProviderName +async function onNarrateOneMessage() { + cancelTtsPlay(); + const context = getContext(); + const id = $(this).closest('.mes').attr('mesid'); + const message = context.chat[id]; + + if (!message) { + return; + } + + currentTtsJob = null; + audioElement.pause(); + audioElement.currentTime = 0; + ttsJobQueue.splice(0, ttsJobQueue.length); + audioJobQueue.splice(0, audioJobQueue.length); + ttsJobQueue.push(message); + moduleWorker(); +} + +let isWorkerBusy = false; + +async function moduleWorkerWrapper() { + // Don't touch me I'm busy... + if (isWorkerBusy) { + return; + } + + // I'm free. Let's update! + try { + isWorkerBusy = true; + await moduleWorker(); + } + finally { + isWorkerBusy = false; + } +} + async function moduleWorker() { // Primarily determinign when to add new chat to the TTS queue const enabled = $('#tts_enabled').is(':checked') + $('body').toggleClass('tts', enabled); if (!enabled) { return } @@ -296,6 +334,7 @@ function loadSettings() { ) $('#tts_narrate_dialogues').prop('checked', extension_settings.tts.narrate_dialogues_only) $('#tts_narrate_quoted').prop('checked', extension_settings.tts.narrate_quoted_only) + $('body').toggleClass('tts', extension_settings.tts.enabled); } const defaultSettings = { @@ -507,10 +546,11 @@ $(document).ready(function () { $('#tts_provider').append($("
+ +
-
+
@@ -2092,10 +2094,11 @@
-   +
+
@@ -2184,14 +2187,14 @@
diff --git a/public/scripts/RossAscends-mods.js b/public/scripts/RossAscends-mods.js index b17729753..a6e5d228a 100644 --- a/public/scripts/RossAscends-mods.js +++ b/public/scripts/RossAscends-mods.js @@ -438,6 +438,7 @@ dragElement(document.getElementById("sheld")); dragElement(document.getElementById("left-nav-panel")); dragElement(document.getElementById("right-nav-panel")); dragElement(document.getElementById("avatar_zoom_popup")); +dragElement(document.getElementById("world_popup")); diff --git a/public/style.css b/public/style.css index 64cb5a345..e74e8403f 100644 --- a/public/style.css +++ b/public/style.css @@ -1549,6 +1549,7 @@ input[type=search]:focus::-webkit-search-cancel-button { display: flex; align-items: center; justify-content: center; + text-align: center; } .avatar_div .menu_button, @@ -1672,7 +1673,9 @@ input[type=search]:focus::-webkit-search-cancel-button { background-color: var(--SmartThemeBlurTintColor); backdrop-filter: blur(calc(var(--SmartThemeBlurStrength) * 2)); max-width: var(--sheldWidth); - height: calc(100% - 40px); + max-height: calc(100% - 100px); + min-height: 100px; + min-width: 100px; position: absolute; margin-left: auto; margin-right: auto; @@ -1680,18 +1683,19 @@ input[type=search]:focus::-webkit-search-cancel-button { right: 0; top: 40px; box-shadow: 0 0 2px rgba(0, 0, 0, 0.5); - padding: 4px; + padding: 10px; flex-direction: column; z-index: 3010; border-radius: 0 0 20px 20px; + overflow-y: auto; } #world_popup_bottom_holder { - padding: 0.5rem 0; - margin: 0 18px; + /* padding: 0.5rem 0; + margin: 0 18px; */ display: flex; flex-direction: row; - justify-content: flex-end; + justify-content: space-evenly; align-items: center; } @@ -1728,7 +1732,7 @@ input[type=search]:focus::-webkit-search-cancel-button { display: flex; flex-direction: row; align-items: center; - margin-left: 18px; + /* margin-left: 18px; */ } #world_popup_header h3 { @@ -1825,7 +1829,7 @@ input[type=search]:focus::-webkit-search-cancel-button { .world_entry_form_control textarea { height: auto; - width: auto; + /* width: auto; */ margin-top: 0; } @@ -1833,6 +1837,7 @@ input[type=search]:focus::-webkit-search-cancel-button { flex-direction: row; align-items: center; margin-top: 5px; + flex-wrap: wrap; } .world_entry_form_control input[type=button] { @@ -2294,8 +2299,10 @@ input[type="range"]::-webkit-slider-thumb { -webkit-backdrop-filter: blur(calc(var(--SmartThemeBlurStrength)*2)); grid-template-rows: 50px 1fr 1fr 1fr 5fr; grid-gap: 10px; + min-height: 100px; + min-width: 100px; max-width: var(--sheldWidth); - height: calc(100svh - 40px); + max-height: calc(100svh - 100px); position: absolute; z-index: 3002; margin-left: auto; @@ -2310,6 +2317,7 @@ input[type="range"]::-webkit-slider-thumb { padding-bottom: 30px; border: 1px solid var(--black30a); border-radius: 0 0 20px 20px; + overflow-y: auto; } #character_popup h3 { @@ -3834,7 +3842,8 @@ body.movingUI .drag-grabber { body.movingUI #sheld, body.movingUI .drawer-content, body.movingUI #expression-holder, -body.movingUI #avatar_zoom_popup { +body.movingUI #avatar_zoom_popup, +body.movingUI #world_popup { resize: both; } From 817842737eb0e4c17c441a6a8b6547519f29f6bc Mon Sep 17 00:00:00 2001 From: RossAscends <124905043+RossAscends@users.noreply.github.com> Date: Mon, 15 May 2023 07:18:59 +0900 Subject: [PATCH 22/64] fix(?) 
WIEditor scrollbars & add it to Panel Reset --- public/scripts/power-user.js | 9 ++++++++- public/style.css | 8 ++++++-- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/public/scripts/power-user.js b/public/scripts/power-user.js index cae8a7ec2..e636893b9 100644 --- a/public/scripts/power-user.js +++ b/public/scripts/power-user.js @@ -544,7 +544,7 @@ function loadPowerUserSettings(settings, data) { function loadMaxContextUnlocked() { $('#max_context_unlocked').prop('checked', power_user.max_context_unlocked); - $('#max_context_unlocked').on('change', function() { + $('#max_context_unlocked').on('change', function () { power_user.max_context_unlocked = !!$(this).prop('checked'); switchMaxContextSize(); saveSettingsDebounced(); @@ -796,6 +796,13 @@ function resetMovablePanels() { document.getElementById("avatar_zoom_popup").style.bottom = ''; document.getElementById("avatar_zoom_popup").style.height = ''; document.getElementById("avatar_zoom_popup").style.width = ''; + + document.getElementById("world_popup").style.top = ''; + document.getElementById("world_popup").style.left = ''; + document.getElementById("world_popup").style.right = ''; + document.getElementById("world_popup").style.bottom = ''; + document.getElementById("world_popup").style.height = ''; + document.getElementById("world_popup").style.width = ''; } $(document).ready(() => { diff --git a/public/style.css b/public/style.css index e74e8403f..9c9ac5f86 100644 --- a/public/style.css +++ b/public/style.css @@ -1687,7 +1687,7 @@ input[type=search]:focus::-webkit-search-cancel-button { flex-direction: column; z-index: 3010; border-radius: 0 0 20px 20px; - overflow-y: auto; + overflow-y: hidden; } #world_popup_bottom_holder { @@ -1714,6 +1714,10 @@ input[type=search]:focus::-webkit-search-cancel-button { margin-left: 1rem; } +.world_entry { + padding: 0 5px; +} + .world_entry:not(:last-child)::after { margin-top: 1rem; height: 1px; @@ -1778,7 +1782,7 @@ input[type=search]:focus::-webkit-search-cancel-button { #world_popup_entries_list { flex-grow: 1; - overflow-y: scroll; + overflow-y: auto; } #world_popup_entries_list:empty { From c3fe796059f504ca0361a8edd9de32f61f1b10ee Mon Sep 17 00:00:00 2001 From: RossAscends <124905043+RossAscends@users.noreply.github.com> Date: Mon, 15 May 2023 09:27:03 +0900 Subject: [PATCH 23/64] World Selector&Editor Combined/Lockable/Swappable --- public/index.html | 84 +++++++++++++++++------------- public/script.js | 6 ++- public/scripts/RossAscends-mods.js | 70 +++++++++++++++++++------ public/scripts/power-user.js | 12 ++--- public/scripts/world-info.js | 5 +- public/style.css | 73 +++++++++++++++----------- 6 files changed, 158 insertions(+), 92 deletions(-) diff --git a/public/index.html b/public/index.html index 3295e600d..b4bc6fbec 100644 --- a/public/index.html +++ b/public/index.html @@ -1371,12 +1371,20 @@
-
+
-
+
+
+
+ + +

-                 World Info
+                 World Selector
                  ?
@@ -1438,6 +1446,41 @@

+ +
+
+
+ +
+ +
+ +

+ World Info Editor + ? +

+
+ + + +
+ + +
+
+
+ +
+
+ +
+ + + + +
+ +

Soft Prompt

@@ -2065,41 +2108,7 @@
-
-
-
-
-
- -
- -

- World Info Editor - ? -

-
- - -
 
-
- - -
-
-
- -
-
- -
- - - - -
- -
@@ -2178,6 +2187,7 @@
+
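The RossAscends-mods.js changes further down give the World Info drawer the same pin behaviour as the left and right panels: the checkbox state is persisted (SaveLocal / LoadLocalBool) and re-applied as a pinnedOpen class on load. A condensed sketch of that pattern, using localStorage directly in place of those helpers:

function bindPanelPin(checkboxId, panelId, storageKey) {
    const checkbox = document.getElementById(checkboxId);
    const panel = document.getElementById(panelId);

    // restore the persisted pin state on load
    checkbox.checked = localStorage.getItem(storageKey) === 'true';
    panel.classList.toggle('pinnedOpen', checkbox.checked);

    // persist and re-apply whenever the pin checkbox changes
    checkbox.addEventListener('change', () => {
        localStorage.setItem(storageKey, String(checkbox.checked));
        panel.classList.toggle('pinnedOpen', checkbox.checked);
    });
}

// e.g. bindPanelPin('WI_panel_pin', 'WorldInfo', 'WINavLockOn');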
diff --git a/public/script.js b/public/script.js index 7d15cd23c..90e5d221e 100644 --- a/public/script.js +++ b/public/script.js @@ -6220,6 +6220,7 @@ $(document).ready(function () { '#shadow_popup', '#world_popup', '.ui-widget', + '.text_pole', ]; for (const id of forbiddenTargets) { if (clickTarget.closest(id).length > 0) { @@ -6241,7 +6242,10 @@ $(document).ready(function () { } }); - $(document).on('click', '.inline-drawer-toggle', function () { + $(document).on('click', '.inline-drawer-toggle', function (e) { + if ($(e.target).hasClass('text_pole')) { + return; + }; var icon = $(this).find('.inline-drawer-icon'); icon.toggleClass('down up'); icon.toggleClass('fa-circle-chevron-down fa-circle-chevron-up'); diff --git a/public/scripts/RossAscends-mods.js b/public/scripts/RossAscends-mods.js index a6e5d228a..952adedd7 100644 --- a/public/scripts/RossAscends-mods.js +++ b/public/scripts/RossAscends-mods.js @@ -30,11 +30,16 @@ import { import { sortByCssOrder } from "./utils.js"; var NavToggle = document.getElementById("nav-toggle"); + var RPanelPin = document.getElementById("rm_button_panel_pin"); var LPanelPin = document.getElementById("lm_button_panel_pin"); -var SelectedCharacterTab = document.getElementById("rm_button_selected_ch"); +var WIPanelPin = document.getElementById("WI_panel_pin"); + var RightNavPanel = document.getElementById("right-nav-panel"); -var LeftNavPanel = document.getElementById("left-nav-panel") +var LeftNavPanel = document.getElementById("left-nav-panel"); +var WorldInfo = document.getElementById("WorldInfo"); + +var SelectedCharacterTab = document.getElementById("rm_button_selected_ch"); var AdvancedCharDefsPopup = document.getElementById("character_popup"); var ConfirmationPopup = document.getElementById("dialogue_popup"); var AutoConnectCheckbox = document.getElementById("auto-connect-checkbox"); @@ -412,23 +417,18 @@ function OpenNavPanels() { if (LoadLocalBool("NavLockOn") == true && LoadLocalBool("NavOpened") == true) { //console.log("RA -- clicking right nav to open"); $("#rightNavDrawerIcon").click(); - } else { - /* console.log('didnt see reason to open right nav on load: R-nav locked? ' + - LoadLocalBool("NavLockOn") - + ' R-nav was open before? ' + - LoadLocalBool("NavOpened" == true)); */ } //auto-open L nav if locked and previously open - if (LoadLocalBool("LNavLockOn") == true && LoadLocalBool("LNavOpened") == true) { console.log("RA -- clicking left nav to open"); $("#leftNavDrawerIcon").click(); - } else { - /* console.log('didnt see reason to open left nav on load: L-Nav Locked? ' + - LoadLocalBool("LNavLockOn") - + ' L-nav was open before? 
' + - LoadLocalBool("LNavOpened" == true)); */ + } + + //auto-open WI if locked and previously open + if (LoadLocalBool("WINavLockOn") == true && LoadLocalBool("WINavOpened") == true) { + console.log("RA -- clicking WI to open"); + $("#WIDrawerIcon").click(); } } @@ -438,7 +438,7 @@ dragElement(document.getElementById("sheld")); dragElement(document.getElementById("left-nav-panel")); dragElement(document.getElementById("right-nav-panel")); dragElement(document.getElementById("avatar_zoom_popup")); -dragElement(document.getElementById("world_popup")); +dragElement(document.getElementById("WorldInfo")); @@ -547,6 +547,7 @@ function dragElement(elmnt) { elmnt.style.top = (elmnt.offsetTop - pos2) + "px"; $(elmnt).css("bottom", "unset"); $(elmnt).css("right", "unset"); + $(elmnt).css("margin", "unset"); /* console.log(` offsetLeft: ${elmnt.offsetLeft}, offsetTop: ${elmnt.offsetTop} @@ -615,7 +616,7 @@ $("document").ready(function () { if ($(RightNavPanel).hasClass('openDrawer') && $('.openDrawer').length > 1) { $(RightNavPanel).slideToggle(200, "swing"); - $(rightNavDrawerIcon).toggleClass('openIcon closedIcon'); + //$(rightNavDrawerIcon).toggleClass('openIcon closedIcon'); $(RightNavPanel).toggleClass('openDrawer closedDrawer'); } } @@ -631,12 +632,30 @@ $("document").ready(function () { if ($(LeftNavPanel).hasClass('openDrawer') && $('.openDrawer').length > 1) { $(LeftNavPanel).slideToggle(200, "swing"); - $(leftNavDrawerIcon).toggleClass('openIcon closedIcon'); + //$(leftNavDrawerIcon).toggleClass('openIcon closedIcon'); $(LeftNavPanel).toggleClass('openDrawer closedDrawer'); } } }); + $(WIPanelPin).on("click", function () { + SaveLocal("WINavLockOn", $(WIPanelPin).prop("checked")); + if ($(WIPanelPin).prop("checked") == true) { + console.log('adding pin class to WI'); + $(WorldInfo).addClass('pinnedOpen'); + } else { + console.log('removing pin class from WI'); + $(WorldInfo).removeClass('pinnedOpen'); + + if ($(WorldInfo).hasClass('openDrawer') && $('.openDrawer').length > 1) { + console.log('closing WI after lock removal'); + $(WorldInfo).slideToggle(200, "swing"); + //$(WorldInfoDrawerIcon).toggleClass('openIcon closedIcon'); + $(WorldInfo).toggleClass('openDrawer closedDrawer'); + } + } + }); + // read the state of right Nav Lock and apply to rightnav classlist $(RPanelPin).prop('checked', LoadLocalBool("NavLockOn")); if (LoadLocalBool("NavLockOn") == true) { @@ -658,6 +677,18 @@ $("document").ready(function () { $(LeftNavPanel).addClass('pinnedOpen'); } + // read the state of left Nav Lock and apply to leftnav classlist + $(WIPanelPin).prop('checked', LoadLocalBool("WINavLockOn")); + if (LoadLocalBool("WINavLockOn") == true) { + //console.log('setting pin class via local var'); + $(WorldInfo).addClass('pinnedOpen'); + } + + if ($(WIPanelPin).prop('checked' == true)) { + console.log('setting pin class via checkbox state'); + $(WorldInfo).addClass('pinnedOpen'); + } + //save state of Right nav being open or closed $("#rightNavDrawerIcon").on("click", function () { if (!$("#rightNavDrawerIcon").hasClass('openIcon')) { @@ -672,6 +703,13 @@ $("document").ready(function () { } else { SaveLocal('LNavOpened', 'false'); } }); + //save state of Left nav being open or closed + $("#WorldInfo").on("click", function () { + if (!$("#WorldInfo").hasClass('openIcon')) { + SaveLocal('WINavOpened', 'true'); + } else { SaveLocal('WINavOpened', 'false'); } + }); + var chatbarInFocus = false; $('#send_textarea').focus(function () { chatbarInFocus = true; diff --git a/public/scripts/power-user.js 
b/public/scripts/power-user.js index e636893b9..128eba3ff 100644 --- a/public/scripts/power-user.js +++ b/public/scripts/power-user.js @@ -797,12 +797,12 @@ function resetMovablePanels() { document.getElementById("avatar_zoom_popup").style.height = ''; document.getElementById("avatar_zoom_popup").style.width = ''; - document.getElementById("world_popup").style.top = ''; - document.getElementById("world_popup").style.left = ''; - document.getElementById("world_popup").style.right = ''; - document.getElementById("world_popup").style.bottom = ''; - document.getElementById("world_popup").style.height = ''; - document.getElementById("world_popup").style.width = ''; + document.getElementById("WorldInfo").style.top = ''; + document.getElementById("WorldInfo").style.left = ''; + document.getElementById("WorldInfo").style.right = ''; + document.getElementById("WorldInfo").style.bottom = ''; + document.getElementById("WorldInfo").style.height = ''; + document.getElementById("WorldInfo").style.width = ''; } $(document).ready(() => { diff --git a/public/scripts/world-info.js b/public/scripts/world-info.js index 1ec51b112..7911cb5ed 100644 --- a/public/scripts/world-info.js +++ b/public/scripts/world-info.js @@ -311,7 +311,7 @@ function appendWorldEntry(entry) { const value = $(this).prop("checked"); world_info_data.entries[uid].disable = value; saveWorldInfo(); - console.log(`WI #${entry.uid} disabled? ${world_info_data.entries[uid].disable}`); + //console.log(`WI #${entry.uid} disabled? ${world_info_data.entries[uid].disable}`); }); disableInput.prop("checked", entry.disable).trigger("input"); disableInput.siblings(".checkbox_fancy").click(function () { @@ -593,7 +593,8 @@ $(document).ready(() => { await loadWorldInfoData(); } - hideWorldEditor(); + if (selectedWorld === "None") { hideWorldEditor(); } + if (is_world_edit_open && selectedWorld !== "None") { showWorldEditor() }; saveSettingsDebounced(); }); diff --git a/public/style.css b/public/style.css index 9c9ac5f86..4e4fee3ec 100644 --- a/public/style.css +++ b/public/style.css @@ -911,19 +911,22 @@ input[type="file"] { #rm_button_characters, #rm_button_panel_pin_div, -#lm_button_panel_pin_div { +#lm_button_panel_pin_div, +#WI_button_panel_pin_div { font-size: 24px; display: inline; } #rm_button_panel_pin_div, -#lm_button_panel_pin_div { +#lm_button_panel_pin_div, +#WI_button_panel_pin_div { opacity: 0.5; transition: 0.3s; } #rm_button_panel_pin_div:hover, -#lm_button_panel_pin_div:hover { +#lm_button_panel_pin_div:hover, +#WI_button_panel_pin_div:hover { opacity: 1; } @@ -932,32 +935,38 @@ input[type="file"] { } #rm_button_panel_pin, -#lm_button_panel_pin { +#lm_button_panel_pin, +#WI_panel_pin { display: none; } #rm_button_panel_pin:checked+label, -#lm_button_panel_pin:checked+label { +#lm_button_panel_pin:checked+label, +#WI_panel_pin:checked+label { display: inline; } #rm_button_panel_pin:checked+label .checked, -#lm_button_panel_pin:checked+label .checked { +#lm_button_panel_pin:checked+label .checked, +#WI_panel_pin:checked+label .checked { display: inline; } #rm_button_panel_pin:checked+label .unchecked, -#lm_button_panel_pin:checked+label .unchecked { +#lm_button_panel_pin:checked+label .unchecked, +#WI_panel_pin:checked+label .unchecked { display: none; } #rm_button_panel_pin:not(:checked)+label .checked, -#lm_button_panel_pin:not(:checked)+label .checked { +#lm_button_panel_pin:not(:checked)+label .checked, +#WI_panel_pin:not(:checked)+label .checked { display: none; } #rm_button_panel_pin:not(:checked)+label .unchecked, 
-#lm_button_panel_pin:not(:checked)+label .unchecked { +#lm_button_panel_pin:not(:checked)+label .unchecked, +#WI_panel_pin:not(:checked)+label .unchecked { display: inline; } @@ -1670,20 +1679,20 @@ input[type=search]:focus::-webkit-search-cancel-button { #world_popup { display: none; - background-color: var(--SmartThemeBlurTintColor); - backdrop-filter: blur(calc(var(--SmartThemeBlurStrength) * 2)); - max-width: var(--sheldWidth); - max-height: calc(100% - 100px); + /* background-color: var(--SmartThemeBlurTintColor); + backdrop-filter: blur(calc(var(--SmartThemeBlurStrength) * 2)); */ + /* max-width: var(--sheldWidth); */ + /* max-height: calc(100% - 100px); */ min-height: 100px; min-width: 100px; - position: absolute; - margin-left: auto; - margin-right: auto; + /* position: absolute; */ + /* margin-left: auto; + margin-right: auto; */ left: 0; right: 0; - top: 40px; - box-shadow: 0 0 2px rgba(0, 0, 0, 0.5); - padding: 10px; + /* top: 40px; */ + /* box-shadow: 0 0 2px rgba(0, 0, 0, 0.5); */ + /* padding: 10px; */ flex-direction: column; z-index: 3010; border-radius: 0 0 20px 20px; @@ -1753,7 +1762,7 @@ input[type=search]:focus::-webkit-search-cancel-button { } #form_rename_world { - margin-right: 50px; + /* margin-right: 50px; */ } #form_rename_chat { @@ -3490,8 +3499,10 @@ label[for="extensions_autoconnect"] { } .fillRight, -.fillLeft { +.fillLeft, +#WorldInfo { min-width: unset; + position: fixed; } .fillLeft { @@ -3846,8 +3857,7 @@ body.movingUI .drag-grabber { body.movingUI #sheld, body.movingUI .drawer-content, body.movingUI #expression-holder, -body.movingUI #avatar_zoom_popup, -body.movingUI #world_popup { +body.movingUI #avatar_zoom_popup { resize: both; } @@ -4000,10 +4010,10 @@ body.waifuMode #avatar_zoom_popup { display: none; } - #world_popup_header { + /* #world_popup_header { flex-direction: column; align-items: flex-start; - } + } */ #world_popup_header .world_popup_expander { display: none; @@ -4049,14 +4059,18 @@ body.waifuMode #avatar_zoom_popup { #sheld, #character_popup, - #world_popup { - height: calc(100svh - 45px); + .drawer-content + + /* , + #world_popup */ + { + max-height: calc(100svh - 45px); width: 100% !important; margin: 0 auto; max-width: 100%; left: 0 !important; - resize: none; - top: 42px; + resize: none !important; + top: 40px; } #character_popup, @@ -4065,7 +4079,6 @@ body.waifuMode #avatar_zoom_popup { } #character_popup, - #world_popup, #send_form { border: 1px solid var(--grey30); backdrop-filter: blur(calc(var(--SmartThemeBlurStrength) * 2)); From a950458534cda68f3c45c5df838c5499d1011ff3 Mon Sep 17 00:00:00 2001 From: RossAscends <124905043+RossAscends@users.noreply.github.com> Date: Mon, 15 May 2023 10:23:43 +0900 Subject: [PATCH 24/64] fix panel resetting --- public/scripts/power-user.js | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/public/scripts/power-user.js b/public/scripts/power-user.js index 128eba3ff..ed0c83664 100644 --- a/public/scripts/power-user.js +++ b/public/scripts/power-user.js @@ -770,18 +770,21 @@ function resetMovablePanels() { document.getElementById("sheld").style.right = ''; document.getElementById("sheld").style.height = ''; document.getElementById("sheld").style.width = ''; + document.getElementById("sheld").style.margin = ''; document.getElementById("left-nav-panel").style.top = ''; document.getElementById("left-nav-panel").style.left = ''; document.getElementById("left-nav-panel").style.height = ''; document.getElementById("left-nav-panel").style.width = ''; + 
document.getElementById("left-nav-panel").style.margin = ''; document.getElementById("right-nav-panel").style.top = ''; document.getElementById("right-nav-panel").style.left = ''; document.getElementById("right-nav-panel").style.right = ''; document.getElementById("right-nav-panel").style.height = ''; document.getElementById("right-nav-panel").style.width = ''; + document.getElementById("right-nav-panel").style.margin = ''; document.getElementById("expression-holder").style.top = ''; document.getElementById("expression-holder").style.left = ''; @@ -789,6 +792,7 @@ function resetMovablePanels() { document.getElementById("expression-holder").style.bottom = ''; document.getElementById("expression-holder").style.height = ''; document.getElementById("expression-holder").style.width = ''; + document.getElementById("expression-holder").style.margin = ''; document.getElementById("avatar_zoom_popup").style.top = ''; document.getElementById("avatar_zoom_popup").style.left = ''; @@ -796,6 +800,7 @@ function resetMovablePanels() { document.getElementById("avatar_zoom_popup").style.bottom = ''; document.getElementById("avatar_zoom_popup").style.height = ''; document.getElementById("avatar_zoom_popup").style.width = ''; + document.getElementById("avatar_zoom_popup").style.margin = ''; document.getElementById("WorldInfo").style.top = ''; document.getElementById("WorldInfo").style.left = ''; @@ -803,6 +808,7 @@ function resetMovablePanels() { document.getElementById("WorldInfo").style.bottom = ''; document.getElementById("WorldInfo").style.height = ''; document.getElementById("WorldInfo").style.width = ''; + document.getElementById("WorldInfo").style.margin = ''; } $(document).ready(() => { From 238c4fad57af2c14fcc9631931a086eb78946135 Mon Sep 17 00:00:00 2001 From: Sanskar Tiwari Date: Mon, 15 May 2023 12:04:56 +0530 Subject: [PATCH 25/64] console logging the prompt option --- public/index.html | 4 ++++ public/script.js | 28 +++++++++++++++++----------- public/scripts/power-user.js | 7 +++++++ 3 files changed, 28 insertions(+), 11 deletions(-) diff --git a/public/index.html b/public/index.html index b4bc6fbec..35e234d89 100644 --- a/public/index.html +++ b/public/index.html @@ -1704,6 +1704,10 @@ Auto-scroll Chat +
+
+
+
\ No newline at end of file diff --git a/public/script.js b/public/script.js index 5a29de6af..f80e9b134 100644 --- a/public/script.js +++ b/public/script.js @@ -246,6 +246,9 @@ let optionsPopper = Popper.createPopper(document.getElementById('send_form'), do let exportPopper = Popper.createPopper(document.getElementById('export_button'), document.getElementById('export_format_popup'), { placement: 'left' }); +let rawPromptPopper = Popper.createPopper(document.getElementById('dialogue_popup'), document.getElementById('rawPromptPopup'), { + placement: 'right' +}); let dialogueResolve = null; let chat_metadata = {}; @@ -2186,7 +2189,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, } else if (main_api == 'openai') { let [prompt, counts] = await prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldInfoAfter, afterScenarioAnchor, promptBias, type); - generate_data = { prompt : prompt }; + generate_data = { prompt: prompt }; // counts will return false if the user has not enabled the token breakdown feature if (counts) { @@ -2228,10 +2231,11 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, setInContextMessages(openai_messages_count, type); } else if (main_api == 'poe') { - generate_data = { prompt : finalPromt }; + generate_data = { prompt: finalPromt }; } if (power_user.console_log_prompts) { + console.log(generate_data.prompt); } @@ -2280,6 +2284,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, let currentArrayEntry = Number(thisPromptBits.length - 1); let additionalPromptStuff = { ...thisPromptBits[currentArrayEntry], + rawPrompt: generate_data.prompt, mesId: Number(count_view_mes), worldInfoBefore: worldInfoBefore, allAnchors: allAnchors, @@ -2580,6 +2585,7 @@ function promptItemize(itemizedPrompts, requestedMesId) { Only the white numbers really matter. All numbers are estimates. Grey color items may not have been included in the context due to certain prompt format settings. +
@@ -2693,6 +2699,7 @@ function promptItemize(itemizedPrompts, requestedMesId) { Only the white numbers really matter. All numbers are estimates. Grey color items may not have been included in the context due to certain prompt format settings. +
@@ -5763,6 +5770,18 @@ $(document).ready(function () { } }) + $(document).on("pointerup", "#showRawPrompt", function () { + //let mesIdForItemization = $(this).closest('.mes').attr('mesId'); + //console.log(generate_data.prompt); + console.log(itemizedPrompts[0].rawPrompt); + $("#rawPromptWrapper").html(itemizedPrompts[0].rawPrompt.replace(/\n/g, '
')); + rawPromptPopper.update(); + $('#rawPromptPopup').toggle(); + + //Popper(itemizedPrompts, mesIdForItemization); + + }) + //******************** //***Message Editor*** diff --git a/public/style.css b/public/style.css index 4e4fee3ec..b31adf958 100644 --- a/public/style.css +++ b/public/style.css @@ -3193,11 +3193,39 @@ a { text-decoration: none; } -#export_format_popup { +#export_format_popup, +#rawPromptPopup { display: none; z-index: 9999; } +#rawPromptPopup { + inset: 0px auto auto 0px; + margin: 0px; + transform: translate(909px, 47px); + display: block; + overflow-wrap: break-word; + white-space: normal; + max-width: calc(((100svw - 500px) / 2) - 10px); + position: absolute; + z-index: 9999; + max-height: 90svh; + /*unsure why, but this prevents scrollbars*/ + height: 49svh; + + padding: 5px; + overflow-y: auto; + display: none; +} + +#rawPopupWrapper { + word-wrap: break-word; + width: 100%; + text-align: start; + overflow-y: auto; + max-height: 100%; +} + .list-group { display: flex; flex-direction: column; @@ -4100,6 +4128,10 @@ body.waifuMode #avatar_zoom_popup { } + #showRawPrompt { + display: none; + } + .mes-text { padding-right: 25px; } From 23f9298db59a2a5eb48de183aaddec0d62432036 Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Mon, 15 May 2023 16:59:43 +0300 Subject: [PATCH 30/64] start.sh: install LTS node --- start.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/start.sh b/start.sh index 57c73369f..6cd89c561 100755 --- a/start.sh +++ b/start.sh @@ -10,7 +10,8 @@ then [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh | bash source ~/.bashrc - nvm install node;; + nvm install lts + nvm use lts;; n|N ) echo "Nodejs and npm will not be installed." exit;; From 2f8b95b18d660dc15c15394151afe481f5d2cf45 Mon Sep 17 00:00:00 2001 From: Sanskar Tiwari Date: Mon, 15 May 2023 19:29:59 +0530 Subject: [PATCH 31/64] add trim sentence feature --- public/index.html | 9 +++++++++ public/script.js | 6 +++++- public/scripts/power-user.js | 25 +++++++++++++++++++++++++ public/scripts/utils.js | 11 ++++++++++- 4 files changed, 49 insertions(+), 2 deletions(-) diff --git a/public/index.html b/public/index.html index 3876f517c..d88998a1d 100644 --- a/public/index.html +++ b/public/index.html @@ -1216,6 +1216,15 @@ Disable chat start formatting + + +

Custom Chat Separator diff --git a/public/script.js b/public/script.js index 26b8f1c95..ae5215b27 100644 --- a/public/script.js +++ b/public/script.js @@ -106,7 +106,7 @@ import { setPoeOnlineStatus, } from "./scripts/poe.js"; -import { debounce, delay, restoreCaretPosition, saveCaretPosition } from "./scripts/utils.js"; +import { debounce, delay, restoreCaretPosition, saveCaretPosition, end_trim_to_sentence } from "./scripts/utils.js"; import { extension_settings, getContext, loadExtensionSettings } from "./scripts/extensions.js"; import { executeSlashCommands, getSlashCommandsHelp, registerSlashCommand } from "./scripts/slash-commands.js"; import { @@ -1662,6 +1662,10 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, is_send_press = true; textareaText = $("#send_textarea").val(); $("#send_textarea").val('').trigger('input'); + + if (power_user.trim_sentences) { + textareaText = end_trim_to_sentence(textareaText, power_user.keep_newlines); + } } else { textareaText = ""; if (chat.length && chat[chat.length - 1]['is_user']) { diff --git a/public/scripts/power-user.js b/public/scripts/power-user.js index d27453f12..c0acbd1ad 100644 --- a/public/scripts/power-user.js +++ b/public/scripts/power-user.js @@ -76,6 +76,8 @@ let power_user = { disable_personality_formatting: false, disable_examples_formatting: false, disable_start_formatting: false, + trim_sentences: false, + keep_newlines: false, always_force_name2: false, multigen: false, multigen_first_chunk: 50, @@ -492,6 +494,8 @@ function loadPowerUserSettings(settings, data) { $("#always-force-name2-checkbox").prop("checked", power_user.always_force_name2); $("#disable-examples-formatting-checkbox").prop("checked", power_user.disable_examples_formatting); $('#disable-start-formatting-checkbox').prop("checked", power_user.disable_start_formatting); + $("#trim_sentences_checkbox").prop("checked", power_user.trim_sentences); + $("#keep_newlines_checkbox").prop("checked", power_user.keep_newlines); $('#render_formulas').prop("checked", power_user.render_formulas); $("#custom_chat_separator").val(power_user.custom_chat_separator); $("#fast_ui_mode").prop("checked", power_user.fast_ui_mode); @@ -856,6 +860,27 @@ $(document).ready(() => { saveSettingsDebounced(); }); + // keep newlines is the child of trim sentences + // if keep newlines is checked, trim sentences must be checked + // if trim sentences is unchecked, keep newlines must be unchecked + $("#trim_sentences_checkbox").change(function() { + power_user.trim_sentences = !!$(this).prop("checked"); + if (!$(this).prop("checked")) { + $("#keep_newlines_checkbox").prop("checked", false); + power_user.keep_newlines = false; + } + saveSettingsDebounced(); + }); + + $("#keep_newlines_checkbox").change(function() { + power_user.keep_newlines = !!$(this).prop("checked"); + if ($(this).prop("checked")) { + $("#trim_sentences_checkbox").prop("checked", true); + power_user.trim_sentences = true; + } + saveSettingsDebounced(); + }); + $("#always-force-name2-checkbox").change(function () { power_user.always_force_name2 = !!$(this).prop("checked"); saveSettingsDebounced(); diff --git a/public/scripts/utils.js b/public/scripts/utils.js index 6213f4506..116343dfa 100644 --- a/public/scripts/utils.js +++ b/public/scripts/utils.js @@ -188,4 +188,13 @@ export function sortByCssOrder(a, b) { const _a = Number($(a).css('order')); const _b = Number($(b).css('order')); return _a - _b; -} \ No newline at end of file +} + +export function end_trim_to_sentence(input, keep_newlines = 
false) { + if (!keep_newlines) { + return input.trimEnd(); + } else { + // trim all whitespace at the end of the string, except for newlines + return input.replace(/([^\S\r\n])+(?=\n*$)/g, ""); + } +} From 7dcc6639d6246f9234436951529a571132975936 Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Mon, 15 May 2023 17:17:06 +0300 Subject: [PATCH 32/64] Don't lose generate parameters on forced name2 regeneration --- public/script.js | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/public/script.js b/public/script.js index 26b8f1c95..c27b129fd 100644 --- a/public/script.js +++ b/public/script.js @@ -2425,10 +2425,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, // regenerate with character speech reenforced // to make sure we leave on swipe type while also adding the name2 appendage setTimeout(() => { - let newType = type == "swipe" ? "swipe" : "force_name2"; - newType = isImpersonate ? type : newType; - - Generate(newType, { automatic_trigger: false, force_name2: true }); + Generate(type, { automatic_trigger, force_name2: true, resolve, reject, quiet_prompt, force_chid }); }, generate_loop_counter * 1000); } } else { From d7982c433998b3dd25357d9596199c20981f8c2f Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Mon, 15 May 2023 17:28:41 +0300 Subject: [PATCH 33/64] Clean-up Generate function for better readability --- public/script.js | 295 ++++++++++++++++++++++++----------------------- 1 file changed, 153 insertions(+), 142 deletions(-) diff --git a/public/script.js b/public/script.js index c27b129fd..281bda8a0 100644 --- a/public/script.js +++ b/public/script.js @@ -1700,20 +1700,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, //for normal messages sent from user.. 
if (textareaText != "" && !automatic_trigger && type !== 'quiet') { - chat[chat.length] = {}; - chat[chat.length - 1]['name'] = name1; - chat[chat.length - 1]['is_user'] = true; - chat[chat.length - 1]['is_name'] = true; - chat[chat.length - 1]['send_date'] = humanizedDateTime(); - chat[chat.length - 1]['mes'] = textareaText; - chat[chat.length - 1]['extra'] = {}; - - if (messageBias) { - console.log('checking bias'); - chat[chat.length - 1]['extra']['bias'] = messageBias; - } - //console.log('Generate calls addOneMessage'); - addOneMessage(chat[chat.length - 1]); + sendMessageAsUser(textareaText, messageBias); } //////////////////////////////////// const scenarioText = chat_metadata['scenario'] || characters[this_chid].scenario; @@ -1812,28 +1799,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, //chat2 = chat2.reverse(); // Determine token limit - let this_max_context = 1487; - if (main_api == 'kobold' || main_api == 'textgenerationwebui') { - this_max_context = (max_context - amount_gen); - } - if (main_api == 'novel') { - if (novel_tier === 1) { - this_max_context = 1024; - } else { - this_max_context = 2048 - 60;//fix for fat tokens - if (nai_settings.model_novel == 'krake-v2') { - this_max_context -= 160; - } - } - } - if (main_api == 'openai') { - this_max_context = oai_settings.openai_max_context; - } - if (main_api == 'poe') { - this_max_context = Number(max_context); - } - - + let this_max_context = getMaxContextSize(); // Adjust token limit for Horde let adjustedParams; @@ -2072,22 +2038,8 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, } // add a custom dingus (if defined) - if (power_user.custom_chat_separator && power_user.custom_chat_separator.length) { - mesSendString = power_user.custom_chat_separator + '\n' + mesSendString; - } - // if chat start formatting is disabled - else if (power_user.disable_start_formatting) { - mesSendString = mesSendString; - } - // add non-pygma dingus - else if (!is_pygmalion) { - mesSendString = '\nThen the roleplay chat between ' + name1 + ' and ' + name2 + ' begins.\n' + mesSendString; - } - // add pygma - else { - mesSendString = '\n' + mesSendString; - //mesSendString = mesSendString; //This edit simply removes the first "" that is prepended to all context prompts - } + mesSendString = adjustChatsSeparator(mesSendString); + let finalPromt = worldInfoBefore + storyString + @@ -2098,22 +2050,9 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, generatedPromtCache + promptBias; - - if (zeroDepthAnchor && zeroDepthAnchor.length) { if (!isMultigenEnabled() || tokens_already_generated == 0) { - const trimBothEnds = !force_name2 && !is_pygmalion; - let trimmedPrompt = (trimBothEnds ? zeroDepthAnchor.trim() : zeroDepthAnchor.trimEnd()); - - if (trimBothEnds && !finalPromt.endsWith('\n')) { - finalPromt += '\n'; - } - - finalPromt += trimmedPrompt; - - if (force_name2 || is_pygmalion) { - finalPromt += ' '; - } + finalPromt = appendZeroDepthAnchor(force_name2, zeroDepthAnchor, finalPromt); } } @@ -2132,29 +2071,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, if (isMultigenEnabled() && type !== 'quiet') { // if nothing has been generated yet.. - if (tokens_already_generated === 0) { - // if the max gen setting is > 50...( - if (parseInt(amount_gen) >= power_user.multigen_first_chunk) { - // then only try to make 50 this cycle.. 
- this_amount_gen = power_user.multigen_first_chunk; - } - else { - // otherwise, make as much as the max amount request. - this_amount_gen = parseInt(amount_gen); - } - } - // if we already received some generated text... - else { - // if the remaining tokens to be made is less than next potential cycle count - if (parseInt(amount_gen) - tokens_already_generated < power_user.multigen_next_chunks) { - // subtract already generated amount from the desired max gen amount - this_amount_gen = parseInt(amount_gen) - tokens_already_generated; - } - else { - // otherwise make the standard cycle amount (first 50, and 30 after that) - this_amount_gen = power_user.multigen_next_chunks; - } - } + this_amount_gen = getMultigenAmount(); } let thisPromptBits = []; @@ -2193,40 +2110,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, // counts will return false if the user has not enabled the token breakdown feature if (counts) { - - //$('#token_breakdown').css('display', 'flex'); - const breakdown_bar = $('#token_breakdown div:first-child'); - breakdown_bar.empty(); - - const total = Object.values(counts).filter(x => !Number.isNaN(x)).reduce((acc, val) => acc + val, 0); - - thisPromptBits.push({ - oaiStartTokens: Object.entries(counts)[0][1], - oaiPromptTokens: Object.entries(counts)[1][1], - oaiBiasTokens: Object.entries(counts)[2][1], - oaiNudgeTokens: Object.entries(counts)[3][1], - oaiJailbreakTokens: Object.entries(counts)[4][1], - oaiImpersonateTokens: Object.entries(counts)[5][1], - oaiExamplesTokens: Object.entries(counts)[6][1], - oaiConversationTokens: Object.entries(counts)[7][1], - oaiTotalTokens: total, - }) - - Object.entries(counts).forEach(([type, value]) => { - if (value === 0) { - return; - } - const percent_value = (value / total) * 100; - const color = uniqolor(type, { saturation: 50, lightness: 75, }).color; - const bar = document.createElement('div'); - bar.style.width = `${percent_value}%`; - bar.classList.add('token_breakdown_segment'); - bar.style.backgroundColor = color + 'AA'; - bar.style.borderColor = color + 'FF'; - bar.innerText = value; - bar.title = `${type}: ${percent_value.toFixed(2)}%`; - breakdown_bar.append(bar); - }); + parseTokenCounts(counts, thisPromptBits); } setInContextMessages(openai_messages_count, type); @@ -2314,29 +2198,10 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, itemizedPrompts.push(thisPromptBits); //console.log(`pushed prompt bits to itemizedPrompts array. Length is now: ${itemizedPrompts.length}`); - - if (isStreamingEnabled() && type !== 'quiet') { hideSwipeButtons(); let getMessage = await streamingProcessor.generate(); - // Cohee: Basically a dead-end code... (disabled by isStreamingEnabled) - // I wasn't able to get multigen working with real streaming - // consistently without screwing the interim prompting - if (isMultigenEnabled()) { - tokens_already_generated += this_amount_gen; // add new gen amt to any prev gen counter.. 
- message_already_generated += getMessage; - promptBias = ''; - if (!streamingProcessor.isStopped && shouldContinueMultigen(getMessage, isImpersonate)) { - streamingProcessor.isFinished = false; - runGenerate(getMessage); - console.log('returning to make generate again'); - return; - } - - getMessage = message_already_generated; - } - if (streamingProcessor && !streamingProcessor.isStopped && streamingProcessor.isFinished) { streamingProcessor.onFinishStreaming(streamingProcessor.messageId, getMessage); streamingProcessor = null; @@ -2468,6 +2333,152 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, //console.log('generate ending'); } //generate ends +function sendMessageAsUser(textareaText, messageBias) { + chat[chat.length] = {}; + chat[chat.length - 1]['name'] = name1; + chat[chat.length - 1]['is_user'] = true; + chat[chat.length - 1]['is_name'] = true; + chat[chat.length - 1]['send_date'] = humanizedDateTime(); + chat[chat.length - 1]['mes'] = textareaText; + chat[chat.length - 1]['extra'] = {}; + + if (messageBias) { + console.log('checking bias'); + chat[chat.length - 1]['extra']['bias'] = messageBias; + } + //console.log('Generate calls addOneMessage'); + addOneMessage(chat[chat.length - 1]); +} + +function getMaxContextSize() { + let this_max_context = 1487; + if (main_api == 'kobold' || main_api == 'textgenerationwebui') { + this_max_context = (max_context - amount_gen); + } + if (main_api == 'novel') { + if (novel_tier === 1) { + this_max_context = 1024; + } else { + this_max_context = 2048 - 60; //fix for fat tokens + if (nai_settings.model_novel == 'krake-v2') { + this_max_context -= 160; + } + } + } + if (main_api == 'openai') { + this_max_context = oai_settings.openai_max_context; + } + if (main_api == 'poe') { + this_max_context = Number(max_context); + } + return this_max_context; +} + +function parseTokenCounts(counts, thisPromptBits) { + const breakdown_bar = $('#token_breakdown div:first-child'); + breakdown_bar.empty(); + + const total = Object.values(counts).filter(x => !Number.isNaN(x)).reduce((acc, val) => acc + val, 0); + + thisPromptBits.push({ + oaiStartTokens: Object.entries(counts)[0][1], + oaiPromptTokens: Object.entries(counts)[1][1], + oaiBiasTokens: Object.entries(counts)[2][1], + oaiNudgeTokens: Object.entries(counts)[3][1], + oaiJailbreakTokens: Object.entries(counts)[4][1], + oaiImpersonateTokens: Object.entries(counts)[5][1], + oaiExamplesTokens: Object.entries(counts)[6][1], + oaiConversationTokens: Object.entries(counts)[7][1], + oaiTotalTokens: total, + }); + + Object.entries(counts).forEach(([type, value]) => { + if (value === 0) { + return; + } + const percent_value = (value / total) * 100; + const color = uniqolor(type, { saturation: 50, lightness: 75, }).color; + const bar = document.createElement('div'); + bar.style.width = `${percent_value}%`; + bar.classList.add('token_breakdown_segment'); + bar.style.backgroundColor = color + 'AA'; + bar.style.borderColor = color + 'FF'; + bar.innerText = value; + bar.title = `${type}: ${percent_value.toFixed(2)}%`; + breakdown_bar.append(bar); + }); +} + +function adjustChatsSeparator(mesSendString) { + if (power_user.custom_chat_separator && power_user.custom_chat_separator.length) { + mesSendString = power_user.custom_chat_separator + '\n' + mesSendString; + } + + // if chat start formatting is disabled + else if (power_user.disable_start_formatting) { + mesSendString = mesSendString; + } + + // add non-pygma dingus + else if (!is_pygmalion) { + mesSendString = '\nThen the 
roleplay chat between ' + name1 + ' and ' + name2 + ' begins.\n' + mesSendString; + } + + // add pygma + else { + mesSendString = '\n' + mesSendString; + //mesSendString = mesSendString; //This edit simply removes the first "" that is prepended to all context prompts + } + + return mesSendString; +} + +function appendZeroDepthAnchor(force_name2, zeroDepthAnchor, finalPromt) { + const trimBothEnds = !force_name2 && !is_pygmalion; + let trimmedPrompt = (trimBothEnds ? zeroDepthAnchor.trim() : zeroDepthAnchor.trimEnd()); + + if (trimBothEnds && !finalPromt.endsWith('\n')) { + finalPromt += '\n'; + } + + finalPromt += trimmedPrompt; + + if (force_name2 || is_pygmalion) { + finalPromt += ' '; + } + + return finalPromt; +} + +function getMultigenAmount() { + let this_amount_gen = parseInt(amount_gen); + + if (tokens_already_generated === 0) { + // if the max gen setting is > 50...( + if (parseInt(amount_gen) >= power_user.multigen_first_chunk) { + // then only try to make 50 this cycle.. + this_amount_gen = power_user.multigen_first_chunk; + } + else { + // otherwise, make as much as the max amount request. + this_amount_gen = parseInt(amount_gen); + } + } + // if we already received some generated text... + else { + // if the remaining tokens to be made is less than next potential cycle count + if (parseInt(amount_gen) - tokens_already_generated < power_user.multigen_next_chunks) { + // subtract already generated amount from the desired max gen amount + this_amount_gen = parseInt(amount_gen) - tokens_already_generated; + } + else { + // otherwise make the standard cycle amount (first 50, and 30 after that) + this_amount_gen = power_user.multigen_next_chunks; + } + } + return this_amount_gen; +} + function promptItemize(itemizedPrompts, requestedMesId) { var incomingMesId = Number(requestedMesId); console.log(`looking for MesId ${incomingMesId}`); From b65279ea279db3d615ebfa4d224ca0d6d0cfbab4 Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Mon, 15 May 2023 17:42:18 +0300 Subject: [PATCH 34/64] Adjust stopping strings for metharme --- public/script.js | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/public/script.js b/public/script.js index 281bda8a0..68ed89267 100644 --- a/public/script.js +++ b/public/script.js @@ -1261,15 +1261,16 @@ function getStoppingStrings(isImpersonate, addSpace) { } } + // Cohee: oobabooga's textgen always appends newline before the sequence as a stopping string + // But it's a problem for Metharme which doesn't use newlines to separate them. + const wrap = (s) => power_user.instruct.wrap ? '\n' + s : s; + if (power_user.instruct.enabled) { - // Cohee: This was borrowed from oobabooga's textgen. But.. - // What if a model doesn't use newlines to chain sequences? - // Who knows. 
if (power_user.instruct.input_sequence) { - result.push(`\n${power_user.instruct.input_sequence}`); + result.push(wrap(power_user.instruct.input_sequence)); } if (power_user.instruct.output_sequence) { - result.push(`\n${power_user.instruct.output_sequence}`); + result.push(wrap(power_user.instruct.output_sequence)); } } From ab124cb926d4c67df5e9db23696dee2cd8514ddd Mon Sep 17 00:00:00 2001 From: Sanskar Tiwari Date: Mon, 15 May 2023 20:56:55 +0530 Subject: [PATCH 35/64] mirror horde function --- public/scripts/utils.js | 27 ++++++++++++++++++++++----- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/public/scripts/utils.js b/public/scripts/utils.js index 116343dfa..796cb59f6 100644 --- a/public/scripts/utils.js +++ b/public/scripts/utils.js @@ -191,10 +191,27 @@ export function sortByCssOrder(a, b) { } export function end_trim_to_sentence(input, keep_newlines = false) { - if (!keep_newlines) { - return input.trimEnd(); - } else { - // trim all whitespace at the end of the string, except for newlines - return input.replace(/([^\S\r\n])+(?=\n*$)/g, ""); + // inspired from https://github.com/kaihordewebui/kaihordewebui.github.io/blob/06b95e6b7720eb85177fbaf1a7f52955d7cdbc02/index.html#L4853-L4867 + + const punctuation = new Set(['.', '!', '?']); // extend this as you see fit + + for (let i = input.length - 1; i >= 0; i--) { + const char = input[i]; + + if (punctuation.has(char)) { + last = i; + break; + } + + if (include_newline && char === '\n') { + last = i; + break; + } + + if (last === -1) { + return input.trimEnd(); + } + + return input.substring(0, last + 1).trimEnd(); } } From c8b77c0d582fb9ab251bbd242096525361ce1d76 Mon Sep 17 00:00:00 2001 From: Sanskar Tiwari Date: Mon, 15 May 2023 21:00:24 +0530 Subject: [PATCH 36/64] change keep newline to include newline --- public/index.html | 6 +++--- public/script.js | 2 +- public/scripts/power-user.js | 18 +++++++++--------- public/scripts/utils.js | 8 ++++---- 4 files changed, 17 insertions(+), 17 deletions(-) diff --git a/public/index.html b/public/index.html index d88998a1d..c54ff4563 100644 --- a/public/index.html +++ b/public/index.html @@ -1221,9 +1221,9 @@ Trim Sentences -

-
+
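Patches 35 and 36 above, together with the punctuation extension in the patch that follows, converge on one shape for the sentence-trimming helper: scan backwards for the last sentence-ending character (optionally a newline) and cut there. A consolidated, runnable sketch of where the helper ends up, as reflected in the later diffs:

export function end_trim_to_sentence(input, include_newline = false) {
    // cut the input back to the last sentence-ending character (optionally a newline)
    const punctuation = new Set(['.', '!', '?', '*', '"', ')', '}', '`', ']', '$']);
    let last = -1;

    for (let i = input.length - 1; i >= 0; i--) {
        const char = input[i];

        if (punctuation.has(char)) {
            last = i;
            break;
        }

        if (include_newline && char === '\n') {
            last = i;
            break;
        }
    }

    if (last === -1) {
        return input.trimEnd();
    }

    return input.substring(0, last + 1).trimEnd();
}

// end_trim_to_sentence('He waved. She smi') -> 'He waved.'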
From 1204ba43d816a2799e7d7748c18dc49f7a5e8b78 Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Mon, 15 May 2023 18:56:17 +0300 Subject: [PATCH 40/64] Extend punctuation list --- public/scripts/utils.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/scripts/utils.js b/public/scripts/utils.js index 5c8c1eb89..dc84a357d 100644 --- a/public/scripts/utils.js +++ b/public/scripts/utils.js @@ -193,7 +193,7 @@ export function sortByCssOrder(a, b) { export function end_trim_to_sentence(input, include_newline = false) { // inspired from https://github.com/kaihordewebui/kaihordewebui.github.io/blob/06b95e6b7720eb85177fbaf1a7f52955d7cdbc02/index.html#L4853-L4867 - const punctuation = new Set(['.', '!', '?']); // extend this as you see fit + const punctuation = new Set(['.', '!', '?', '*', '"', ')', '}', '`', ']', '$']); // extend this as you see fit let last = -1; for (let i = input.length - 1; i >= 0; i--) { From 11716b5fc00d2b9d3bac55c4a70aa5e129f6cc9f Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Mon, 15 May 2023 19:05:00 +0300 Subject: [PATCH 41/64] Proper trim sentences with streaming --- public/script.js | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/public/script.js b/public/script.js index 7400b1f9a..a5a58dc9a 100644 --- a/public/script.js +++ b/public/script.js @@ -1478,10 +1478,10 @@ class StreamingProcessor { return text; } - onProgressStreaming(messageId, text) { + onProgressStreaming(messageId, text, isFinal) { const isImpersonate = this.type == "impersonate"; text = this.removePrefix(text); - let processedText = cleanUpMessage(text, isImpersonate); + let processedText = cleanUpMessage(text, isImpersonate, !isFinal); let result = extractNameFromMessage(processedText, this.force_name2, isImpersonate); let isName = result.this_mes_is_name; processedText = result.getMessage; @@ -1518,7 +1518,7 @@ class StreamingProcessor { onFinishStreaming(messageId, text) { this.hideStopButton(this.messageId); - this.onProgressStreaming(messageId, text); + this.onProgressStreaming(messageId, text, true); addCopyToCodeBlocks($(`#chat .mes[mesid="${messageId}"]`)); playMessageSound(); saveChatConditional(); @@ -2146,7 +2146,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, generatePoe(type, finalPromt).then(onSuccess).catch(onError); } } - else if (main_api == 'textgenerationwebui' && textgenerationwebui_settings.streaming && type !== 'quiet') { + else if (main_api == 'textgenerationwebui' && isStreamingEnabled() && type !== 'quiet') { streamingProcessor.generator = await generateTextGenWithStreaming(generate_data, streamingProcessor.abortController.signal); } else { @@ -2236,7 +2236,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, ({ type, getMessage } = saveReply('append', getMessage, this_mes_is_name, title)); } } else { - let chunk = cleanUpMessage(message_already_generated, true); + let chunk = cleanUpMessage(message_already_generated, true, true); let extract = extractNameFromMessage(chunk, force_name2, isImpersonate); $('#send_textarea').val(extract.getMessage).trigger('input'); } @@ -2960,8 +2960,8 @@ function extractMessageFromData(data) { return getMessage; } -function cleanUpMessage(getMessage, isImpersonate) { - if (power_user.trim_sentences) { +function cleanUpMessage(getMessage, isImpersonate, displayIncompleteSentences = false) { + if (!displayIncompleteSentences && power_user.trim_sentences) { getMessage = end_trim_to_sentence(getMessage, 
power_user.include_newline); } From c3146e830d437f575139bc08f19c40b04690acc9 Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Mon, 15 May 2023 19:06:56 +0300 Subject: [PATCH 42/64] More clear checkbox name --- public/index.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/index.html b/public/index.html index 8270b403d..cb857346a 100644 --- a/public/index.html +++ b/public/index.html @@ -1218,7 +1218,7 @@
Use slash commands to generate images. Type /help in chat for more details +
+ Hint: Save an API key in Horde KoboldAI API settings to use it here. + + @@ -472,6 +592,8 @@ jQuery(async () => { $('#sd_negative_prompt').on('input', onNegativePromptInput); $('#sd_width').on('input', onWidthInput); $('#sd_height').on('input', onHeightInput); + $('#sd_horde').on('input', onHordeInput); + $('#sd_horde_nsfw').on('input', onHordeNsfwInput); $('.sd_settings .inline-drawer-toggle').on('click', function () { initScrollHeight($("#sd_prompt_prefix")); diff --git a/public/scripts/extensions/stable-diffusion/style.css b/public/scripts/extensions/stable-diffusion/style.css index 5d26a2ad3..3cb72e138 100644 --- a/public/scripts/extensions/stable-diffusion/style.css +++ b/public/scripts/extensions/stable-diffusion/style.css @@ -1,4 +1,4 @@ -.sd_settings label { +.sd_settings label:not(.checkbox_label) { display: block; } @@ -16,7 +16,6 @@ display: flex; align-items: center; justify-content: center; - } #sd_gen:hover { diff --git a/server.js b/server.js index 05d584028..c4e577a7a 100644 --- a/server.js +++ b/server.js @@ -2925,10 +2925,21 @@ app.post('/viewsecrets', jsonParser, async (_, response) => { } }); -app.post('/horde_generateimage', async (request, response) => { +app.post('/horde_samplers', jsonParser, async (_, response) => { + const samplers = Object.values(ai_horde.ModelGenerationInputStableSamplers); + response.send(samplers); +}); + +app.post('/horde_models', jsonParser, async (_, response) => { + const models = await ai_horde.getModels(); + response.send(models); +}); + +app.post('/horde_generateimage', jsonParser, async (request, response) => { const MAX_ATTEMPTS = 100; const CHECK_INTERVAL = 3000; const api_key_horde = readSecret(SECRET_KEYS.HORDE) || ANONYMOUS_KEY; + console.log('Stable Horde request:', request.body); const generation = await ai_horde.postAsyncImageGenerate( { prompt: `${request.body.prompt_prefix} ${request.body.prompt} ### ${request.body.negative_prompt}`, @@ -2950,12 +2961,17 @@ app.post('/horde_generateimage', async (request, response) => { for (let attempt = 0; attempt < MAX_ATTEMPTS; attempt++) { await delay(CHECK_INTERVAL); const check = await ai_horde.getImageGenerationCheck(generation.id); + console.log(check); if (check.done) { const result = await ai_horde.getImageGenerationStatus(generation.id); return response.send(result.generations[0].img); } + if (!check.is_possible) { + return response.sendStatus(503); + } + if (check.faulted) { return response.sendStatus(500); } From d93f3cf0fd6b2f54e870bd250a5b2048c288892a Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Mon, 15 May 2023 23:19:16 +0300 Subject: [PATCH 46/64] Rearrange user blocks --- public/index.html | 46 +++++++++++++++++++++++++++++----------------- 1 file changed, 29 insertions(+), 17 deletions(-) diff --git a/public/index.html b/public/index.html index 67f960f47..ca09563f0 100644 --- a/public/index.html +++ b/public/index.html @@ -1733,22 +1733,35 @@ ? -
[remainder of the index.html user-settings hunk — the markup is not recoverable here; the surviving labels show the "Send on Enter" and "Auto-swipe" controls being removed and re-added in a new grouping, with "Minimum generated message length", "Blacklisted words", and "Name" as surrounding context]
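For readers following the trimming changes in patches 40 and 41 above: end_trim_to_sentence now also treats '*', '"', ')', '}', '`', ']' and '$' as sentence enders, and cleanUpMessage only applies the trim on the final streaming pass, so partially streamed text is never cut back mid-chunk. The sketch below is a simplified, standalone illustration of that behaviour — the option-object signature and the isFinal/trimSentences names are inventions for this example, and the include_newline handling is omitted; it is not the actual SillyTavern code.

// Simplified sketch of the sentence-trimming flow from patches 40-41 (illustrative only).
const punctuation = new Set(['.', '!', '?', '*', '"', ')', '}', '`', ']', '$']);

function endTrimToSentence(input) {
    // Walk backwards to the last character that may legally end a sentence and cut there.
    let last = -1;
    for (let i = input.length - 1; i >= 0; i--) {
        if (punctuation.has(input[i])) {
            last = i;
            break;
        }
    }
    return last === -1 ? input : input.substring(0, last + 1).trimEnd();
}

function cleanUpMessage(text, { isFinal = false, trimSentences = true } = {}) {
    // Intermediate streaming chunks are shown untrimmed so the visible text never
    // jumps backwards; only the final pass drops an unfinished trailing clause.
    if (isFinal && trimSentences) {
        return endTrimToSentence(text);
    }
    return text;
}

console.log(cleanUpMessage('He waved. The door creaked and', { isFinal: false }));
// -> 'He waved. The door creaked and'   (mid-stream: left untouched)
console.log(cleanUpMessage('He waved. The door creaked and', { isFinal: true }));
// -> 'He waved.'                        (final pass: dangling fragment trimmed)
console.log(cleanUpMessage('*She laughed* and then', { isFinal: true }));
// -> '*She laughed*'                    (the new '*' terminator keeps emphasis blocks intact)

In the real code the final-pass decision comes from StreamingProcessor passing !isFinal into cleanUpMessage's displayIncompleteSentences parameter, and trimming is further gated on the power_user.trim_sentences setting, surfaced in the UI as the "Trim Incomplete Sentences" checkbox seen later in this series.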

From ae9f9cc060b8e223f0245750eb46911451634f18 Mon Sep 17 00:00:00 2001 From: RossAscends <124905043+RossAscends@users.noreply.github.com> Date: Tue, 16 May 2023 06:32:59 +0900 Subject: [PATCH 47/64] itemizer now references correct prompt for message --- public/script.js | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/public/script.js b/public/script.js index a5a58dc9a..4f7044208 100644 --- a/public/script.js +++ b/public/script.js @@ -555,6 +555,8 @@ const MAX_GENERATION_LOOPS = 5; let token; +var PromptArrayItemForRawPromptDisplay; + export function getRequestHeaders() { return { "Content-Type": "application/json", @@ -2488,6 +2490,7 @@ function promptItemize(itemizedPrompts, requestedMesId) { for (var i = 0; i < itemizedPrompts.length; i++) { if (itemizedPrompts[i].mesId === incomingMesId) { thisPromptSet = i; + PromptArrayItemForRawPromptDisplay = Number(i); } } @@ -5784,15 +5787,16 @@ $(document).ready(function () { }) $(document).on("pointerup", "#showRawPrompt", function () { - //let mesIdForItemization = $(this).closest('.mes').attr('mesId'); - //console.log(generate_data.prompt); - console.log(itemizedPrompts[0].rawPrompt); - $("#rawPromptWrapper").html(itemizedPrompts[0].rawPrompt.replace(/\n/g, '
')); + //console.log(itemizedPrompts[PromptArrayItemForRawPromptDisplay].rawPrompt); + console.log(itemizedPrompts[PromptArrayItemForRawPromptDisplay].rawPrompt); + + let rawPrompt = itemizedPrompts[PromptArrayItemForRawPromptDisplay].rawPrompt; + let rawPromptValues = Object.values(rawPrompt); + + //let DisplayStringifiedPrompt = JSON.stringify(itemizedPrompts[PromptArrayItemForRawPromptDisplay].rawPrompt).replace(/\n+/g, '
'); + $("#rawPromptWrapper").html(rawPromptValues); rawPromptPopper.update(); $('#rawPromptPopup').toggle(); - - //Popper(itemizedPrompts, mesIdForItemization); - }) From 9d49c4644d46af7152422b9f16d53c33c5db4747 Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Tue, 16 May 2023 00:50:00 +0300 Subject: [PATCH 48/64] OAI prompt display --- public/script.js | 11 +++++++++-- public/style.css | 1 + 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/public/script.js b/public/script.js index b703f279c..0a408d48a 100644 --- a/public/script.js +++ b/public/script.js @@ -5170,6 +5170,9 @@ $(document).ready(function () { } } + rawPromptPopper.update(); + $('#rawPromptPopup').hide(); + if (dialogueResolve) { if (popup_type == 'input') { dialogueResolve($("#dialogue_popup_input").val()); @@ -5825,10 +5828,14 @@ $(document).ready(function () { console.log(itemizedPrompts[PromptArrayItemForRawPromptDisplay].rawPrompt); let rawPrompt = itemizedPrompts[PromptArrayItemForRawPromptDisplay].rawPrompt; - let rawPromptValues = Object.values(rawPrompt); + let rawPromptValues = rawPrompt; + + if (Array.isArray(rawPrompt)) { + rawPromptValues = rawPrompt.map(x => x.content).join('\n'); + } //let DisplayStringifiedPrompt = JSON.stringify(itemizedPrompts[PromptArrayItemForRawPromptDisplay].rawPrompt).replace(/\n+/g, '
'); - $("#rawPromptWrapper").html(rawPromptValues); + $("#rawPromptWrapper").text(rawPromptValues); rawPromptPopper.update(); $('#rawPromptPopup').toggle(); }) diff --git a/public/style.css b/public/style.css index b31adf958..c113af3c2 100644 --- a/public/style.css +++ b/public/style.css @@ -135,6 +135,7 @@ table.responsiveTable { .tokenItemizingSubclass { font-size: calc(var(--mainFontSize) * 0.8); color: var(--SmartThemeEmColor); + white-space: pre-wrap; } .tokenGraph { From 558d0d9b66a7a32fd034dbb53f4cc84fad400ca6 Mon Sep 17 00:00:00 2001 From: Sanskar Tiwari Date: Tue, 16 May 2023 04:36:30 +0530 Subject: [PATCH 49/64] deletion checkbox delegate to parent --- public/script.js | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/public/script.js b/public/script.js index b1c98b2fa..7e2a98ac9 100644 --- a/public/script.js +++ b/public/script.js @@ -246,7 +246,7 @@ let dialogueResolve = null; let chat_metadata = {}; let streamingProcessor = null; let crop_data = undefined; - +let is_delete_mode = false; let fav_ch_checked = false; //initialize global var for future cropped blobs @@ -4512,14 +4512,19 @@ $(document).ready(function () { is_use_scroll_holder = false; } }); - $(document).on("click", ".del_checkbox", function () { - //when a 'delete message' checkbox is clicked - $(".del_checkbox").each(function () { + + $(document).on("click", ".mes", function () { + //when a 'delete message' parent div is clicked + // and we are in delete mode + if (!is_delete_mode) { + return; + } + $(".mes").children(".del_checkbox").each(function () { $(this).prop("checked", false); $(this).parent().css("background", css_mes_bg); }); - $(this).parent().css("background", "#600"); //sets the bg of the mes selected for deletion - var i = $(this).parent().attr("mesid"); //checks the message ID in the chat + $(this).css("background", "#600"); //sets the bg of the mes selected for deletion + var i = $(this).attr("mesid"); //checks the message ID in the chat this_del_mes = i; while (i < chat.length) { //as long as the current message ID is less than the total chat length @@ -4531,6 +4536,7 @@ $(document).ready(function () { //console.log(i); } }); + $(document).on("click", "#user_avatar_block .avatar", function () { user_avatar = $(this).attr("imgfile"); reloadUserAvatar(); @@ -5225,6 +5231,7 @@ $(document).ready(function () { } }); } + is_delete_mode = true; } }); @@ -5243,6 +5250,7 @@ $(document).ready(function () { this_del_mes = 0; console.log('canceled del msgs, calling showswipesbtns'); showSwipeButtons(); + is_delete_mode = false; }); //confirms message delation with the "ok" button @@ -5271,6 +5279,7 @@ $(document).ready(function () { $('#chat .mes').eq(-2).removeClass('last_mes'); console.log('confirmed del msgs, calling showswipesbtns'); showSwipeButtons(); + is_delete_mode = false; }); $("#settings_perset").change(function () { From 83f9876a131e691f941d2526696ef560aefc660f Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Tue, 16 May 2023 10:51:04 +0300 Subject: [PATCH 50/64] Check for message deletable on click --- public/script.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/public/script.js b/public/script.js index 44327339d..9040454ef 100644 --- a/public/script.js +++ b/public/script.js @@ -4909,8 +4909,8 @@ $(document).ready(function () { $(document).on("click", ".mes", function () { //when a 'delete message' parent div is clicked - // and we are in delete mode - if (!is_delete_mode) { + // and we are in delete mode and del_checkbox is 
visible + if (!is_delete_mode || !$(this).children('.del_checkbox').is(':visible')) { return; } $(".mes").children(".del_checkbox").each(function () { From be3b4e18a7958e47e8656d013275b5bb68aa8054 Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Tue, 16 May 2023 12:36:46 +0300 Subject: [PATCH 51/64] Reduce console spam --- public/scripts/extensions/stable-diffusion/index.js | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/public/scripts/extensions/stable-diffusion/index.js b/public/scripts/extensions/stable-diffusion/index.js index c4af71d2b..889a8881a 100644 --- a/public/scripts/extensions/stable-diffusion/index.js +++ b/public/scripts/extensions/stable-diffusion/index.js @@ -229,6 +229,10 @@ async function loadHordeSamplers() { } async function loadExtrasSamplers() { + if (!modules.includes('sd')) { + return []; + } + const url = new URL(getApiUrl()); url.pathname = '/api/image/samplers'; const result = await fetch(url, defaultRequestArgs); @@ -276,6 +280,10 @@ async function loadHordeModels() { } async function loadExtrasModels() { + if (!modules.includes('sd')) { + return []; + } + const url = new URL(getApiUrl()); url.pathname = '/api/image/model'; const getCurrentModelResult = await fetch(url, defaultRequestArgs); From 3b5ce2f669ab089ad708905445c157c7dd1eaa10 Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Tue, 16 May 2023 12:40:36 +0300 Subject: [PATCH 52/64] Fix quiet prompt extraction --- public/script.js | 5 ----- 1 file changed, 5 deletions(-) diff --git a/public/script.js b/public/script.js index 9040454ef..5f215e007 100644 --- a/public/script.js +++ b/public/script.js @@ -2096,11 +2096,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject, } } - // Add quiet generation prompt at depth 0 - if (quiet_prompt && quiet_prompt.length) { - finalPromt += `\n${quiet_prompt}`; - } - finalPromt = finalPromt.replace(/\r/gm, ''); if (power_user.collapse_newlines) { From 7192c912ab382f5a777932e54bd366d6d4791dfc Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Tue, 16 May 2023 12:46:58 +0300 Subject: [PATCH 53/64] Fix Horde image gen --- server.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/server.js b/server.js index ccfc29825..f36f38086 100644 --- a/server.js +++ b/server.js @@ -2974,9 +2974,11 @@ app.post('/horde_generateimage', jsonParser, async (request, response) => { return response.send(result.generations[0].img); } + /* if (!check.is_possible) { return response.sendStatus(503); } + */ if (check.faulted) { return response.sendStatus(500); From 07bc7c434a8d05a033a323c42e2990edb2559654 Mon Sep 17 00:00:00 2001 From: SillyLossy Date: Tue, 16 May 2023 14:20:37 +0300 Subject: [PATCH 54/64] SD gen error message --- public/scripts/extensions/stable-diffusion/index.js | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/public/scripts/extensions/stable-diffusion/index.js b/public/scripts/extensions/stable-diffusion/index.js index 889a8881a..b2a098357 100644 --- a/public/scripts/extensions/stable-diffusion/index.js +++ b/public/scripts/extensions/stable-diffusion/index.js @@ -409,11 +409,12 @@ async function generateExtrasImage(prompt) { }), }); - if (result.ok) { const data = await result.json(); const base64Image = `data:image/jpeg;base64,${data.image}`; sendMessage(prompt, base64Image); + } else { + callPopup('Image generation has failed. 
Please try again.', 'text'); } } @@ -439,6 +440,8 @@ async function generateHordeImage(prompt) { const data = await result.text(); const base64Image = `data:image/webp;base64,${data}`; sendMessage(prompt, base64Image); + } else { + callPopup('Image generation has failed. Please try again.', 'text'); } } From 0e6ae900f62c61c4a0c336c7185408c8f7a2b590 Mon Sep 17 00:00:00 2001 From: RossAscends <124905043+RossAscends@users.noreply.github.com> Date: Tue, 16 May 2023 22:04:17 +0900 Subject: [PATCH 55/64] Authors Note draggable. --- public/index.html | 63 +++++++------ public/script.js | 23 ++++- public/scripts/RossAscends-mods.js | 12 +++ .../extensions/floating-prompt/index.js | 90 ++++++++++--------- .../extensions/floating-prompt/style.css | 29 ++++-- public/style.css | 9 +- 6 files changed, 147 insertions(+), 79 deletions(-) diff --git a/public/index.html b/public/index.html index ca09563f0..8b10f038a 100644 --- a/public/index.html +++ b/public/index.html @@ -1221,7 +1221,7 @@ Trim Incomplete Sentences -