Merge pull request #1431 from valadaptive/eslint-fixes-3

ESLint fixes part 3 - more significant changes
This commit is contained in:
Cohee
2023-12-02 19:55:54 +02:00
committed by GitHub
30 changed files with 118 additions and 247 deletions

View File

@@ -50,12 +50,10 @@ module.exports = {
// Most, if not all, of these rules should eventually be enabled and the code changed. They're disabled so that
// linting passes.
rules: {
'no-unused-vars': 'off',
'no-unused-vars': ['error', {args: 'none'}],
'no-control-regex': 'off',
'no-redeclare': 'off',
'no-async-promise-executor': 'off',
'no-inner-declarations': 'off',
'no-undef': 'off',
'require-yield': 'off',
'no-constant-condition': ['error', {checkLoops: false}]
}

View File

@@ -62,7 +62,6 @@ import {
renameGroupChat,
importGroupChat,
getGroupBlock,
getGroupChatNames,
getGroupCharacterCards,
getGroupDepthPrompts,
} from "./scripts/group-chats.js";
@@ -130,8 +129,6 @@ import {
import {
debounce,
delay,
restoreCaretPosition,
saveCaretPosition,
trimToEndSentence,
countOccurrences,
isOdd,
@@ -193,7 +190,7 @@ import { getFriendlyTokenizerName, getTokenCount, getTokenizerModel, initTokeniz
import { createPersona, initPersonas, selectCurrentPersona, setPersonaDescription } from "./scripts/personas.js";
import { getBackgrounds, initBackgrounds } from "./scripts/backgrounds.js";
import { hideLoader, showLoader } from "./scripts/loader.js";
import { CharacterContextMenu, BulkEditOverlay } from "./scripts/BulkEditOverlay.js";
import { BulkEditOverlay } from "./scripts/BulkEditOverlay.js";
import { loadMancerModels } from "./scripts/mancer-settings.js";
import { getFileAttachment, hasPendingFileAttachment, populateFileAttachment } from "./scripts/chats.js";
import { replaceVariableMacros } from "./scripts/variables.js";
@@ -327,7 +324,6 @@ eventSource.on(event_types.MESSAGE_SENT, processExtensionHelpers);
eventSource.on(event_types.CHAT_CHANGED, processChatSlashCommands);
const characterGroupOverlay = new BulkEditOverlay();
const characterContextMenu = new CharacterContextMenu(characterGroupOverlay);
eventSource.on(event_types.CHARACTER_PAGE_LOADED, characterGroupOverlay.onPageLoad);
hljs.addPlugin({ "before:highlightElement": ({ el }) => { el.textContent = el.innerText } });
@@ -666,7 +662,6 @@ let is_send_press = false; //Send generation
let this_del_mes = -1;
//message editing and chat scroll position persistence
var this_edit_mes_text = "";
var this_edit_mes_chname = "";
var this_edit_mes_id;
var scroll_holder = 0;
@@ -2224,7 +2219,6 @@ function getStoppingStrings(isImpersonate, isContinue) {
*/
export async function generateQuietPrompt(quiet_prompt, quietToLoud, skipWIAN, quietImage = null) {
console.log('got into genQuietPrompt')
const skipWIANvalue = skipWIAN
return await new Promise(
async function promptPromise(resolve, reject) {
if (quietToLoud === true) {
@@ -3855,7 +3849,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
//prevent custom depth WI entries (which have unique random key names) from duplicating
for (let key in extension_prompts) {
if (key.includes('customDepthWI')) {
let keyname = extension_prompts[key]
delete extension_prompts[key];
}
}
@@ -4108,6 +4101,8 @@ function addChatsSeparator(mesSendString) {
}
}
// There's a TODO related to zero-depth anchors; not removing this function until that's resolved
// eslint-disable-next-line no-unused-vars
function appendZeroDepthAnchor(force_name2, zeroDepthAnchor, finalPrompt) {
const trimBothEnds = !force_name2;
let trimmedPrompt = (trimBothEnds ? zeroDepthAnchor.trim() : zeroDepthAnchor.trimEnd());
@@ -4177,143 +4172,101 @@ function promptItemize(itemizedPrompts, requestedMesId) {
return null;
}
//these happen regardless of API
var charDescriptionTokens = getTokenCount(itemizedPrompts[thisPromptSet].charDescription);
var charPersonalityTokens = getTokenCount(itemizedPrompts[thisPromptSet].charPersonality);
var scenarioTextTokens = getTokenCount(itemizedPrompts[thisPromptSet].scenarioText);
var userPersonaStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].userPersona);
var worldInfoStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].worldInfoString);
var allAnchorsTokens = getTokenCount(itemizedPrompts[thisPromptSet].allAnchors);
var summarizeStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].summarizeString);
var authorsNoteStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].authorsNoteString);
var smartContextStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].smartContextString);
var beforeScenarioAnchorTokens = getTokenCount(itemizedPrompts[thisPromptSet].beforeScenarioAnchor);
var afterScenarioAnchorTokens = getTokenCount(itemizedPrompts[thisPromptSet].afterScenarioAnchor);
var zeroDepthAnchorTokens = getTokenCount(itemizedPrompts[thisPromptSet].zeroDepthAnchor);
var thisPrompt_max_context = itemizedPrompts[thisPromptSet].this_max_context;
var thisPrompt_padding = itemizedPrompts[thisPromptSet].padding;
var this_main_api = itemizedPrompts[thisPromptSet].main_api;
const params = {
charDescriptionTokens: getTokenCount(itemizedPrompts[thisPromptSet].charDescription),
charPersonalityTokens: getTokenCount(itemizedPrompts[thisPromptSet].charPersonality),
scenarioTextTokens: getTokenCount(itemizedPrompts[thisPromptSet].scenarioText),
userPersonaStringTokens: getTokenCount(itemizedPrompts[thisPromptSet].userPersona),
worldInfoStringTokens: getTokenCount(itemizedPrompts[thisPromptSet].worldInfoString),
allAnchorsTokens: getTokenCount(itemizedPrompts[thisPromptSet].allAnchors),
summarizeStringTokens: getTokenCount(itemizedPrompts[thisPromptSet].summarizeString),
authorsNoteStringTokens: getTokenCount(itemizedPrompts[thisPromptSet].authorsNoteString),
smartContextStringTokens: getTokenCount(itemizedPrompts[thisPromptSet].smartContextString),
beforeScenarioAnchorTokens: getTokenCount(itemizedPrompts[thisPromptSet].beforeScenarioAnchor),
afterScenarioAnchorTokens: getTokenCount(itemizedPrompts[thisPromptSet].afterScenarioAnchor),
zeroDepthAnchorTokens: getTokenCount(itemizedPrompts[thisPromptSet].zeroDepthAnchor), // TODO: unused
thisPrompt_padding: itemizedPrompts[thisPromptSet].padding,
this_main_api: itemizedPrompts[thisPromptSet].main_api
};
if (this_main_api == 'openai') {
if (params.this_main_api == 'openai') {
//for OAI API
//console.log('-- Counting OAI Tokens');
//var finalPromptTokens = itemizedPrompts[thisPromptSet].oaiTotalTokens;
var oaiMainTokens = itemizedPrompts[thisPromptSet].oaiMainTokens;
var oaiStartTokens = itemizedPrompts[thisPromptSet].oaiStartTokens;
var ActualChatHistoryTokens = itemizedPrompts[thisPromptSet].oaiConversationTokens;
var examplesStringTokens = itemizedPrompts[thisPromptSet].oaiExamplesTokens;
var oaiPromptTokens = itemizedPrompts[thisPromptSet].oaiPromptTokens - (afterScenarioAnchorTokens + beforeScenarioAnchorTokens) + examplesStringTokens;
var oaiBiasTokens = itemizedPrompts[thisPromptSet].oaiBiasTokens;
var oaiJailbreakTokens = itemizedPrompts[thisPromptSet].oaiJailbreakTokens;
var oaiNudgeTokens = itemizedPrompts[thisPromptSet].oaiNudgeTokens;
var oaiImpersonateTokens = itemizedPrompts[thisPromptSet].oaiImpersonateTokens;
var oaiNsfwTokens = itemizedPrompts[thisPromptSet].oaiNsfwTokens;
var finalPromptTokens =
oaiStartTokens +
oaiPromptTokens +
oaiMainTokens +
oaiNsfwTokens +
oaiBiasTokens +
oaiImpersonateTokens +
oaiJailbreakTokens +
oaiNudgeTokens +
ActualChatHistoryTokens +
//params.finalPromptTokens = itemizedPrompts[thisPromptSet].oaiTotalTokens;
params.oaiMainTokens = itemizedPrompts[thisPromptSet].oaiMainTokens;
params.oaiStartTokens = itemizedPrompts[thisPromptSet].oaiStartTokens;
params.ActualChatHistoryTokens = itemizedPrompts[thisPromptSet].oaiConversationTokens;
params.examplesStringTokens = itemizedPrompts[thisPromptSet].oaiExamplesTokens;
params.oaiPromptTokens = itemizedPrompts[thisPromptSet].oaiPromptTokens - (params.afterScenarioAnchorTokens + params.beforeScenarioAnchorTokens) + params.examplesStringTokens;
params.oaiBiasTokens = itemizedPrompts[thisPromptSet].oaiBiasTokens;
params.oaiJailbreakTokens = itemizedPrompts[thisPromptSet].oaiJailbreakTokens;
params.oaiNudgeTokens = itemizedPrompts[thisPromptSet].oaiNudgeTokens;
params.oaiImpersonateTokens = itemizedPrompts[thisPromptSet].oaiImpersonateTokens;
params.oaiNsfwTokens = itemizedPrompts[thisPromptSet].oaiNsfwTokens;
params.finalPromptTokens =
params.oaiStartTokens +
params.oaiPromptTokens +
params.oaiMainTokens +
params.oaiNsfwTokens +
params.oaiBiasTokens +
params.oaiImpersonateTokens +
params.oaiJailbreakTokens +
params.oaiNudgeTokens +
params.ActualChatHistoryTokens +
//charDescriptionTokens +
//charPersonalityTokens +
//allAnchorsTokens +
worldInfoStringTokens +
beforeScenarioAnchorTokens +
afterScenarioAnchorTokens;
// OAI doesn't use padding
thisPrompt_padding = 0;
params.worldInfoStringTokens +
params.beforeScenarioAnchorTokens +
params.afterScenarioAnchorTokens;
// Max context size - max completion tokens
thisPrompt_max_context = (oai_settings.openai_max_context - oai_settings.openai_max_tokens);
params.thisPrompt_max_context = (oai_settings.openai_max_context - oai_settings.openai_max_tokens);
//console.log('-- applying % on OAI tokens');
params.oaiStartTokensPercentage = ((params.oaiStartTokens / (params.finalPromptTokens)) * 100).toFixed(2);
params.storyStringTokensPercentage = (((params.afterScenarioAnchorTokens + params.beforeScenarioAnchorTokens + params.oaiPromptTokens) / (params.finalPromptTokens)) * 100).toFixed(2);
params.ActualChatHistoryTokensPercentage = ((params.ActualChatHistoryTokens / (params.finalPromptTokens)) * 100).toFixed(2);
params.promptBiasTokensPercentage = ((params.oaiBiasTokens / (params.finalPromptTokens)) * 100).toFixed(2);
params.worldInfoStringTokensPercentage = ((params.worldInfoStringTokens / (params.finalPromptTokens)) * 100).toFixed(2);
params.allAnchorsTokensPercentage = ((params.allAnchorsTokens / (params.finalPromptTokens)) * 100).toFixed(2);
params.selectedTokenizer = getFriendlyTokenizerName(params.this_main_api).tokenizerName;
params.oaiSystemTokens = params.oaiImpersonateTokens + params.oaiJailbreakTokens + params.oaiNudgeTokens + params.oaiStartTokens + params.oaiNsfwTokens + params.oaiMainTokens;
params.oaiSystemTokensPercentage = ((params.oaiSystemTokens / (params.finalPromptTokens)) * 100).toFixed(2);
} else {
//for non-OAI APIs
//console.log('-- Counting non-OAI Tokens');
var finalPromptTokens = getTokenCount(itemizedPrompts[thisPromptSet].finalPrompt);
var storyStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].storyString) - worldInfoStringTokens;
var examplesStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].examplesString);
var mesSendStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].mesSendString)
var ActualChatHistoryTokens = mesSendStringTokens - (allAnchorsTokens - (beforeScenarioAnchorTokens + afterScenarioAnchorTokens)) + power_user.token_padding;
var instructionTokens = getTokenCount(itemizedPrompts[thisPromptSet].instruction);
var promptBiasTokens = getTokenCount(itemizedPrompts[thisPromptSet].promptBias);
params.finalPromptTokens = getTokenCount(itemizedPrompts[thisPromptSet].finalPrompt);
params.storyStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].storyString) - params.worldInfoStringTokens;
params.examplesStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].examplesString);
params.mesSendStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].mesSendString)
params.ActualChatHistoryTokens = params.mesSendStringTokens - (params.allAnchorsTokens - (params.beforeScenarioAnchorTokens + params.afterScenarioAnchorTokens)) + power_user.token_padding;
params.instructionTokens = getTokenCount(itemizedPrompts[thisPromptSet].instruction);
params.promptBiasTokens = getTokenCount(itemizedPrompts[thisPromptSet].promptBias);
var totalTokensInPrompt =
storyStringTokens + //chardefs total
worldInfoStringTokens +
examplesStringTokens + // example messages
ActualChatHistoryTokens + //chat history
allAnchorsTokens + // AN and/or legacy anchors
params.totalTokensInPrompt =
params.storyStringTokens + //chardefs total
params.worldInfoStringTokens +
params.examplesStringTokens + // example messages
params.ActualChatHistoryTokens + //chat history
params.allAnchorsTokens + // AN and/or legacy anchors
//afterScenarioAnchorTokens + //only counts if AN is set to 'after scenario'
//zeroDepthAnchorTokens + //same as above, even if AN not on 0 depth
promptBiasTokens; //{{}}
params.promptBiasTokens; //{{}}
//- thisPrompt_padding; //not sure this way of calculating is correct, but the math results in same value as 'finalPrompt'
}
params.thisPrompt_max_context = itemizedPrompts[thisPromptSet].this_max_context;
params.thisPrompt_actual = params.thisPrompt_max_context - params.thisPrompt_padding;
if (this_main_api == 'openai') {
//console.log('-- applying % on OAI tokens');
var oaiStartTokensPercentage = ((oaiStartTokens / (finalPromptTokens)) * 100).toFixed(2);
var storyStringTokensPercentage = (((afterScenarioAnchorTokens + beforeScenarioAnchorTokens + oaiPromptTokens) / (finalPromptTokens)) * 100).toFixed(2);
var ActualChatHistoryTokensPercentage = ((ActualChatHistoryTokens / (finalPromptTokens)) * 100).toFixed(2);
var promptBiasTokensPercentage = ((oaiBiasTokens / (finalPromptTokens)) * 100).toFixed(2);
var worldInfoStringTokensPercentage = ((worldInfoStringTokens / (finalPromptTokens)) * 100).toFixed(2);
var allAnchorsTokensPercentage = ((allAnchorsTokens / (finalPromptTokens)) * 100).toFixed(2);
var selectedTokenizer = getFriendlyTokenizerName(this_main_api).tokenizerName;
var oaiSystemTokens = oaiImpersonateTokens + oaiJailbreakTokens + oaiNudgeTokens + oaiStartTokens + oaiNsfwTokens + oaiMainTokens;
var oaiSystemTokensPercentage = ((oaiSystemTokens / (finalPromptTokens)) * 100).toFixed(2);
} else {
//console.log('-- applying % on non-OAI tokens');
var storyStringTokensPercentage = ((storyStringTokens / (totalTokensInPrompt)) * 100).toFixed(2);
var ActualChatHistoryTokensPercentage = ((ActualChatHistoryTokens / (totalTokensInPrompt)) * 100).toFixed(2);
var promptBiasTokensPercentage = ((promptBiasTokens / (totalTokensInPrompt)) * 100).toFixed(2);
var worldInfoStringTokensPercentage = ((worldInfoStringTokens / (totalTokensInPrompt)) * 100).toFixed(2);
var allAnchorsTokensPercentage = ((allAnchorsTokens / (totalTokensInPrompt)) * 100).toFixed(2);
var selectedTokenizer = getFriendlyTokenizerName(this_main_api).tokenizerName;
params.storyStringTokensPercentage = ((params.storyStringTokens / (params.totalTokensInPrompt)) * 100).toFixed(2);
params.ActualChatHistoryTokensPercentage = ((params.ActualChatHistoryTokens / (params.totalTokensInPrompt)) * 100).toFixed(2);
params.promptBiasTokensPercentage = ((params.promptBiasTokens / (params.totalTokensInPrompt)) * 100).toFixed(2);
params.worldInfoStringTokensPercentage = ((params.worldInfoStringTokens / (params.totalTokensInPrompt)) * 100).toFixed(2);
params.allAnchorsTokensPercentage = ((params.allAnchorsTokens / (params.totalTokensInPrompt)) * 100).toFixed(2);
params.selectedTokenizer = getFriendlyTokenizerName(params.this_main_api).tokenizerName;
}
const params = {
selectedTokenizer,
this_main_api,
storyStringTokensPercentage,
worldInfoStringTokensPercentage,
ActualChatHistoryTokensPercentage,
allAnchorsTokensPercentage,
promptBiasTokensPercentage,
storyStringTokens,
charDescriptionTokens,
charPersonalityTokens,
scenarioTextTokens,
examplesStringTokens,
userPersonaStringTokens,
instructionTokens,
worldInfoStringTokens,
ActualChatHistoryTokens,
allAnchorsTokens,
summarizeStringTokens,
authorsNoteStringTokens,
smartContextStringTokens,
promptBiasTokens,
totalTokensInPrompt,
finalPromptTokens,
thisPrompt_max_context,
thisPrompt_padding,
thisPrompt_actual: thisPrompt_max_context - thisPrompt_padding,
oaiSystemTokensPercentage,
oaiStartTokensPercentage,
oaiSystemTokens,
oaiStartTokens,
oaiJailbreakTokens,
oaiNudgeTokens,
oaiImpersonateTokens,
oaiPromptTokens,
oaiBiasTokens,
oaiNsfwTokens,
oaiMainTokens,
};
if (this_main_api == 'openai') {
if (params.this_main_api == 'openai') {
callPopup(renderTemplate('itemizationChat', params), 'text');
} else {
@@ -6796,7 +6749,6 @@ function addAlternateGreeting(template, greeting, index, getArray) {
async function createOrEditCharacter(e) {
$("#rm_info_avatar").html("");
let save_name = create_save.name;
var formData = new FormData($("#form_create").get(0));
formData.set('fav', fav_ch_checked);
if ($("#form_create").attr("actiontype") == "createcharacter") {
@@ -7657,28 +7609,22 @@ jQuery(async function () {
$(document).on('click', '.api_loading', cancelStatusCheck);
//////////INPUT BAR FOCUS-KEEPING LOGIC/////////////
let S_TAFocused = false;
let S_TAPreviouslyFocused = false;
$('#send_textarea').on('focusin focus click', () => {
S_TAFocused = true;
S_TAPreviouslyFocused = true;
});
$('#send_textarea').on('focusout blur', () => S_TAFocused = false);
$('#options_button, #send_but, #option_regenerate, #option_continue, #mes_continue').on('click', () => {
if (S_TAPreviouslyFocused) {
$('#send_textarea').focus();
S_TAFocused = true;
}
});
$(document).click(event => {
if ($(':focus').attr('id') !== 'send_textarea') {
var validIDs = ["options_button", "send_but", "mes_continue", "send_textarea", "option_regenerate", "option_continue"];
if (!validIDs.includes($(event.target).attr('id'))) {
S_TAFocused = false;
S_TAPreviouslyFocused = false;
}
} else {
S_TAFocused = true;
S_TAPreviouslyFocused = true;
}
});
@@ -8611,6 +8557,7 @@ jQuery(async function () {
if (this_edit_mes_id !== undefined) {
let mes_edited = $(`#chat [mesid="${this_edit_mes_id}"]`).find(".mes_edit_done");
if (Number(edit_mes_id) == count_view_mes - 1) { //if the generating swipe (...)
let run_edit = true;
if (chat[edit_mes_id]['swipe_id'] !== undefined) {
if (chat[edit_mes_id]['swipes'].length === chat[edit_mes_id]['swipe_id']) {
run_edit = false;

View File

@@ -1615,7 +1615,6 @@ PromptManagerModule.prototype.import = function (importData) {
this.setPrompts(prompts);
this.log('Prompt import succeeded');
let promptOrder = [];
if ('global' === this.configuration.promptOrder.strategy) {
const promptOrder = this.getPromptOrderForCharacter({ id: this.configuration.promptOrder.dummyId });
Object.assign(promptOrder, importData.data.prompt_order);

View File

@@ -25,13 +25,13 @@ import {
send_on_enter_options,
} from "./power-user.js";
import { LoadLocal, SaveLocal, CheckLocal, LoadLocalBool } from "./f-localStorage.js";
import { LoadLocal, SaveLocal, LoadLocalBool } from "./f-localStorage.js";
import { selected_group, is_group_generating, getGroupAvatar, groups, openGroupById } from "./group-chats.js";
import {
SECRET_KEYS,
secret_state,
} from "./secrets.js";
import { debounce, delay, getStringHash, isValidUrl, waitUntilCondition } from "./utils.js";
import { debounce, delay, getStringHash, isValidUrl } from "./utils.js";
import { chat_completion_sources, oai_settings } from "./openai.js";
import { getTokenCount } from "./tokenizers.js";
import { isMancer } from "./textgen-settings.js";
@@ -51,7 +51,6 @@ var AutoLoadChatCheckbox = document.getElementById("auto-load-chat-checkbox");
var connection_made = false;
var retry_delay = 500;
var RA_AC_retries = 1;
const observerConfig = { childList: true, subtree: true };
const countTokensDebounced = debounce(RA_CountCharTokens, 1000);
@@ -167,8 +166,6 @@ export function humanizedDateTime() {
(baseDate.getMinutes() < 10 ? "0" : "") + baseDate.getMinutes();
let humanSecond =
(baseDate.getSeconds() < 10 ? "0" : "") + baseDate.getSeconds();
let humanMillisecond =
(baseDate.getMilliseconds() < 10 ? "0" : "") + baseDate.getMilliseconds();
let HumanizedDateTime =
humanYear + "-" + humanMonth + "-" + humanDate + "@" + humanHour + "h" + humanMinute + "m" + humanSecond + "s";
return HumanizedDateTime;
@@ -375,7 +372,6 @@ function RA_checkOnlineStatus() {
$("#API-status-top").addClass("fa-plug");
connection_made = true;
retry_delay = 100;
RA_AC_retries = 1;
if (!is_send_press && !(selected_group && is_group_generating)) {
$("#send_but").removeClass("displayNone"); //on connect, send button shows
@@ -427,7 +423,6 @@ function RA_autoconnect(PrevApi) {
}
if (!connection_made) {
RA_AC_retries++;
retry_delay = Math.min(retry_delay * 2, 30000); // double retry delay up to 30 secs
// console.log('connection attempts: ' + RA_AC_retries + ' delay: ' + (retry_delay / 1000) + 's');
// setTimeout(RA_autoconnect, retry_delay);
@@ -486,7 +481,7 @@ export function dragElement(elmnt) {
var pos1 = 0, pos2 = 0, pos3 = 0, pos4 = 0;
var height, width, top, left, right, bottom,
maxX, maxY, winHeight, winWidth,
topbar, topbarWidth, topBarFirstX, topBarLastX, topBarLastY, sheldWidth;
topbar, topBarFirstX, topBarLastY;
var elmntName = elmnt.attr('id');
console.debug(`dragElement called for ${elmntName}`);
@@ -530,13 +525,10 @@ export function dragElement(elmnt) {
maxY = parseInt(height + top);
winWidth = window.innerWidth;
winHeight = window.innerHeight;
sheldWidth = parseInt($('html').css('--sheldWidth').slice(0, -2));
topbar = document.getElementById("top-bar")
const topbarstyle = getComputedStyle(topbar)
topBarFirstX = parseInt(topbarstyle.marginInline)
topbarWidth = parseInt(topbarstyle.width);
topBarLastX = topBarFirstX + topbarWidth;
topBarLastY = parseInt(topbarstyle.height);
/*console.log(`

View File

@@ -8,7 +8,7 @@ import {
import { selected_group } from "./group-chats.js";
import { extension_settings, getContext, saveMetadataDebounced } from "./extensions.js";
import { registerSlashCommand } from "./slash-commands.js";
import { getCharaFilename, debounce, waitUntilCondition, delay } from "./utils.js";
import { getCharaFilename, debounce, delay } from "./utils.js";
import { getTokenCount } from "./tokenizers.js";
export { MODULE_NAME as NOTE_MODULE_NAME };

View File

@@ -398,7 +398,7 @@ async function setBackground(bg) {
}
async function delBackground(bg) {
const response = await fetch("/delbackground", {
await fetch("/delbackground", {
method: "POST",
headers: getRequestHeaders(),
body: JSON.stringify({

View File

@@ -28,7 +28,6 @@ import { createTagMapFromList } from "./tags.js";
import {
delay,
getUniqueName,
stringFormat,
} from "./utils.js";
export {

View File

@@ -89,7 +89,6 @@ async function onDeleteButtonClick() {
*/
function enableBulkSelect() {
$("#rm_print_characters_block .character_select").each((i, el) => {
const character = $(el).text();
const checkbox = $("<input type='checkbox' class='bulk_select_checkbox'>");
checkbox.on("change", () => {
// Do something when the checkbox is changed

View File

@@ -695,7 +695,7 @@ async function onDeleteClick() {
export async function deleteExtension(extensionName) {
try {
const response = await fetch('/api/extensions/delete', {
await fetch('/api/extensions/delete', {
method: 'POST',
headers: getRequestHeaders(),
body: JSON.stringify({ extensionName })

View File

@@ -13,8 +13,6 @@ const DEBUG_PREFIX = "<Assets module> ";
let previewAudio = null;
let ASSETS_JSON_URL = "https://raw.githubusercontent.com/SillyTavern/SillyTavern-Content/main/index.json"
const extensionName = "assets";
const extensionFolderPath = `scripts/extensions/${extensionName}`;
// DBG
//if (DEBUG_TONY_SAMA_FORK_MODE)
@@ -26,9 +24,6 @@ let currentAssets = {};
// Extension UI and Settings //
//#############################//
const defaultSettings = {
}
function downloadAssetsList(url) {
updateCurrentAssets().then(function () {
fetch(url, { cache: "no-cache" })

View File

@@ -1,4 +1,4 @@
import { callPopup, eventSource, event_types, getRequestHeaders, saveSettingsDebounced, this_chid } from "../../../script.js";
import { callPopup, eventSource, event_types, getRequestHeaders, saveSettingsDebounced } from "../../../script.js";
import { dragElement, isMobile } from "../../RossAscends-mods.js";
import { getContext, getApiUrl, modules, extension_settings, ModuleWorkerWrapper, doExtrasFetch, renderExtensionTemplate } from "../../extensions.js";
import { loadMovingUIState, power_user } from "../../power-user.js";
@@ -393,7 +393,6 @@ async function unloadLiveChar() {
if (!loadResponse.ok) {
throw new Error(loadResponse.statusText);
}
const loadResponseText = await loadResponse.text();
//console.log(`Response: ${loadResponseText}`);
} catch (error) {
//console.error(`Error unloading - ${error}`);

View File

@@ -623,7 +623,6 @@ function generateQuickReplyElements() {
let quickReplyHtml = '';
for (let i = 1; i <= extension_settings.quickReply.numberOfSlots; i++) {
let itemNumber = i + 1
quickReplyHtml += `
<div class="flex-container alignitemscenter" data-order="${i}">
<span class="drag-handle ui-sortable-handle">☰</span>

View File

@@ -1,4 +1,6 @@
import { MenuItem } from "./MenuItem.js";
/**
* @typedef {import('./MenuItem.js').MenuItem} MenuItem
*/
export class ContextMenu {
/**@type {MenuItem[]}*/ itemList = [];

View File

@@ -1,4 +1,6 @@
import { MenuItem } from "./MenuItem.js";
/**
* @typedef {import('./MenuItem.js').MenuItem} MenuItem
*/
export class SubMenu {
/**@type {MenuItem[]}*/ itemList = [];

View File

@@ -2362,7 +2362,6 @@ async function sdMessageButton(e) {
const $mes = $icon.closest('.mes');
const message_id = $mes.attr('mesid');
const message = context.chat[message_id];
const characterName = message?.name || context.name2;
const characterFileName = context.characterId ? context.characters[context.characterId].name : context.groups[Object.keys(context.groups).filter(x => context.groups[x].id === context.groupId)[0]]?.id?.toString();
const messageText = message?.mes;
const hasSavedImage = message?.extra?.image && message?.extra?.title;

View File

@@ -71,16 +71,6 @@ async function doTokenCounter() {
* @param {number[]} ids
*/
function drawChunks(chunks, ids) {
const main_text_color = rgb2hex((getComputedStyle(document.documentElement).getPropertyValue('--SmartThemeBodyColor').trim()))
const italics_text_color = rgb2hex((getComputedStyle(document.documentElement).getPropertyValue('--SmartThemeEmColor').trim()))
const quote_text_color = rgb2hex((getComputedStyle(document.documentElement).getPropertyValue('--SmartThemeQuoteColor').trim()))
const blur_tint_color = rgb2hex((getComputedStyle(document.documentElement).getPropertyValue('--SmartThemeBlurTintColor').trim()))
const chat_tint_color = rgb2hex((getComputedStyle(document.documentElement).getPropertyValue('--SmartThemeChatTintColor').trim()))
const user_mes_blur_tint_color = rgb2hex((getComputedStyle(document.documentElement).getPropertyValue('--SmartThemeUserMesBlurTintColor').trim()))
const bot_mes_blur_tint_color = rgb2hex((getComputedStyle(document.documentElement).getPropertyValue('--SmartThemeBotMesBlurTintColor').trim()))
const shadow_color = rgb2hex((getComputedStyle(document.documentElement).getPropertyValue('--SmartThemeShadowColor').trim()))
const border_color = rgb2hex((getComputedStyle(document.documentElement).getPropertyValue('--SmartThemeBorderColor').trim()))
const pastelRainbow = [
//main_text_color,
//italics_text_color,

View File

@@ -4,17 +4,15 @@ TODO:
- Delete useless call
*/
import { doExtrasFetch, extension_settings, getApiUrl, getContext, modules, ModuleWorkerWrapper } from "../../extensions.js"
import { doExtrasFetch, extension_settings, getApiUrl, modules } from "../../extensions.js"
import { callPopup } from "../../../script.js"
import { initVoiceMap } from "./index.js"
export { CoquiTtsProvider }
const DEBUG_PREFIX = "<Coqui TTS module> ";
const UPDATE_INTERVAL = 1000;
let inApiCall = false;
let voiceIdList = []; // Updated with module worker
let coquiApiModels = {}; // Initialized only once
let coquiApiModelsFull = {}; // Initialized only once
let coquiLocalModels = []; // Initialized only once
@@ -495,7 +493,7 @@ class CoquiTtsProvider {
.append('<option value="none">Select language</option>')
.val('none');
for (var i = 0; i < model_settings["languages"].length; i++) {
for (let i = 0; i < model_settings["languages"].length; i++) {
const language_label = JSON.stringify(model_settings["languages"][i]).replaceAll("\"", "");
$("#coqui_api_model_settings_language").append(new Option(language_label, i));
}
@@ -514,7 +512,7 @@ class CoquiTtsProvider {
.append('<option value="none">Select speaker</option>')
.val('none');
for (var i = 0; i < model_settings["speakers"].length; i++) {
for (let i = 0; i < model_settings["speakers"].length; i++) {
const speaker_label = JSON.stringify(model_settings["speakers"][i]).replaceAll("\"", "");
$("#coqui_api_model_settings_speaker").append(new Option(speaker_label, i));
}

View File

@@ -17,10 +17,7 @@ const UPDATE_INTERVAL = 1000
let voiceMapEntries = []
let voiceMap = {} // {charName:voiceid, charName2:voiceid2}
let audioControl
let storedvalue = false;
let lastCharacterId = null
let lastGroupId = null
let lastChatId = null
let lastMessageHash = null
@@ -314,8 +311,6 @@ let currentAudioJob
let audioPaused = false
let audioQueueProcessorReady = true
let lastAudioPosition = 0
async function playAudioData(audioBlob) {
// Since current audio job can be cancelled, don't playback if it is null
if (currentAudioJob == null) {
@@ -407,14 +402,12 @@ function addAudioControl() {
TTS Playback
</div>`)
$('#ttsExtensionMenuItem').attr('title', 'TTS play/pause').on('click', onAudioControlClicked)
audioControl = document.getElementById('tts_media_control')
updateUiAudioPlayState()
}
function completeCurrentAudioJob() {
audioQueueProcessorReady = true
currentAudioJob = null
lastAudioPosition = 0
talkingAnimation(false) //stop lip animation
// updateUiPlayState();
}
@@ -463,8 +456,6 @@ function completeTtsJob() {
function saveLastValues() {
const context = getContext()
lastGroupId = context.groupId
lastCharacterId = context.characterId
lastChatId = context.chatId
lastMessageHash = getStringHash(
(context.chat.length && context.chat[context.chat.length - 1].mes) ?? ''

View File

@@ -191,7 +191,7 @@ class SystemTtsProvider {
const voice = speechSynthesis.getVoices().find(x => x.voiceURI === voiceId);
if (!voice) {
throw `TTS Voice name ${voiceName} not found`
throw `TTS Voice id ${voiceId} not found`
}
speechSynthesis.cancel();

View File

@@ -133,8 +133,7 @@ class XTTSTtsProvider {
// Perform a simple readiness check by trying to fetch voiceIds
async checkReady() {
const response = await this.fetchTtsVoiceObjects()
await this.fetchTtsVoiceObjects()
}
async onRefreshClick() {

View File

@@ -70,7 +70,7 @@ import {
depth_prompt_depth_default,
loadItemizedPrompts,
} from "../script.js";
import { appendTagToList, createTagMapFromList, getTagsList, applyTagsOnCharacterSelect, tag_map, printTagFilters } from './tags.js';
import { appendTagToList, createTagMapFromList, getTagsList, applyTagsOnCharacterSelect, tag_map } from './tags.js';
import { FILTER_TYPES, FilterHelper } from './filters.js';
export {
@@ -112,7 +112,7 @@ export const group_generation_mode = {
}
export const groupCandidatesFilter = new FilterHelper(debounce(printGroupCandidates, 100));
const groupAutoModeInterval = setInterval(groupChatAutoModeWorker, 5000);
setInterval(groupChatAutoModeWorker, 5000);
const saveGroupDebounced = debounce(async (group, reload) => await _save(group, reload), 500);
async function _save(group, reload = true) {

View File

@@ -79,7 +79,6 @@ export {
}
let openai_messages_count = 0;
let openai_narrator_messages_count = 0;
const default_main_prompt = "Write {{char}}'s next reply in a fictional chat between {{charIfNotGroup}} and {{user}}. Write 1 reply only in internet RP style, italicize actions, and avoid quotation marks. Use markdown. Be proactive, creative, and drive the plot and conversation forward. Write at least 1 paragraph, up to 4. Always stay in character and avoid repetition.";
const default_nsfw_prompt = "NSFW/Smut is allowed. Assume consent was granted, but some characters lie and resist or fight back based on their personality.";
@@ -406,7 +405,6 @@ function setOpenAIMessages(chat) {
let j = 0;
// clean openai msgs
const messages = [];
openai_narrator_messages_count = 0;
for (let i = chat.length - 1; i >= 0; i--) {
let role = chat[j]['is_user'] ? 'user' : 'assistant';
let content = chat[j]['mes'];
@@ -414,7 +412,6 @@ function setOpenAIMessages(chat) {
// 100% legal way to send a message as system
if (chat[j].extra?.type === system_message_types.NARRATOR) {
role = 'system';
openai_narrator_messages_count++;
}
// for groups or sendas command - prepend a character's name

View File

@@ -3,7 +3,6 @@ import {
scrollChatToBottom,
characters,
callPopup,
getStatus,
reloadMarkdownProcessor,
reloadCurrentChat,
getRequestHeaders,
@@ -416,7 +415,6 @@ function switchTokenCount() {
function switchMesIDDisplay() {
const value = localStorage.getItem(storage_keys.mesIDDisplay_enabled);
let before = power_user.mesIDDisplay_enabled;
power_user.mesIDDisplay_enabled = value === null ? true : value == "true";
/* console.log(`
localstorage value:${value},
@@ -536,9 +534,10 @@ async function CreateZenSliders(elmnt) {
var decimals = 2
var offVal, allVal
var stepScale
var steps
if (sliderID == 'amount_gen') {
decimals = 0
var steps = [16, 50, 100, 150, 200, 256, 300, 400, 512, 1024];
steps = [16, 50, 100, 150, 200, 256, 300, 400, 512, 1024];
sliderMin = 0
sliderMax = steps.length - 1
stepScale = 1;
@@ -548,11 +547,11 @@ async function CreateZenSliders(elmnt) {
}
if (sliderID == 'rep_pen_range_textgenerationwebui') {
if (power_user.max_context_unlocked) {
var steps = [0, 256, 512, 768, 1024, 2048, 4096, 8192, 16355, 24576, 32768, 49152, 65536, -1];
steps = [0, 256, 512, 768, 1024, 2048, 4096, 8192, 16355, 24576, 32768, 49152, 65536, -1];
numSteps = 13
allVal = 13
} else {
var steps = [0, 256, 512, 768, 1024, 2048, 4096, 8192, -1];
steps = [0, 256, 512, 768, 1024, 2048, 4096, 8192, -1];
numSteps = 8
allVal = 8
}
@@ -667,13 +666,14 @@ async function CreateZenSliders(elmnt) {
max: sliderMax,
create: function () {
var handle = $(this).find(".ui-slider-handle");
var handleText, stepNumber, leftMargin;
//handling creation of amt_gen
if (newSlider.attr('id') == 'amount_gen_zenslider') {
//console.log(`using custom process for ${newSlider.attr('id')}`)
var handleText = steps[sliderValue]
var stepNumber = sliderValue
var leftMargin = ((stepNumber) / numSteps) * 50 * -1
handleText = steps[sliderValue]
stepNumber = sliderValue
leftMargin = ((stepNumber) / numSteps) * 50 * -1
handle.text(handleText)
.css('margin-left', `${leftMargin}px`)
//console.log(`${newSlider.attr('id')} initial value:${handleText}, stepNum:${stepNumber}, numSteps:${numSteps}, left-margin:${leftMargin}`)
@@ -683,9 +683,9 @@ async function CreateZenSliders(elmnt) {
if ($('#rep_pen_range_textgenerationwebui_zensliders').length !== 0) {
$('#rep_pen_range_textgenerationwebui_zensliders').remove()
}
var handleText = steps[sliderValue]
var stepNumber = sliderValue
var leftMargin = ((stepNumber) / numSteps) * 50 * -1
handleText = steps[sliderValue]
stepNumber = sliderValue
leftMargin = ((stepNumber) / numSteps) * 50 * -1
if (sliderValue === offVal) {
handleText = 'Off'
@@ -710,8 +710,8 @@ async function CreateZenSliders(elmnt) {
} else {
handle.text(numVal).css('color', '');
}
var stepNumber = ((sliderValue - sliderMin) / stepScale)
var leftMargin = (stepNumber / numSteps) * 50 * -1
stepNumber = ((sliderValue - sliderMin) / stepScale)
leftMargin = (stepNumber / numSteps) * 50 * -1
var isManualInput = false
var valueBeforeManualInput
handle.css('margin-left', `${leftMargin}px`)
@@ -777,11 +777,9 @@ async function CreateZenSliders(elmnt) {
//console.log('clamping numVal to sliderMin')
numVal = sliderMin
}
var sliderValRange = sliderMax - sliderMin
var stepNumber = ((ui.value - sliderMin) / stepScale).toFixed(0);
var handleText = (ui.value);
var leftMargin = (stepNumber / numSteps) * 50 * -1;
var percentOfMax = Number((ui.value / sliderMax)) //what % our value is of the max
var perStepPercent = 1 / numSteps //how far in % each step should be on the slider
var leftPos = newSlider.width() * (stepNumber * perStepPercent) //how big of a left margin to give the slider for manual inputs
/* console.log(`
@@ -2294,27 +2292,6 @@ function setAvgBG() {
return [r * 255, g * 255, b * 255];
}
/**
 * Compute the relative luminance of an sRGB color (WCAG 2.x formula).
 * @param {number} r - Red channel, 0-255.
 * @param {number} g - Green channel, 0-255.
 * @param {number} b - Blue channel, 0-255.
 * @returns {number} Relative luminance in [0, 1].
 */
function rgbToLuminance(r, g, b) {
    // Linearize one sRGB channel value (inverse gamma / sRGB transfer curve).
    const gammaCorrect = (color) => {
        return color <= 0.03928
            ? color / 12.92
            : Math.pow((color + 0.055) / 1.055, 2.4);
    };
    // Normalize channels to [0, 1] before linearizing. Unlike the previous
    // version, do NOT round through toFixed(2): quantizing each channel to
    // 2 decimals (and the string round-trip) distorted the luminance.
    const rLuminance = gammaCorrect(r / 255);
    const gLuminance = gammaCorrect(g / 255);
    const bLuminance = gammaCorrect(b / 255);
    // Rec. 709 luma coefficients, per the WCAG relative-luminance definition.
    return 0.2126 * rLuminance + 0.7152 * gLuminance + 0.0722 * bLuminance;
}
//this version keeps BG and main text in same hue
/* function getReadableTextColor(rgb) {
const [r, g, b] = rgb;

View File

@@ -284,7 +284,7 @@ class PresetManager {
}
async deleteCurrentPreset() {
const { presets, preset_names } = this.getPresetList();
const { preset_names } = this.getPresetList();
const value = this.getSelectedPreset();
const nameToDelete = this.getSelectedPresetName();

View File

@@ -8,7 +8,7 @@ import {
entitiesFilter,
printCharacters,
} from "../script.js";
import { FILTER_TYPES, FilterHelper } from "./filters.js";
import { FILTER_TYPES } from "./filters.js";
import { groupCandidatesFilter, groups, selected_group } from "./group-chats.js";
import { download, onlyUnique, parseJsonFile, uuidv4 } from "./utils.js";

View File

@@ -1,4 +1,4 @@
import { saveSettings, callPopup, substituteParams, getRequestHeaders, chat_metadata, this_chid, characters, saveCharacterDebounced, menu_type, eventSource, event_types, getExtensionPrompt, MAX_INJECTION_DEPTH, extension_prompt_types, getExtensionPromptByName, saveMetadata, getCurrentChatId } from "../script.js";
import { saveSettings, callPopup, substituteParams, getRequestHeaders, chat_metadata, this_chid, characters, saveCharacterDebounced, menu_type, eventSource, event_types, getExtensionPromptByName, saveMetadata, getCurrentChatId } from "../script.js";
import { download, debounce, initScrollHeight, resetScrollHeight, parseJsonFile, extractDataFromPng, getFileBuffer, getCharaFilename, getSortableDelay, escapeRegex, PAGINATION_TEMPLATE, navigation_option, waitUntilCondition, isTrueBoolean } from "./utils.js";
import { extension_settings, getContext } from "./extensions.js";
import { NOTE_MODULE_NAME, metadata_keys, shouldWIAddPrompt } from "./authors-note.js";
@@ -1395,7 +1395,7 @@ function createWorldInfoEntry(name, data, fromSlashCommand = false) {
}
async function _save(name, data) {
const response = await fetch("/editworldinfo", {
await fetch("/editworldinfo", {
method: "POST",
headers: getRequestHeaders(),
body: JSON.stringify({ name: name, data: data }),

View File

@@ -1681,10 +1681,6 @@ function readAndParseFromDirectory(directoryPath, fileExtension = '.json') {
return parsedFiles;
}
// Build an Array#sort comparator that orders file names in `directory`
// newest-modified first (descending mtime).
function sortByModifiedDate(directory) {
    const mtimeOf = (fileName) => fs.statSync(`${directory}/${fileName}`).mtime.getTime();
    return (a, b) => mtimeOf(b) - mtimeOf(a);
}
// Build an Array#sort comparator for locale-aware ascending name order.
// The parameter is unused; it exists to keep the same call shape as the
// sibling comparator factories (e.g. sortByModifiedDate).
function sortByName(_) {
    return (first, second) => first.localeCompare(second);
}

View File

@@ -2,7 +2,6 @@ const path = require('path');
const fs = require('fs');
const { default: simpleGit } = require('simple-git');
const sanitize = require('sanitize-filename');
const commandExistsSync = require('command-exists').sync;
const { DIRECTORIES } = require('./constants');
/**

View File

@@ -45,11 +45,6 @@ const logitBiasExp = [
{ "sequence": [21], "bias": -0.08, "ensure_sequence_finish": false, "generate_once": false }
]
// Logit-bias preset applied to HypeBot generations: each entry mildly
// penalizes (bias -0.12) a fixed token-id sequence, without forcing the
// sequence to finish and allowing repeated application.
// NOTE(review): the meaning of these specific token ids is not visible
// from here — presumably they suppress unwanted boilerplate tokens;
// confirm against the model's tokenizer.
const hypeBotLogitBiasExp = [
    { "sequence": [8162], "bias": -0.12, "ensure_sequence_finish": false, "generate_once": false },
    { "sequence": [46256, 224], "bias": -0.12, "ensure_sequence_finish": false, "generate_once": false }
];
function getBadWordsList(model) {
let list = []

View File

@@ -1,4 +1,3 @@
const express = require('express');
const vectra = require('vectra');
const path = require('path');
const sanitize = require('sanitize-filename');