+
diff --git a/public/scripts/extensions/infinity-context/index.js b/public/scripts/extensions/infinity-context/index.js
index 7f3aba368..b06309e5b 100644
--- a/public/scripts/extensions/infinity-context/index.js
+++ b/public/scripts/extensions/infinity-context/index.js
@@ -1,6 +1,7 @@
-import { saveSettingsDebounced, getCurrentChatId, system_message_types, extension_prompt_types, eventSource, event_types, getRequestHeaders, CHARACTERS_PER_TOKEN_RATIO, substituteParams, max_context, } from "../../../script.js";
+import { saveSettingsDebounced, getCurrentChatId, system_message_types, extension_prompt_types, eventSource, event_types, getRequestHeaders, substituteParams, } from "../../../script.js";
import { humanizedDateTime } from "../../RossAscends-mods.js";
import { getApiUrl, extension_settings, getContext, doExtrasFetch } from "../../extensions.js";
+import { CHARACTERS_PER_TOKEN_RATIO } from "../../tokenizers.js";
import { getFileText, onlyUnique, splitRecursive } from "../../utils.js";
export { MODULE_NAME };
diff --git a/public/scripts/extensions/quick-reply/index.js b/public/scripts/extensions/quick-reply/index.js
index 36fb38ff0..9beb88a5f 100644
--- a/public/scripts/extensions/quick-reply/index.js
+++ b/public/scripts/extensions/quick-reply/index.js
@@ -28,7 +28,7 @@ async function updateQuickReplyPresetList() {
if (result.ok) {
var data = await result.json();
presets = data.quickReplyPresets?.length ? data.quickReplyPresets : [];
- console.log(presets)
+ console.debug('Quick Reply presets', presets);
$("#quickReplyPresets").find('option[value!=""]').remove();
@@ -284,7 +284,7 @@ async function doQR(_, text) {
}
text = Number(text)
- //use scale starting with 0
+ //use scale starting with 0
//ex: user inputs "/qr 2" >> qr with data-index 1 (but 2nd item displayed) gets triggered
let QRnum = Number(text - 1)
if (QRnum <= 0) { QRnum = 0 }
diff --git a/public/scripts/extensions/speech-recognition/index.js b/public/scripts/extensions/speech-recognition/index.js
index e5b0ae116..8678b6bcd 100644
--- a/public/scripts/extensions/speech-recognition/index.js
+++ b/public/scripts/extensions/speech-recognition/index.js
@@ -4,11 +4,12 @@ TODO:
*/
import { saveSettingsDebounced } from "../../../script.js";
-import { getContext, getApiUrl, modules, extension_settings, ModuleWorkerWrapper, doExtrasFetch } from "../../extensions.js";
+import { getContext, extension_settings, ModuleWorkerWrapper } from "../../extensions.js";
import { VoskSttProvider } from './vosk.js'
import { WhisperSttProvider } from './whisper.js'
import { BrowserSttProvider } from './browser.js'
import { StreamingSttProvider } from './streaming.js'
+import { getMessageTimeStamp } from "../../RossAscends-mods.js";
export { MODULE_NAME };
const MODULE_NAME = 'Speech Recognition';
@@ -61,10 +62,10 @@ async function moduleWorker() {
let messageStart = -1;
if (extension_settings.speech_recognition.Streaming.triggerWordsEnabled) {
-
+
for (const triggerWord of extension_settings.speech_recognition.Streaming.triggerWords) {
const triggerPos = userMessageRaw.indexOf(triggerWord.toLowerCase());
-
+
// Trigger word not found or not starting message and just a substring
if (triggerPos == -1){ // | (triggerPos > 0 & userMessageFormatted[triggerPos-1] != " ")) {
console.debug(DEBUG_PREFIX+"trigger word not found: ", triggerWord);
@@ -152,12 +153,12 @@ async function processTranscript(transcript) {
name: context.name1,
is_user: true,
is_name: true,
- send_date: Date.now(),
+ send_date: getMessageTimeStamp(),
mes: messageText,
};
context.chat.push(message);
context.addOneMessage(message);
-
+
await context.generate();
$('#debug_output').text("
: message sent: \""+ transcriptFormatted +"\"");
@@ -191,10 +192,10 @@ async function processTranscript(transcript) {
function loadNavigatorAudioRecording() {
if (navigator.mediaDevices.getUserMedia) {
console.debug(DEBUG_PREFIX+' getUserMedia supported by browser.');
-
+
let onSuccess = function(stream) {
const mediaRecorder = new MediaRecorder(stream);
-
+
$("#microphone_button").off('click').on("click", function() {
if (!audioRecording) {
mediaRecorder.start();
@@ -211,30 +212,30 @@ function loadNavigatorAudioRecording() {
$("#microphone_button").toggleClass('fa-microphone fa-microphone-slash');
}
});
-
+
mediaRecorder.onstop = async function() {
console.debug(DEBUG_PREFIX+"data available after MediaRecorder.stop() called: ", audioChunks.length, " chunks");
const audioBlob = new Blob(audioChunks, { type: "audio/wav; codecs=0" });
audioChunks = [];
-
+
const transcript = await sttProvider.processAudio(audioBlob);
-
+
// TODO: lock and release recording while processing?
console.debug(DEBUG_PREFIX+"received transcript:", transcript);
processTranscript(transcript);
}
-
+
mediaRecorder.ondataavailable = function(e) {
audioChunks.push(e.data);
}
}
-
+
let onError = function(err) {
console.debug(DEBUG_PREFIX+"The following error occured: " + err);
}
-
+
navigator.mediaDevices.getUserMedia(constraints).then(onSuccess, onError);
-
+
} else {
console.debug(DEBUG_PREFIX+"getUserMedia not supported on your browser!");
toastr.error("getUserMedia not supported", DEBUG_PREFIX+"not supported for your browser.", { timeOut: 10000, extendedTimeOut: 20000, preventDuplicates: true });
@@ -257,7 +258,7 @@ function loadSttProvider(provider) {
console.warn(`Provider ${sttProviderName} not in Extension Settings, initiatilizing provider in settings`);
extension_settings.speech_recognition[sttProviderName] = {};
}
-
+
$('#speech_recognition_provider').val(sttProviderName);
if (sttProviderName == "None") {
@@ -287,13 +288,13 @@ function loadSttProvider(provider) {
loadNavigatorAudioRecording();
$("#microphone_button").show();
}
-
+
if (sttProviderName == "Streaming") {
sttProvider.loadSettings(extension_settings.speech_recognition[sttProviderName]);
$("#microphone_button").off('click');
$("#microphone_button").hide();
}
-
+
}
function onSttProviderChange() {
@@ -365,7 +366,7 @@ async function onMessageMappingChange() {
console.debug(DEBUG_PREFIX+"Wrong syntax for message mapping, no '=' found in:", text);
}
}
-
+
$("#speech_recognition_message_mapping_status").text("Message mapping updated to: "+JSON.stringify(extension_settings.speech_recognition.messageMapping))
console.debug(DEBUG_PREFIX+"Updated message mapping", extension_settings.speech_recognition.messageMapping);
extension_settings.speech_recognition.messageMappingText = $('#speech_recognition_message_mapping').val()
@@ -425,7 +426,7 @@ $(document).ready(function () {
$('#speech_recognition_message_mode').on('change', onMessageModeChange);
$('#speech_recognition_message_mapping').on('change', onMessageMappingChange);
$('#speech_recognition_message_mapping_enabled').on('click', onMessageMappingEnabledClick);
-
+
const $button = $('');
$('#send_but_sheld').prepend($button);
diff --git a/public/scripts/extensions/stable-diffusion/index.js b/public/scripts/extensions/stable-diffusion/index.js
index 646fa0495..27fff7b23 100644
--- a/public/scripts/extensions/stable-diffusion/index.js
+++ b/public/scripts/extensions/stable-diffusion/index.js
@@ -14,7 +14,7 @@ import {
import { getApiUrl, getContext, extension_settings, doExtrasFetch, modules } from "../../extensions.js";
import { selected_group } from "../../group-chats.js";
import { stringFormat, initScrollHeight, resetScrollHeight, timestampToMoment, getCharaFilename, saveBase64AsFile } from "../../utils.js";
-import { humanizedDateTime } from "../../RossAscends-mods.js";
+import { getMessageTimeStamp, humanizedDateTime } from "../../RossAscends-mods.js";
export { MODULE_NAME };
// Wraps a string into monospace font-face span
@@ -755,11 +755,10 @@ async function sendMessage(prompt, image) {
const messageText = `[${context.name2} sends a picture that contains: ${prompt}]`;
const message = {
name: context.groupId ? systemUserName : context.name2,
- is_system: context.groupId ? true : false,
is_user: false,
is_system: true,
is_name: true,
- send_date: timestampToMoment(Date.now()).format('LL LT'),
+ send_date: getMessageTimeStamp(),
mes: context.groupId ? p(messageText) : messageText,
extra: {
image: image,
diff --git a/public/scripts/extensions/token-counter/index.js b/public/scripts/extensions/token-counter/index.js
index 430fb5771..bdc8e19f0 100644
--- a/public/scripts/extensions/token-counter/index.js
+++ b/public/scripts/extensions/token-counter/index.js
@@ -1,6 +1,6 @@
import { callPopup, main_api } from "../../../script.js";
import { getContext } from "../../extensions.js";
-import { getTokenizerModel } from "../../openai.js";
+import { getTokenizerModel } from "../../tokenizers.js";
async function doTokenCounter() {
const selectedTokenizer = main_api == 'openai'
diff --git a/public/scripts/extensions/translate/index.js b/public/scripts/extensions/translate/index.js
index bb8b1ddb7..ac477ffe3 100644
--- a/public/scripts/extensions/translate/index.js
+++ b/public/scripts/extensions/translate/index.js
@@ -421,9 +421,9 @@ jQuery(() => {
loadSettings();
- eventSource.on(event_types.MESSAGE_RECEIVED, handleIncomingMessage);
+ eventSource.on(event_types.CHARACTER_MESSAGE_RENDERED, handleIncomingMessage);
eventSource.on(event_types.MESSAGE_SWIPED, handleIncomingMessage);
- eventSource.on(event_types.MESSAGE_SENT, handleOutgoingMessage);
+ eventSource.on(event_types.USER_MESSAGE_RENDERED, handleOutgoingMessage);
eventSource.on(event_types.IMPERSONATE_READY, handleImpersonateReady);
eventSource.on(event_types.MESSAGE_EDITED, handleMessageEdit);
diff --git a/public/scripts/extensions/variables/index.js b/public/scripts/extensions/variables/index.js
new file mode 100644
index 000000000..478181b7d
--- /dev/null
+++ b/public/scripts/extensions/variables/index.js
@@ -0,0 +1,66 @@
+import { getContext } from "../../extensions.js";
+
+/**
+ * Gets a chat variable from the current chat metadata.
+ * @param {string} name The name of the variable to get.
+ * @returns {string} The value of the variable.
+ */
+function getChatVariable(name) {
+ const metadata = getContext().chatMetadata;
+
+ if (!metadata) {
+ return '';
+ }
+
+ if (!metadata.variables) {
+ metadata.variables = {};
+ return '';
+ }
+
+ return metadata.variables[name] || '';
+}
+
+/**
+ * Sets a chat variable in the current chat metadata.
+ * @param {string} name The name of the variable to set.
+ * @param {any} value The value of the variable to set.
+ */
+function setChatVariable(name, value) {
+ if (name === undefined || value === undefined) {
+ return;
+ }
+
+ const metadata = getContext().chatMetadata;
+
+ if (!metadata) {
+ return;
+ }
+
+ if (!metadata.variables) {
+ metadata.variables = {};
+ }
+
+ metadata.variables[name] = value;
+}
+
+function listChatVariables() {
+ const metadata = getContext().chatMetadata;
+
+ if (!metadata) {
+ return '';
+ }
+
+ if (!metadata.variables) {
+ metadata.variables = {};
+ return '';
+ }
+
+ return Object.keys(metadata.variables).map(key => `${key}=${metadata.variables[key]}`).join(';');
+}
+
+jQuery(() => {
+ const context = getContext();
+ context.registerHelper('getvar', getChatVariable);
+ context.registerHelper('setvar', setChatVariable);
+ context.registerHelper('listvar', listChatVariables);
+});
diff --git a/public/scripts/extensions/variables/manifest.json b/public/scripts/extensions/variables/manifest.json
new file mode 100644
index 000000000..9c4e9cc48
--- /dev/null
+++ b/public/scripts/extensions/variables/manifest.json
@@ -0,0 +1,11 @@
+{
+ "display_name": "Chat Variables",
+ "loading_order": 100,
+ "requires": [],
+ "optional": [],
+ "js": "index.js",
+ "css": "",
+ "author": "Cohee#1207",
+ "version": "1.0.0",
+ "homePage": "https://github.com/SillyTavern/SillyTavern"
+}
diff --git a/public/scripts/filters.js b/public/scripts/filters.js
index d0c5a7aeb..364259687 100644
--- a/public/scripts/filters.js
+++ b/public/scripts/filters.js
@@ -1,6 +1,10 @@
import { fuzzySearchCharacters, fuzzySearchGroups, fuzzySearchWorldInfo, power_user } from "./power-user.js";
import { tag_map } from "./tags.js";
+/**
+ * The filter types.
+ * @type {Object.}
+ */
export const FILTER_TYPES = {
SEARCH: 'search',
TAG: 'tag',
@@ -9,11 +13,26 @@ export const FILTER_TYPES = {
WORLD_INFO_SEARCH: 'world_info_search',
};
+/**
+ * Helper class for filtering data.
+ * @example
+ * const filterHelper = new FilterHelper(() => console.log('data changed'));
+ * filterHelper.setFilterData(FILTER_TYPES.SEARCH, 'test');
+ * data = filterHelper.applyFilters(data);
+ */
export class FilterHelper {
+ /**
+ * Creates a new FilterHelper
+ * @param {Function} onDataChanged Callback to trigger when the filter data changes
+ */
constructor(onDataChanged) {
this.onDataChanged = onDataChanged;
}
+ /**
+ * The filter functions.
+ * @type {Object.}
+ */
filterFunctions = {
[FILTER_TYPES.SEARCH]: this.searchFilter.bind(this),
[FILTER_TYPES.GROUP]: this.groupFilter.bind(this),
@@ -22,6 +41,10 @@ export class FilterHelper {
[FILTER_TYPES.WORLD_INFO_SEARCH]: this.wiSearchFilter.bind(this),
}
+ /**
+ * The filter data.
+ * @type {Object.}
+ */
filterData = {
[FILTER_TYPES.SEARCH]: '',
[FILTER_TYPES.GROUP]: false,
@@ -30,6 +53,11 @@ export class FilterHelper {
[FILTER_TYPES.WORLD_INFO_SEARCH]: '',
}
+ /**
+ * Applies a fuzzy search filter to the World Info data.
+ * @param {any[]} data The data to filter. Must have a uid property.
+ * @returns {any[]} The filtered data.
+ */
wiSearchFilter(data) {
const term = this.filterData[FILTER_TYPES.WORLD_INFO_SEARCH];
@@ -41,6 +69,11 @@ export class FilterHelper {
return data.filter(entity => fuzzySearchResults.includes(entity.uid));
}
+ /**
+ * Applies a tag filter to the data.
+ * @param {any[]} data The data to filter.
+ * @returns {any[]} The filtered data.
+ */
tagFilter(data) {
const TAG_LOGIC_AND = true; // switch to false to use OR logic for combining tags
const { selected, excluded } = this.filterData[FILTER_TYPES.TAG];
@@ -76,6 +109,11 @@ export class FilterHelper {
return data.filter(entity => getIsTagged(entity));
}
+ /**
+ * Applies a favorite filter to the data.
+ * @param {any[]} data The data to filter.
+ * @returns {any[]} The filtered data.
+ */
favFilter(data) {
if (!this.filterData[FILTER_TYPES.FAV]) {
return data;
@@ -84,6 +122,11 @@ export class FilterHelper {
return data.filter(entity => entity.item.fav || entity.item.fav == "true");
}
+ /**
+ * Applies a group type filter to the data.
+ * @param {any[]} data The data to filter.
+ * @returns {any[]} The filtered data.
+ */
groupFilter(data) {
if (!this.filterData[FILTER_TYPES.GROUP]) {
return data;
@@ -92,6 +135,11 @@ export class FilterHelper {
return data.filter(entity => entity.type === 'group');
}
+ /**
+ * Applies a search filter to the data. Uses fuzzy search if enabled.
+ * @param {any[]} data The data to filter.
+ * @returns {any[]} The filtered data.
+ */
searchFilter(data) {
if (!this.filterData[FILTER_TYPES.SEARCH]) {
return data;
@@ -122,6 +170,12 @@ export class FilterHelper {
return data.filter(entity => getIsValidSearch(entity));
}
+ /**
+ * Sets the filter data for the given filter type.
+ * @param {string} filterType The filter type to set data for.
+ * @param {any} data The data to set.
+ * @param {boolean} suppressDataChanged Whether to suppress the data changed callback.
+ */
setFilterData(filterType, data, suppressDataChanged = false) {
const oldData = this.filterData[filterType];
this.filterData[filterType] = data;
@@ -132,10 +186,19 @@ export class FilterHelper {
}
}
+ /**
+ * Gets the filter data for the given filter type.
+ * @param {string} filterType The filter type to get data for.
+ */
getFilterData(filterType) {
return this.filterData[filterType];
}
+ /**
+ * Applies all filters to the given data.
+ * @param {any[]} data The data to filter.
+ * @returns {any[]} The filtered data.
+ */
applyFilters(data) {
return Object.values(this.filterFunctions)
.reduce((data, fn) => fn(data), data);
diff --git a/public/scripts/group-chats.js b/public/scripts/group-chats.js
index f0655d98d..589809df1 100644
--- a/public/scripts/group-chats.js
+++ b/public/scripts/group-chats.js
@@ -9,7 +9,7 @@ import {
saveBase64AsFile,
PAGINATION_TEMPLATE,
} from './utils.js';
-import { RA_CountCharTokens, humanizedDateTime, dragElement, favsToHotswap } from "./RossAscends-mods.js";
+import { RA_CountCharTokens, humanizedDateTime, dragElement, favsToHotswap, getMessageTimeStamp } from "./RossAscends-mods.js";
import { loadMovingUIState, sortEntitiesList } from './power-user.js';
import {
@@ -202,7 +202,7 @@ function getFirstCharacterMessage(character) {
mes["is_system"] = false;
mes["name"] = character.name;
mes["is_name"] = true;
- mes["send_date"] = humanizedDateTime();
+ mes["send_date"] = getMessageTimeStamp();
mes["original_avatar"] = character.avatar;
mes["extra"] = { "gen_id": Date.now() * Math.random() * 1000000 };
mes["mes"] = messageText
@@ -463,7 +463,7 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
is_group_generating = true;
setCharacterName('');
setCharacterId(undefined);
- const userInput = $("#send_textarea").val();
+ const userInput = String($("#send_textarea").val());
if (typingIndicator.length === 0 && !isStreamingEnabled()) {
typingIndicator = $(
@@ -983,11 +983,9 @@ function printGroupCandidates() {
const storageKey = 'GroupCandidates_PerPage';
$("#rm_group_add_members_pagination").pagination({
dataSource: getGroupCharacters({ doFilter: true, onlyMembers: false }),
- pageSize: 5,
pageRange: 1,
position: 'top',
showPageNumbers: false,
- showSizeChanger: false,
prevText: '<',
nextText: '>',
formatNavigator: PAGINATION_TEMPLATE,
@@ -1011,11 +1009,9 @@ function printGroupMembers() {
const storageKey = 'GroupMembers_PerPage';
$("#rm_group_members_pagination").pagination({
dataSource: getGroupCharacters({ doFilter: false, onlyMembers: true }),
- pageSize: 5,
pageRange: 1,
position: 'top',
showPageNumbers: false,
- showSizeChanger: false,
prevText: '<',
nextText: '>',
formatNavigator: PAGINATION_TEMPLATE,
@@ -1320,7 +1316,7 @@ function openCharacterDefinition(characterSelect) {
}
function filterGroupMembers() {
- const searchValue = $(this).val().toLowerCase();
+ const searchValue = String($(this).val()).toLowerCase();
groupCandidatesFilter.setFilterData(FILTER_TYPES.SEARCH, searchValue);
}
@@ -1390,7 +1386,7 @@ export async function createNewGroupChat(groupId) {
group.chat_metadata = {};
updateChatMetadata(group.chat_metadata, true);
- await editGroup(group.id, true);
+ await editGroup(group.id, true, false);
await getGroupChat(group.id);
}
diff --git a/public/scripts/i18n.js b/public/scripts/i18n.js
new file mode 100644
index 000000000..a27a5a456
--- /dev/null
+++ b/public/scripts/i18n.js
@@ -0,0 +1,75 @@
+import { waitUntilCondition } from "./utils.js";
+
+const storageKey = "language";
+export const localeData = await fetch("i18n.json").then(response => response.json());
+
+export function applyLocale(root = document) {
+ const overrideLanguage = localStorage.getItem("language");
+ var language = overrideLanguage || navigator.language || navigator.userLanguage;
+ language = language.toLowerCase();
+ //load the appropriate language file
+ if (localeData.lang.indexOf(language) < 0) language = "en";
+
+ const $root = root instanceof Document ? $(root) : $(new DOMParser().parseFromString(root, "text/html"));
+
+ //find all the elements with `data-i18n` attribute
+ $root.find("[data-i18n]").each(function () {
+ //read the translation from the language data
+ const keys = $(this).data("i18n").split(';'); // Multi-key entries are ; delimited
+ for (const key of keys) {
+ const attributeMatch = key.match(/\[(\S+)\](.+)/); // [attribute]key
+ if (attributeMatch) { // attribute-tagged key
+ const localizedValue = localeData?.[language]?.[attributeMatch[2]];
+ if (localizedValue) {
+ $(this).attr(attributeMatch[1], localizedValue);
+ }
+ } else { // No attribute tag, treat as 'text'
+ const localizedValue = localeData?.[language]?.[key];
+ if (localizedValue) {
+ $(this).text(localizedValue);
+ }
+ }
+ }
+ });
+
+ if (root !== document) {
+ return $root.get(0).body.innerHTML;
+ }
+}
+
+function addLanguagesToDropdown() {
+ if (!Array.isArray(localeData?.lang)) {
+ return;
+ }
+
+ for (const lang of localeData.lang) {
+ const option = document.createElement('option');
+ option.value = lang;
+ option.innerText = lang;
+ $('#ui_language_select').append(option);
+ }
+
+ const selectedLanguage = localStorage.getItem(storageKey);
+ if (selectedLanguage) {
+ $('#ui_language_select').val(selectedLanguage);
+ }
+}
+
+jQuery(async () => {
+ waitUntilCondition(() => !!localeData);
+ window["applyLocale"] = applyLocale;
+ applyLocale();
+ addLanguagesToDropdown();
+
+ $('#ui_language_select').on('change', async function () {
+ const language = String($(this).val());
+
+ if (language) {
+ localStorage.setItem(storageKey, language);
+ } else {
+ localStorage.removeItem(storageKey);
+ }
+
+ location.reload();
+ });
+});
diff --git a/public/scripts/instruct-mode.js b/public/scripts/instruct-mode.js
index bb99615d4..bc75e266d 100644
--- a/public/scripts/instruct-mode.js
+++ b/public/scripts/instruct-mode.js
@@ -4,6 +4,9 @@ import { saveSettingsDebounced, substituteParams } from "../script.js";
import { selected_group } from "./group-chats.js";
import { power_user } from "./power-user.js";
+/**
+ * @type {any[]} Instruct mode presets.
+ */
export let instruct_presets = [];
const controls = [
@@ -116,6 +119,11 @@ export function autoSelectInstructPreset(modelId) {
* @returns {string[]} Array of instruct mode stopping strings.
*/
export function getInstructStoppingSequences() {
+ /**
+ * Adds instruct mode sequence to the result array.
+ * @param {string} sequence Sequence string.
+ * @returns {void}
+ */
function addInstructSequence(sequence) {
// Cohee: oobabooga's textgen always appends newline before the sequence as a stopping string
// But it's a problem for Metharme which doesn't use newlines to separate them.
@@ -215,6 +223,7 @@ export function formatInstructModeExamples(mesExamples, name1, name2) {
* @param {string} promptBias Prompt bias string.
* @param {string} name1 User name.
* @param {string} name2 Character name.
+ * @returns {string} Formatted instruct mode last prompt line.
*/
export function formatInstructModePrompt(name, isImpersonate, promptBias, name1, name2) {
const includeNames = power_user.instruct.names || (!!selected_group && power_user.instruct.names_force_groups);
@@ -258,7 +267,7 @@ jQuery(() => {
return;
}
- power_user.instruct.preset = name;
+ power_user.instruct.preset = String(name);
controls.forEach(control => {
if (preset[control.property] !== undefined) {
power_user.instruct[control.property] = preset[control.property];
diff --git a/public/scripts/kai-settings.js b/public/scripts/kai-settings.js
index d1f8895ac..9dbb5ec21 100644
--- a/public/scripts/kai-settings.js
+++ b/public/scripts/kai-settings.js
@@ -75,18 +75,18 @@ function loadKoboldSettings(preset) {
}
}
-function getKoboldGenerationData(finalPromt, this_settings, this_amount_gen, this_max_context, isImpersonate, type) {
+function getKoboldGenerationData(finalPrompt, this_settings, this_amount_gen, this_max_context, isImpersonate, type) {
const sampler_order = kai_settings.sampler_order || this_settings.sampler_order;
let generate_data = {
- prompt: finalPromt,
+ prompt: finalPrompt,
gui_settings: false,
sampler_order: sampler_order,
- max_context_length: parseInt(this_max_context),
+ max_context_length: Number(this_max_context),
max_length: this_amount_gen,
- rep_pen: parseFloat(kai_settings.rep_pen),
- rep_pen_range: parseInt(kai_settings.rep_pen_range),
+ rep_pen: Number(kai_settings.rep_pen),
+ rep_pen_range: Number(kai_settings.rep_pen_range),
rep_pen_slope: kai_settings.rep_pen_slope,
- temperature: parseFloat(kai_settings.temp),
+ temperature: Number(kai_settings.temp),
tfs: kai_settings.tfs,
top_a: kai_settings.top_a,
top_k: kai_settings.top_k,
@@ -223,16 +223,30 @@ const sliders = [
}
];
+/**
+ * Determines if the Kobold stop sequence can be used with the given version.
+ * @param {string} version KoboldAI version to check.
+ * @returns {boolean} True if the Kobold stop sequence can be used, false otherwise.
+ */
function canUseKoboldStopSequence(version) {
return (version || '0.0.0').localeCompare(MIN_STOP_SEQUENCE_VERSION, undefined, { numeric: true, sensitivity: 'base' }) > -1;
}
+/**
+ * Determines if the Kobold streaming API can be used with the given version.
+ * @param {{ result: string; version: string; }} koboldVersion KoboldAI version object.
+ * @returns {boolean} True if the Kobold streaming API can be used, false otherwise.
+ */
function canUseKoboldStreaming(koboldVersion) {
if (koboldVersion && koboldVersion.result == 'KoboldCpp') {
return (koboldVersion.version || '0.0').localeCompare(MIN_STREAMING_KCPPVERSION, undefined, { numeric: true, sensitivity: 'base' }) > -1;
} else return false;
}
+/**
+ * Sorts the sampler items by the given order.
+ * @param {any[]} orderArray Sampler order array.
+ */
function sortItemsByOrder(orderArray) {
console.debug('Preset samplers order: ' + orderArray);
const $draggableItems = $("#kobold_order");
diff --git a/public/scripts/nai-settings.js b/public/scripts/nai-settings.js
index a3208162b..41fc87791 100644
--- a/public/scripts/nai-settings.js
+++ b/public/scripts/nai-settings.js
@@ -1,14 +1,14 @@
import {
getRequestHeaders,
getStoppingStrings,
- getTextTokens,
max_context,
novelai_setting_names,
saveSettingsDebounced,
setGenerationParamsFromPreset
} from "../script.js";
-import { getCfg } from "./extensions/cfg/util.js";
-import { MAX_CONTEXT_DEFAULT, tokenizers } from "./power-user.js";
+import { getCfgPrompt } from "./extensions/cfg/util.js";
+import { MAX_CONTEXT_DEFAULT } from "./power-user.js";
+import { getTextTokens, tokenizers } from "./tokenizers.js";
import {
getSortableDelay,
getStringHash,
@@ -395,7 +395,11 @@ function getBadWordPermutations(text) {
return result;
}
-export function getNovelGenerationData(finalPrompt, this_settings, this_amount_gen, isImpersonate) {
+export function getNovelGenerationData(finalPrompt, this_settings, this_amount_gen, isImpersonate, cfgValues) {
+ if (cfgValues.guidanceScale && cfgValues.guidanceScale?.value !== 1) {
+ cfgValues.negativePrompt = (getCfgPrompt(cfgValues.guidanceScale, true))?.value;
+ }
+
const clio = nai_settings.model_novel.includes('clio');
const kayra = nai_settings.model_novel.includes('kayra');
@@ -410,7 +414,6 @@ export function getNovelGenerationData(finalPrompt, this_settings, this_amount_g
: undefined;
const prefix = selectPrefix(nai_settings.prefix, finalPrompt);
- const cfgSettings = getCfg();
let logitBias = [];
if (tokenizerType !== tokenizers.NONE && Array.isArray(nai_settings.logit_bias) && nai_settings.logit_bias.length) {
@@ -437,8 +440,8 @@ export function getNovelGenerationData(finalPrompt, this_settings, this_amount_g
"typical_p": parseFloat(nai_settings.typical_p),
"mirostat_lr": parseFloat(nai_settings.mirostat_lr),
"mirostat_tau": parseFloat(nai_settings.mirostat_tau),
- "cfg_scale": cfgSettings?.guidanceScale ?? parseFloat(nai_settings.cfg_scale),
- "cfg_uc": cfgSettings?.negativePrompt ?? nai_settings.cfg_uc ?? "",
+ "cfg_scale": cfgValues?.guidanceScale?.value ?? parseFloat(nai_settings.cfg_scale),
+ "cfg_uc": cfgValues?.negativePrompt ?? nai_settings.cfg_uc ?? "",
"phrase_rep_pen": nai_settings.phrase_rep_pen,
"stop_sequences": stopSequences,
"bad_words_ids": badWordIds,
diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index b720a684c..aef41beb8 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -48,10 +48,10 @@ import {
delay,
download,
getFileText, getSortableDelay,
- getStringHash,
parseJsonFile,
stringFormat,
} from "./utils.js";
+import { countTokensOpenAI } from "./tokenizers.js";
export {
is_get_status_openai,
@@ -67,7 +67,6 @@ export {
sendOpenAIRequest,
setOpenAIOnlineStatus,
getChatCompletionModel,
- countTokens,
TokenHandler,
IdentifierNotFoundError,
Message,
@@ -109,8 +108,8 @@ const max_4k = 4095;
const max_8k = 8191;
const max_16k = 16383;
const max_32k = 32767;
-const scale_max = 7900; // Probably more. Save some for the system prompt defined on Scale site.
-const claude_max = 8000; // We have a proper tokenizer, so theoretically could be larger (up to 9k)
+const scale_max = 8191;
+const claude_max = 9000; // We have a proper tokenizer, so theoretically could be larger (up to 9k)
const palm2_max = 7500; // The real context window is 8192, spare some for padding due to using turbo tokenizer
const claude_100k_max = 99000;
let ai21_max = 9200; //can easily fit 9k gpt tokens because j2's tokenizer is efficient af
@@ -124,40 +123,6 @@ const openrouter_website_model = 'OR_Website';
let biasCache = undefined;
let model_list = [];
-const objectStore = new localforage.createInstance({ name: "SillyTavern_ChatCompletions" });
-
-let tokenCache = {};
-
-async function loadTokenCache() {
- try {
- console.debug('Chat Completions: loading token cache')
- tokenCache = await objectStore.getItem('tokenCache') || {};
- } catch (e) {
- console.log('Chat Completions: unable to load token cache, using default value', e);
- tokenCache = {};
- }
-}
-
-async function saveTokenCache() {
- try {
- console.debug('Chat Completions: saving token cache')
- await objectStore.setItem('tokenCache', tokenCache);
- } catch (e) {
- console.log('Chat Completions: unable to save token cache', e);
- }
-}
-
-async function resetTokenCache() {
- try {
- console.debug('Chat Completions: resetting token cache');
- Object.keys(tokenCache).forEach(key => delete tokenCache[key]);
- await objectStore.removeItem('tokenCache');
- } catch (e) {
- console.log('Chat Completions: unable to reset token cache', e);
- }
-}
-
-window['resetTokenCache'] = resetTokenCache;
export const chat_completion_sources = {
OPENAI: 'openai',
@@ -219,6 +184,7 @@ const default_settings = {
assistant_prefill: '',
use_ai21_tokenizer: false,
exclude_assistant: false,
+ use_alt_scale: false,
};
const oai_settings = {
@@ -261,15 +227,12 @@ const oai_settings = {
assistant_prefill: '',
use_ai21_tokenizer: false,
exclude_assistant: false,
+ use_alt_scale: false,
};
let openai_setting_names;
let openai_settings;
-export function getTokenCountOpenAI(text) {
- const message = { role: 'system', content: text };
- return countTokens(message, true);
-}
let promptManager = null;
@@ -869,8 +832,6 @@ function prepareOpenAIMessages({
const chat = chatCompletion.getChat();
openai_messages_count = chat.filter(x => x?.role === "user" || x?.role === "assistant")?.length || 0;
- // Save token cache to IndexedDB storage (async, no need to await)
- saveTokenCache();
return [chat, promptManager.tokenHandler.counts];
}
@@ -1082,6 +1043,47 @@ function saveModelList(data) {
}
}
+async function sendAltScaleRequest(openai_msgs_tosend, logit_bias, signal) {
+ const generate_url = '/generate_altscale';
+
+ let firstSysMsgs = []
+ for (let msg of openai_msgs_tosend) {
+ if (msg.role === 'system') {
+ firstSysMsgs.push(substituteParams(msg.name ? msg.name + ": " + msg.content : msg.content));
+ } else {
+ break;
+ }
+ }
+
+ let subsequentMsgs = openai_msgs_tosend.slice(firstSysMsgs.length);
+
+ const joinedSysMsgs = substituteParams(firstSysMsgs.join("\n"));
+ const joinedSubsequentMsgs = subsequentMsgs.reduce((acc, obj) => {
+ return acc + obj.role + ": " + obj.content + "\n";
+ }, "");
+
+ openai_msgs_tosend = substituteParams(joinedSubsequentMsgs);
+
+ const generate_data = {
+ sysprompt: joinedSysMsgs,
+ prompt: openai_msgs_tosend,
+ temp: parseFloat(oai_settings.temp_openai),
+ top_p: parseFloat(oai_settings.top_p_openai),
+ max_tokens: parseFloat(oai_settings.openai_max_tokens),
+ logit_bias: logit_bias,
+ }
+
+ const response = await fetch(generate_url, {
+ method: 'POST',
+ body: JSON.stringify(generate_data),
+ headers: getRequestHeaders(),
+ signal: signal
+ });
+
+ const data = await response.json();
+ return data.output;
+}
+
async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
// Provide default abort signal
if (!signal) {
@@ -1118,7 +1120,7 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
return sendWindowAIRequest(openai_msgs_tosend, signal, stream);
}
- const logitBiasSources = [chat_completion_sources.OPENAI, chat_completion_sources.OPENROUTER];
+ const logitBiasSources = [chat_completion_sources.OPENAI, chat_completion_sources.OPENROUTER, chat_completion_sources.SCALE];
if (oai_settings.bias_preset_selected
&& logitBiasSources.includes(oai_settings.chat_completion_source)
&& Array.isArray(oai_settings.bias_presets[oai_settings.bias_preset_selected])
@@ -1127,6 +1129,10 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
biasCache = logit_bias;
}
+ if (isScale && oai_settings.use_alt_scale) {
+ return sendAltScaleRequest(openai_msgs_tosend, logit_bias, signal)
+ }
+
const model = getChatCompletionModel();
const generate_data = {
"messages": openai_msgs_tosend,
@@ -1363,63 +1369,8 @@ class TokenHandler {
}
}
-function countTokens(messages, full = false) {
- let shouldTokenizeAI21 = oai_settings.chat_completion_source === chat_completion_sources.AI21 && oai_settings.use_ai21_tokenizer;
- let chatId = 'undefined';
- try {
- if (selected_group) {
- chatId = groups.find(x => x.id == selected_group)?.chat_id;
- }
- else if (this_chid) {
- chatId = characters[this_chid].chat;
- }
- } catch {
- console.log('No character / group selected. Using default cache item');
- }
-
- if (typeof tokenCache[chatId] !== 'object') {
- tokenCache[chatId] = {};
- }
-
- if (!Array.isArray(messages)) {
- messages = [messages];
- }
-
- let token_count = -1;
-
- for (const message of messages) {
- const model = getTokenizerModel();
- const hash = getStringHash(JSON.stringify(message));
- const cacheKey = `${model}-${hash}`;
- const cachedCount = tokenCache[chatId][cacheKey];
-
- if (typeof cachedCount === 'number') {
- token_count += cachedCount;
- }
-
- else {
- jQuery.ajax({
- async: false,
- type: 'POST', //
- url: shouldTokenizeAI21 ? '/tokenize_ai21' : `/tokenize_openai?model=${model}`,
- data: JSON.stringify([message]),
- dataType: "json",
- contentType: "application/json",
- success: function (data) {
- token_count += Number(data.token_count);
- tokenCache[chatId][cacheKey] = Number(data.token_count);
- }
- });
- }
- }
-
- if (!full) token_count -= 2;
-
- return token_count;
-}
-
-const tokenHandler = new TokenHandler(countTokens);
+const tokenHandler = new TokenHandler(countTokensOpenAI);
// Thrown by ChatCompletion when a requested prompt couldn't be found.
class IdentifierNotFoundError extends Error {
@@ -1856,62 +1807,6 @@ class ChatCompletion {
}
}
-export function getTokenizerModel() {
- // OpenAI models always provide their own tokenizer
- if (oai_settings.chat_completion_source == chat_completion_sources.OPENAI) {
- return oai_settings.openai_model;
- }
-
- const turboTokenizer = 'gpt-3.5-turbo';
- const gpt4Tokenizer = 'gpt-4';
- const gpt2Tokenizer = 'gpt2';
- const claudeTokenizer = 'claude';
-
- // Assuming no one would use it for different models.. right?
- if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) {
- return gpt4Tokenizer;
- }
-
- // Select correct tokenizer for WindowAI proxies
- if (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI && oai_settings.windowai_model) {
- if (oai_settings.windowai_model.includes('gpt-4')) {
- return gpt4Tokenizer;
- }
- else if (oai_settings.windowai_model.includes('gpt-3.5-turbo')) {
- return turboTokenizer;
- }
- else if (oai_settings.windowai_model.includes('claude')) {
- return claudeTokenizer;
- }
- else if (oai_settings.windowai_model.includes('GPT-NeoXT')) {
- return gpt2Tokenizer;
- }
- }
-
- // And for OpenRouter (if not a site model, then it's impossible to determine the tokenizer)
- if (oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER && oai_settings.openrouter_model) {
- if (oai_settings.openrouter_model.includes('gpt-4')) {
- return gpt4Tokenizer;
- }
- else if (oai_settings.openrouter_model.includes('gpt-3.5-turbo')) {
- return turboTokenizer;
- }
- else if (oai_settings.openrouter_model.includes('claude')) {
- return claudeTokenizer;
- }
- else if (oai_settings.openrouter_model.includes('GPT-NeoXT')) {
- return gpt2Tokenizer;
- }
- }
-
- if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) {
- return claudeTokenizer;
- }
-
- // Default to Turbo 3.5
- return turboTokenizer;
-}
-
function loadOpenAISettings(data, settings) {
openai_setting_names = data.openai_setting_names;
openai_settings = data.openai_settings;
@@ -1971,6 +1866,7 @@ function loadOpenAISettings(data, settings) {
if (settings.openai_model !== undefined) oai_settings.openai_model = settings.openai_model;
if (settings.use_ai21_tokenizer !== undefined) { oai_settings.use_ai21_tokenizer = !!settings.use_ai21_tokenizer; oai_settings.use_ai21_tokenizer ? ai21_max = 8191 : ai21_max = 9200; }
if (settings.exclude_assistant !== undefined) oai_settings.exclude_assistant = !!settings.exclude_assistant;
+ if (settings.use_alt_scale !== undefined) { oai_settings.use_alt_scale = !!settings.use_alt_scale; updateScaleForm(); }
$('#stream_toggle').prop('checked', oai_settings.stream_openai);
$('#api_url_scale').val(oai_settings.api_url_scale);
$('#openai_proxy_password').val(oai_settings.proxy_password);
@@ -2001,6 +1897,7 @@ function loadOpenAISettings(data, settings) {
$('#openai_external_category').toggle(oai_settings.show_external_models);
$('#use_ai21_tokenizer').prop('checked', oai_settings.use_ai21_tokenizer);
$('#exclude_assistant').prop('checked', oai_settings.exclude_assistant);
+ $('#scale-alt').prop('checked', oai_settings.use_alt_scale);
if (settings.impersonation_prompt !== undefined) oai_settings.impersonation_prompt = settings.impersonation_prompt;
$('#impersonation_prompt_textarea').val(oai_settings.impersonation_prompt);
@@ -2199,6 +2096,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
assistant_prefill: settings.assistant_prefill,
use_ai21_tokenizer: settings.use_ai21_tokenizer,
exclude_assistant: settings.exclude_assistant,
+ use_alt_scale: settings.use_alt_scale,
};
const savePresetSettings = await fetch(`/savepreset_openai?name=${name}`, {
@@ -2536,6 +2434,7 @@ function onSettingsPresetChange() {
assistant_prefill: ['#claude_assistant_prefill', 'assistant_prefill', false],
use_ai21_tokenizer: ['#use_ai21_tokenizer', 'use_ai21_tokenizer', false],
exclude_assistant: ['#exclude_assistant', 'exclude_assistant', false],
+ use_alt_scale: ['#use_alt_scale', 'use_alt_scale', false],
};
const presetName = $('#settings_perset_openai').find(":selected").text();
@@ -2831,20 +2730,31 @@ async function onConnectButtonClick(e) {
if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) {
const api_key_scale = $('#api_key_scale').val().trim();
+ const scale_cookie = $('#scale_cookie').val().trim();
if (api_key_scale.length) {
await writeSecret(SECRET_KEYS.SCALE, api_key_scale);
}
- if (!oai_settings.api_url_scale) {
+ if (scale_cookie.length) {
+ await writeSecret(SECRET_KEYS.SCALE_COOKIE, scale_cookie);
+ }
+
+ if (!oai_settings.api_url_scale && !oai_settings.use_alt_scale) {
console.log('No API URL saved for Scale');
return;
}
- if (!secret_state[SECRET_KEYS.SCALE]) {
+ if (!secret_state[SECRET_KEYS.SCALE] && !oai_settings.use_alt_scale) {
console.log('No secret key saved for Scale');
return;
}
+
+ if (!secret_state[SECRET_KEYS.SCALE_COOKIE] && oai_settings.use_alt_scale) {
+ console.log("No cookie set for Scale");
+ return;
+ }
+
}
if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) {
@@ -2958,11 +2868,25 @@ function onProxyPasswordShowClick() {
$(this).toggleClass('fa-eye-slash fa-eye');
}
-$(document).ready(async function () {
- await loadTokenCache();
+function updateScaleForm() {
+ if (oai_settings.use_alt_scale) {
+ $('#normal_scale_form').css('display', 'none');
+ $('#alt_scale_form').css('display', '');
+ } else {
+ $('#normal_scale_form').css('display', '');
+ $('#alt_scale_form').css('display', 'none');
+ }
+}
+$(document).ready(async function () {
$('#test_api_button').on('click', testApiConnection);
+ $('#scale-alt').on('change', function () {
+ oai_settings.use_alt_scale = !!$('#scale-alt').prop('checked');
+ saveSettingsDebounced();
+ updateScaleForm();
+ });
+
$(document).on('input', '#temp_openai', function () {
oai_settings.temp_openai = Number($(this).val());
$('#temp_counter_openai').text(Number($(this).val()).toFixed(2));
diff --git a/public/scripts/power-user.js b/public/scripts/power-user.js
index 3d4832bb1..a0365cc0f 100644
--- a/public/scripts/power-user.js
+++ b/public/scripts/power-user.js
@@ -23,6 +23,7 @@ import {
import { loadInstructMode } from "./instruct-mode.js";
import { registerSlashCommand } from "./slash-commands.js";
+import { tokenizers } from "./tokenizers.js";
import { delay } from "./utils.js";
@@ -35,7 +36,6 @@ export {
fixMarkdown,
power_user,
pygmalion_options,
- tokenizers,
send_on_enter_options,
};
@@ -63,17 +63,6 @@ const pygmalion_options = {
ENABLED: 1,
}
-const tokenizers = {
- NONE: 0,
- GPT3: 1,
- CLASSIC: 2,
- LLAMA: 3,
- NERD: 4,
- NERD2: 5,
- API: 6,
- BEST_MATCH: 99,
-}
-
const send_on_enter_options = {
DISABLED: -1,
AUTO: 0,
@@ -207,7 +196,6 @@ let movingUIPresets = [];
let context_presets = [];
const storage_keys = {
- ui_language: "language",
fast_ui_mode: "TavernAI_fast_ui_mode",
avatar_style: "TavernAI_avatar_style",
chat_display: "TavernAI_chat_display",
@@ -247,29 +235,42 @@ function playMessageSound() {
}
const audio = document.getElementById('audio_message_sound');
- audio.volume = 0.8;
- audio.pause();
- audio.currentTime = 0;
- audio.play();
+ if (audio instanceof HTMLAudioElement) {
+ audio.volume = 0.8;
+ audio.pause();
+ audio.currentTime = 0;
+ audio.play();
+ }
}
+/**
+ * Replaces consecutive newlines with a single newline.
+ * @param {string} x String to be processed.
+ * @returns {string} Processed string.
+ * @example
+ * collapseNewlines("\n\n\n"); // "\n"
+ */
function collapseNewlines(x) {
return x.replaceAll(/\n+/g, "\n");
}
+/**
+ * Fix formatting problems in markdown.
+ * @param {string} text Text to be processed.
+ * @returns {string} Processed text.
+ * @example
+ * "^example * text*\n" // "^example *text*\n"
+ * "^*example * text\n"// "^*example* text\n"
+ * "^example *text *\n" // "^example *text*\n"
+ * "^* example * text\n" // "^*example* text\n"
+ * // take note that the side you move the asterisk depends on where its pairing is
+ * // i.e. both of the following strings have the same broken asterisk ' * ',
+ * // but you move the first to the left and the second to the right, to match the non-broken asterisk
+ * "^example * text*\n" // "^*example * text\n"
+ * // and you HAVE to handle the cases where multiple pairs of asterisks exist in the same line
+ * "^example * text* * harder problem *\n" // "^example *text* *harder problem*\n"
+ */
function fixMarkdown(text) {
- // fix formatting problems in markdown
- // e.g.:
- // "^example * text*\n" -> "^example *text*\n"
- // "^*example * text\n" -> "^*example* text\n"
- // "^example *text *\n" -> "^example *text*\n"
- // "^* example * text\n" -> "^*example* text\n"
- // take note that the side you move the asterisk depends on where its pairing is
- // i.e. both of the following strings have the same broken asterisk ' * ',
- // but you move the first to the left and the second to the right, to match the non-broken asterisk "^example * text*\n" "^*example * text\n"
- // and you HAVE to handle the cases where multiple pairs of asterisks exist in the same line
- // i.e. "^example * text* * harder problem *\n" -> "^example *text* *harder problem*\n"
-
// Find pairs of formatting characters and capture the text in between them
const format = /([\*_]{1,2})([\s\S]*?)\1/gm;
let matches = [];
@@ -899,7 +900,7 @@ function loadContextSettings() {
});
$('#context_presets').on('change', function () {
- const name = $(this).find(':selected').val();
+ const name = String($(this).find(':selected').val());
const preset = context_presets.find(x => x.name === name);
if (!preset) {
@@ -1020,6 +1021,10 @@ const compareFunc = (first, second) => {
}
};
+/**
+ * Sorts an array of entities based on the current sort settings
+ * @param {any[]} entities An array of objects with an `item` property
+ */
function sortEntitiesList(entities) {
if (power_user.sort_field == undefined || entities.length === 0) {
return;
@@ -1027,6 +1032,7 @@ function sortEntitiesList(entities) {
entities.sort((a, b) => sortFunc(a.item, b.item));
}
+
async function saveTheme() {
const name = await callPopup('Enter a theme preset name:', 'input');
@@ -1250,8 +1256,8 @@ async function doDelMode(_, text) {
if (text) {
await delay(300) //same as above, need event signal for 'entered del mode'
console.debug('parsing msgs to del')
- let numMesToDel = Number(text).toFixed(0)
- let lastMesID = $('.last_mes').attr('mesid')
+ let numMesToDel = Number(text);
+ let lastMesID = Number($('.last_mes').attr('mesid'));
let oldestMesIDToDel = lastMesID - numMesToDel + 1;
//disallow targeting first message
@@ -1277,26 +1283,6 @@ function doResetPanels() {
$("#movingUIreset").trigger('click');
}
-function addLanguagesToDropdown() {
- $.getJSON('i18n.json', function (data) {
- if (!Array.isArray(data?.lang)) {
- return;
- }
-
- for (const lang of data.lang) {
- const option = document.createElement('option');
- option.value = lang;
- option.innerText = lang;
- $('#ui_language_select').append(option);
- }
-
- const selectedLanguage = localStorage.getItem(storage_keys.ui_language);
- if (selectedLanguage) {
- $('#ui_language_select').val(selectedLanguage);
- }
- });
-}
-
function setAvgBG() {
const bgimg = new Image();
bgimg.src = $('#bg1')
@@ -1348,10 +1334,6 @@ function setAvgBG() {
$("#user-mes-blur-tint-color-picker").attr('color', 'rgb(' + rgb.r + ',' + rgb.g + ',' + rgb.b + ')');
} */
-
-
-
-
function getAverageRGB(imgEl) {
var blockSize = 5, // only visit every 5 pixels
@@ -1396,6 +1378,13 @@ function setAvgBG() {
}
+ /**
+ * Converts an HSL color value to RGB.
+ * @param {number} h Hue value
+ * @param {number} s Saturation value
+ * @param {number} l Luminance value
+ * @return {Array} The RGB representation
+ */
function hslToRgb(h, s, l) {
const hueToRgb = (p, q, t) => {
if (t < 0) t += 1;
@@ -1437,7 +1426,7 @@ function setAvgBG() {
console.log(`rLum ${rLuminance}, gLum ${gLuminance}, bLum ${bLuminance}`)
- return 0.2126 * rLuminance + 0.7152 * gLuminance + 0.0722 * bLuminance;
+ return 0.2126 * Number(rLuminance) + 0.7152 * Number(gLuminance) + 0.0722 * Number(bLuminance);
}
//this version keeps BG and main text in same hue
@@ -1620,13 +1609,13 @@ $(document).ready(() => {
});
$("#markdown_escape_strings").on('input', function () {
- power_user.markdown_escape_strings = $(this).val();
+ power_user.markdown_escape_strings = String($(this).val());
saveSettingsDebounced();
reloadMarkdownProcessor(power_user.render_formulas);
});
$("#start_reply_with").on('input', function () {
- power_user.user_prompt_bias = $(this).val();
+ power_user.user_prompt_bias = String($(this).val());
saveSettingsDebounced();
});
@@ -1753,7 +1742,7 @@ $(document).ready(() => {
});
$("#themes").on('change', function () {
- const themeSelected = $(this).find(':selected').val();
+ const themeSelected = String($(this).find(':selected').val());
power_user.theme = themeSelected;
applyTheme(themeSelected);
saveSettingsDebounced();
@@ -1761,7 +1750,7 @@ $(document).ready(() => {
$("#movingUIPresets").on('change', async function () {
console.log('saw MUI preset change')
- const movingUIPresetSelected = $(this).find(':selected').val();
+ const movingUIPresetSelected = String($(this).find(':selected').val());
power_user.movingUIPreset = movingUIPresetSelected;
applyMovingUIPreset(movingUIPresetSelected);
saveSettingsDebounced();
@@ -1821,7 +1810,7 @@ $(document).ready(() => {
});
$('#auto_swipe_blacklist').on('input', function () {
- power_user.auto_swipe_blacklist = $(this).val()
+ power_user.auto_swipe_blacklist = String($(this).val())
.split(",")
.map(str => str.trim())
.filter(str => str);
@@ -1830,7 +1819,7 @@ $(document).ready(() => {
});
$('#auto_swipe_minimum_length').on('input', function () {
- const number = parseInt($(this).val());
+ const number = Number($(this).val());
if (!isNaN(number)) {
power_user.auto_swipe_minimum_length = number;
saveSettingsDebounced();
@@ -1838,7 +1827,7 @@ $(document).ready(() => {
});
$('#auto_swipe_blacklist_threshold').on('input', function () {
- const number = parseInt($(this).val());
+ const number = Number($(this).val());
if (!isNaN(number)) {
power_user.auto_swipe_blacklist_threshold = number;
saveSettingsDebounced();
@@ -1921,35 +1910,35 @@ $(document).ready(() => {
$("#messageTimerEnabled").on("input", function () {
const value = !!$(this).prop('checked');
power_user.timer_enabled = value;
- localStorage.setItem(storage_keys.timer_enabled, power_user.timer_enabled);
+ localStorage.setItem(storage_keys.timer_enabled, String(power_user.timer_enabled));
switchTimer();
});
$("#messageTimestampsEnabled").on("input", function () {
const value = !!$(this).prop('checked');
power_user.timestamps_enabled = value;
- localStorage.setItem(storage_keys.timestamps_enabled, power_user.timestamps_enabled);
+ localStorage.setItem(storage_keys.timestamps_enabled, String(power_user.timestamps_enabled));
switchTimestamps();
});
$("#messageModelIconEnabled").on("input", function () {
const value = !!$(this).prop('checked');
power_user.timestamp_model_icon = value;
- localStorage.setItem(storage_keys.timestamp_model_icon, power_user.timestamp_model_icon);
+ localStorage.setItem(storage_keys.timestamp_model_icon, String(power_user.timestamp_model_icon));
switchIcons();
});
$("#mesIDDisplayEnabled").on("input", function () {
const value = !!$(this).prop('checked');
power_user.mesIDDisplay_enabled = value;
- localStorage.setItem(storage_keys.mesIDDisplay_enabled, power_user.mesIDDisplay_enabled);
+ localStorage.setItem(storage_keys.mesIDDisplay_enabled, String(power_user.mesIDDisplay_enabled));
switchMesIDDisplay();
});
$("#hotswapEnabled").on("input", function () {
const value = !!$(this).prop('checked');
power_user.hotswap_enabled = value;
- localStorage.setItem(storage_keys.hotswap_enabled, power_user.hotswap_enabled);
+ localStorage.setItem(storage_keys.hotswap_enabled, String(power_user.hotswap_enabled));
switchHotswap();
});
@@ -1995,7 +1984,7 @@ $(document).ready(() => {
});
$('#custom_stopping_strings').on('input', function () {
- power_user.custom_stopping_strings = $(this).val();
+ power_user.custom_stopping_strings = String($(this).val());
saveSettingsDebounced();
});
@@ -2025,18 +2014,6 @@ $(document).ready(() => {
saveSettingsDebounced();
});
- $('#ui_language_select').on('change', async function () {
- const language = $(this).val();
-
- if (language) {
- localStorage.setItem(storage_keys.ui_language, language);
- } else {
- localStorage.removeItem(storage_keys.ui_language);
- }
-
- location.reload();
- });
-
$(window).on('focus', function () {
browser_has_focus = true;
});
@@ -2052,5 +2029,4 @@ $(document).ready(() => {
registerSlashCommand('cut', doMesCut, [], ' (requred number) – cuts the specified message from the chat', true, true);
registerSlashCommand('resetpanels', doResetPanels, ['resetui'], ' – resets UI panels to original state.', true, true);
registerSlashCommand('bgcol', setAvgBG, [], ' – WIP test of auto-bg avg coloring', true, true);
- addLanguagesToDropdown();
});
diff --git a/public/scripts/preset-manager.js b/public/scripts/preset-manager.js
index 4ebb55418..810ee070f 100644
--- a/public/scripts/preset-manager.js
+++ b/public/scripts/preset-manager.js
@@ -118,7 +118,7 @@ class PresetManager {
async savePresetAs() {
const popupText = `
Preset name:
- Hint: Use a character/group name to bind preset to a specific chat.
`;
+ ${!this.isNonGenericApi() ? 'Hint: Use a character/group name to bind preset to a specific chat.
' : ''}`;
const name = await callPopup(popupText, "input");
if (!name) {
@@ -131,7 +131,8 @@ class PresetManager {
}
async savePreset(name, settings) {
- const preset = settings ?? this.getPresetSettings();
+ const preset = settings ?? this.getPresetSettings(name);
+
const res = await fetch(`/save_preset`, {
method: "POST",
headers: getRequestHeaders(),
@@ -220,7 +221,7 @@ class PresetManager {
}
}
- getPresetSettings() {
+ getPresetSettings(name) {
function getSettingsByApiId(apiId) {
switch (apiId) {
case "koboldhorde":
@@ -232,7 +233,7 @@ class PresetManager {
return textgenerationwebui_settings;
case "instruct":
const preset = deepClone(power_user.instruct);
- preset['name'] = power_user.instruct.preset;
+ preset['name'] = name || power_user.instruct.preset;
return preset;
default:
console.warn(`Unknown API ID ${apiId}`);
@@ -346,7 +347,7 @@ jQuery(async () => {
const selected = $(presetManager.select).find("option:selected");
const name = selected.text();
- const preset = presetManager.getPresetSettings();
+ const preset = presetManager.getPresetSettings(name);
const data = JSON.stringify(preset, null, 4);
download(data, `${name}.json`, "application/json");
});
diff --git a/public/scripts/secrets.js b/public/scripts/secrets.js
index 202870c50..c695ca88c 100644
--- a/public/scripts/secrets.js
+++ b/public/scripts/secrets.js
@@ -9,6 +9,7 @@ export const SECRET_KEYS = {
OPENROUTER: 'api_key_openrouter',
SCALE: 'api_key_scale',
AI21: 'api_key_ai21',
+ SCALE_COOKIE: 'scale_cookie',
}
const INPUT_MAP = {
@@ -20,6 +21,7 @@ const INPUT_MAP = {
[SECRET_KEYS.OPENROUTER]: '#api_key_openrouter',
[SECRET_KEYS.SCALE]: '#api_key_scale',
[SECRET_KEYS.AI21]: '#api_key_ai21',
+ [SECRET_KEYS.SCALE_COOKIE]: '#scale_cookie',
}
async function clearSecret() {
diff --git a/public/scripts/slash-commands.js b/public/scripts/slash-commands.js
index aae5fc9e9..294a61fb9 100644
--- a/public/scripts/slash-commands.js
+++ b/public/scripts/slash-commands.js
@@ -22,7 +22,7 @@ import {
reloadCurrentChat,
sendMessageAsUser,
} from "../script.js";
-import { humanizedDateTime } from "./RossAscends-mods.js";
+import { getMessageTimeStamp } from "./RossAscends-mods.js";
import { resetSelectedGroup } from "./group-chats.js";
import { getRegexedString, regex_placement } from "./extensions/regex/engine.js";
import { chat_styles, power_user } from "./power-user.js";
@@ -327,7 +327,7 @@ async function sendMessageAs(_, text) {
is_user: false,
is_name: true,
is_system: isSystem,
- send_date: humanizedDateTime(),
+ send_date: getMessageTimeStamp(),
mes: substituteParams(mesText),
force_avatar: force_avatar,
original_avatar: original_avatar,
@@ -338,8 +338,9 @@ async function sendMessageAs(_, text) {
};
chat.push(message);
- addOneMessage(message);
await eventSource.emit(event_types.MESSAGE_SENT, (chat.length - 1));
+ addOneMessage(message);
+ await eventSource.emit(event_types.USER_MESSAGE_RENDERED, (chat.length - 1));
saveChatConditional();
}
@@ -358,7 +359,7 @@ async function sendNarratorMessage(_, text) {
is_user: false,
is_name: false,
is_system: isSystem,
- send_date: humanizedDateTime(),
+ send_date: getMessageTimeStamp(),
mes: substituteParams(text.trim()),
force_avatar: system_avatar,
extra: {
@@ -369,8 +370,9 @@ async function sendNarratorMessage(_, text) {
};
chat.push(message);
- addOneMessage(message);
await eventSource.emit(event_types.MESSAGE_SENT, (chat.length - 1));
+ addOneMessage(message);
+ await eventSource.emit(event_types.USER_MESSAGE_RENDERED, (chat.length - 1));
saveChatConditional();
}
@@ -384,7 +386,7 @@ async function sendCommentMessage(_, text) {
is_user: false,
is_name: true,
is_system: true,
- send_date: humanizedDateTime(),
+ send_date: getMessageTimeStamp(),
mes: substituteParams(text.trim()),
force_avatar: comment_avatar,
extra: {
@@ -394,8 +396,9 @@ async function sendCommentMessage(_, text) {
};
chat.push(message);
- addOneMessage(message);
await eventSource.emit(event_types.MESSAGE_SENT, (chat.length - 1));
+ addOneMessage(message);
+ await eventSource.emit(event_types.USER_MESSAGE_RENDERED, (chat.length - 1));
saveChatConditional();
}
diff --git a/public/scripts/stats.js b/public/scripts/stats.js
index d80997223..93b34e4ae 100644
--- a/public/scripts/stats.js
+++ b/public/scripts/stats.js
@@ -25,13 +25,12 @@ function createStatBlock(statName, statValue) {
* @returns {number} - The stat value if it is a number, otherwise 0.
*/
function verifyStatValue(stat) {
- return isNaN(stat) ? 0 : stat;
+ return isNaN(Number(stat)) ? 0 : Number(stat);
}
/**
* Calculates total stats from character statistics.
*
- * @param {Object} charStats - Object containing character statistics.
* @returns {Object} - Object containing total statistics.
*/
function calculateTotalStats() {
diff --git a/public/scripts/tags.js b/public/scripts/tags.js
index a6887c7c6..f486fbb08 100644
--- a/public/scripts/tags.js
+++ b/public/scripts/tags.js
@@ -7,7 +7,7 @@ import {
getCharacters,
entitiesFilter,
} from "../script.js";
-import { FILTER_TYPES } from "./filters.js";
+import { FILTER_TYPES, FilterHelper } from "./filters.js";
import { groupCandidatesFilter, selected_group } from "./group-chats.js";
import { uuidv4 } from "./utils.js";
@@ -24,7 +24,6 @@ export {
importTags,
};
-const random_id = () => uuidv4();
const CHARACTER_FILTER_SELECTOR = '#rm_characters_block .rm_tag_filter';
const GROUP_FILTER_SELECTOR = '#rm_group_chats_block .rm_tag_filter';
@@ -49,17 +48,21 @@ const InListActionable = {
}
const DEFAULT_TAGS = [
- { id: random_id(), name: "Plain Text" },
- { id: random_id(), name: "OpenAI" },
- { id: random_id(), name: "W++" },
- { id: random_id(), name: "Boostyle" },
- { id: random_id(), name: "PList" },
- { id: random_id(), name: "AliChat" },
+ { id: uuidv4(), name: "Plain Text" },
+ { id: uuidv4(), name: "OpenAI" },
+ { id: uuidv4(), name: "W++" },
+ { id: uuidv4(), name: "Boostyle" },
+ { id: uuidv4(), name: "PList" },
+ { id: uuidv4(), name: "AliChat" },
];
let tags = [];
let tag_map = {};
+/**
+ * Applies the favorite filter to the character list.
+ * @param {FilterHelper} filterHelper Instance of FilterHelper class.
+ */
function applyFavFilter(filterHelper) {
const isSelected = $(this).hasClass('selected');
const displayFavoritesOnly = !isSelected;
@@ -68,6 +71,10 @@ function applyFavFilter(filterHelper) {
filterHelper.setFilterData(FILTER_TYPES.FAV, displayFavoritesOnly);
}
+/**
+ * Applies the "is group" filter to the character list.
+ * @param {FilterHelper} filterHelper Instance of FilterHelper class.
+ */
function filterByGroups(filterHelper) {
const isSelected = $(this).hasClass('selected');
const displayGroupsOnly = !isSelected;
@@ -253,7 +260,7 @@ async function importTags(imported_char) {
function createNewTag(tagName) {
const tag = {
- id: random_id(),
+ id: uuidv4(),
name: tagName,
color: '',
};
diff --git a/public/scripts/templates/formatting.html b/public/scripts/templates/formatting.html
new file mode 100644
index 000000000..71671a3b0
--- /dev/null
+++ b/public/scripts/templates/formatting.html
@@ -0,0 +1,21 @@
+Text formatting commands:
+
+ - *text* - displays as italics
+ - **text** - displays as bold
+ - ***text*** - displays as bold italics
+ - ```text``` - displays as a code block (new lines allowed between the backticks)
+
+ like this
+
+ - `text` - displays as
inline code
+ - text - displays as a blockquote (note the space after >)
+ like this
+ - # text - displays as a large header (note the space)
+ like this
+ - ## text - displays as a medium header (note the space)
+ like this
+ - ### text - displays as a small header (note the space)
+ like this
+ - $$ text $$ - renders a LaTeX formula (if enabled)
+ - $ text $ - renders an AsciiMath formula (if enabled)
+
diff --git a/public/scripts/templates/help.html b/public/scripts/templates/help.html
new file mode 100644
index 000000000..66b858f48
--- /dev/null
+++ b/public/scripts/templates/help.html
@@ -0,0 +1,11 @@
+Hello there! Please select the help topic you would like to learn more about:
+
+
+
+ Still got questions left? The Official SillyTavern Documentation Website has much more information!
+
diff --git a/public/scripts/templates/hotkeys.html b/public/scripts/templates/hotkeys.html
new file mode 100644
index 000000000..18e751bd9
--- /dev/null
+++ b/public/scripts/templates/hotkeys.html
@@ -0,0 +1,13 @@
+Hotkeys/Keybinds:
+
+ - Up = Edit last message in chat
+ - Ctrl+Up = Edit last USER message in chat
+ - Left = swipe left
+ - Right = swipe right (NOTE: swipe hotkeys are disabled when chatbar has something typed into it)
+ - Ctrl+Left = view locally stored variables (in the browser console window)
+ - Enter (with chat bar selected) = send your message to AI
+ - Ctrl+Enter = Regenerate the last AI response
+ - Escape = stop AI response generation
+ - Ctrl+Shift+Up = Scroll to context line
+ - Ctrl+Shift+Down = Scroll chat to bottom
+
diff --git a/public/scripts/templates/itemizationChat.html b/public/scripts/templates/itemizationChat.html
new file mode 100644
index 000000000..12e25c94a
--- /dev/null
+++ b/public/scripts/templates/itemizationChat.html
@@ -0,0 +1,128 @@
+
+ Prompt Itemization
+
+
+Tokenizer: {{selectedTokenizer}}
+API Used: {{this_main_api}}
+
+ Only the white numbers really matter. All numbers are estimates.
+ Grey color items may not have been included in the context due to certain prompt format settings.
+
+
+
+
+
+
+
+
+
System Info:
+
{{oaiSystemTokens}}
+
+
+
-- Chat Start:
+
{{oaiStartTokens}}
+
+
+
-- Main:
+
{{oaiMainTokens}}
+
+
+
-- Jailbreak:
+
{{oaiJailbreakTokens}}
+
+
+
-- NSFW:
+
{{oaiNsfwTokens}}
+
+
+
-- Nudge:
+
{{oaiNudgeTokens}}
+
+
+
-- Impersonate:
+
{{oaiImpersonateTokens}}
+
+
+
+
+
Prompt Tokens:
+
{{oaiPromptTokens}}
+
+
+
-- Description:
+
{{charDescriptionTokens}}
+
+
+
-- Personality:
+
{{charPersonalityTokens}}
+
+
+
-- Scenario:
+
{{scenarioTextTokens}}
+
+
+
-- Examples:
+
{{examplesStringTokens}}
+
+
+
-- User Persona:
+
{{userPersonaStringTokens}}
+
+
+
+
World Info:
+
{{worldInfoStringTokens}}
+
+
+
Chat History:
+
{{ActualChatHistoryTokens}}
+
+
+
+
Extensions:
+
{{allAnchorsTokens}}
+
+
+
-- Summarize:
+
{{summarizeStringTokens}}
+
+
+
-- Author's Note:
+
{{authorsNoteStringTokens}}
+
+
+
-- Smart Context:
+
{{smartContextStringTokens}}
+
+
+
+
{{}} Bias:
+
{{oaiBiasTokens}}
+
+
+
+
+
+
+
+
Total Tokens in Prompt:
+
{{finalPromptTokens}}
+
+
+
Max Context (Context Size - Response Length):
+
{{thisPrompt_max_context}}
+
+
+
+
diff --git a/public/scripts/templates/itemizationText.html b/public/scripts/templates/itemizationText.html
new file mode 100644
index 000000000..b6f60404b
--- /dev/null
+++ b/public/scripts/templates/itemizationText.html
@@ -0,0 +1,108 @@
+
+ Prompt Itemization
+
+
+Tokenizer: {{selectedTokenizer}}
+API Used: {{this_main_api}}
+
+ Only the white numbers really matter. All numbers are estimates.
+ Grey color items may not have been included in the context due to certain prompt format settings.
+
+
+
+
+
+
+
+
+
Character Definitions:
+
{{storyStringTokens}}
+
+
+
-- Description:
+
{{charDescriptionTokens}}
+
+
+
-- Personality:
+
{{charPersonalityTokens}}
+
+
+
-- Scenario:
+
{{scenarioTextTokens}}
+
+
+
-- Examples:
+
{{examplesStringTokens}}
+
+
+
-- User Persona:
+
{{userPersonaStringTokens}}
+
+
+
-- System Prompt (Instruct):
+
{{instructionTokens}}
+
+
+
+
World Info:
+
{{worldInfoStringTokens}}
+
+
+
Chat History:
+
{{ActualChatHistoryTokens}}
+
+
+
+
Extensions:
+
{{allAnchorsTokens}}
+
+
+
-- Summarize:
+
{{summarizeStringTokens}}
+
+
+
-- Author's Note:
+
{{authorsNoteStringTokens}}
+
+
+
-- Smart Context:
+
{{smartContextStringTokens}}
+
+
+
+
{{}} Bias:
+
{{promptBiasTokens}}
+
+
+
+
+
+
+
+
Total Tokens in Prompt:
+
{{totalTokensInPrompt}}
+
+
+
Max Context (Context Size - Response Length):
+
{{thisPrompt_max_context}}
+
+
+
- Padding:
+
{{thisPrompt_padding}}
+
+
+
Actual Max Context Allowed:
+
{{thisPrompt_actual}}
+
+
+
+
diff --git a/public/scripts/templates/macros.html b/public/scripts/templates/macros.html
new file mode 100644
index 000000000..dd163197f
--- /dev/null
+++ b/public/scripts/templates/macros.html
@@ -0,0 +1,11 @@
+System-wide Replacement Macros:
+
+ - {{user}} - your current Persona username
+ - {{char}} - the Character's name
+ - {{input}} - the user input
+ - {{time}} - the current time
+ - {{date}} - the current date
+ - {{idle_duration}} - the time since the last user message was sent
+ - {{random:(args)}} - returns a random item from the list. (ex: {{random:1,2,3,4}} will return 1 of the 4 numbers at random. Works with text lists too.)
+ - {{roll:(formula)}} - rolls a dice. (ex: {{roll:1d6}} will roll a 6-sided dice and return a number between 1 and 6)
+
diff --git a/public/scripts/templates/welcome.html b/public/scripts/templates/welcome.html
new file mode 100644
index 000000000..7c80d19ab
--- /dev/null
+++ b/public/scripts/templates/welcome.html
@@ -0,0 +1,72 @@
+
+ SillyTavern
+
+
+
+ Want to update?
+
+
+How to start chatting?
+
+ - Click
and select a Chat API.
+ - Click
and pick a character
+
+
+
+ Want more characters?
+
+
+ Not controlled by SillyTavern team.
+
+
+
+Confused or lost?
+
+
+
+Still have questions?
+
diff --git a/public/scripts/textgen-settings.js b/public/scripts/textgen-settings.js
index 9ac736306..ca551b068 100644
--- a/public/scripts/textgen-settings.js
+++ b/public/scripts/textgen-settings.js
@@ -6,8 +6,6 @@ import {
setGenerationParamsFromPreset,
} from "../script.js";
-import { getCfg } from "./extensions/cfg/util.js";
-
import {
power_user,
} from "./power-user.js";
@@ -170,9 +168,9 @@ $(document).ready(function () {
textgenerationwebui_settings[id] = value;
}
else {
- const value = parseFloat($(this).val());
+ const value = Number($(this).val());
$(`#${id}_counter_textgenerationwebui`).text(value.toFixed(2));
- textgenerationwebui_settings[id] = parseFloat(value);
+ textgenerationwebui_settings[id] = value;
}
saveSettingsDebounced();
@@ -209,7 +207,7 @@ async function generateTextGenWithStreaming(generate_data, signal) {
const response = await fetch('/generate_textgenerationwebui', {
headers: {
...getRequestHeaders(),
- 'X-Response-Streaming': true,
+ 'X-Response-Streaming': String(true),
'X-Streaming-URL': textgenerationwebui_settings.streaming_url,
},
body: JSON.stringify(generate_data),
@@ -235,9 +233,7 @@ async function generateTextGenWithStreaming(generate_data, signal) {
}
}
-export function getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate) {
- const cfgValues = getCfg();
-
+export function getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate, cfgValues) {
return {
'prompt': finalPromt,
'max_new_tokens': this_amount_gen,
@@ -255,7 +251,7 @@ export function getTextGenGenerationData(finalPromt, this_amount_gen, isImperson
'penalty_alpha': textgenerationwebui_settings.penalty_alpha,
'length_penalty': textgenerationwebui_settings.length_penalty,
'early_stopping': textgenerationwebui_settings.early_stopping,
- 'guidance_scale': cfgValues?.guidanceScale ?? textgenerationwebui_settings.guidance_scale ?? 1,
+ 'guidance_scale': cfgValues?.guidanceScale?.value ?? textgenerationwebui_settings.guidance_scale ?? 1,
'negative_prompt': cfgValues?.negativePrompt ?? textgenerationwebui_settings.negative_prompt ?? '',
'seed': textgenerationwebui_settings.seed,
'add_bos_token': textgenerationwebui_settings.add_bos_token,
diff --git a/public/scripts/tokenizers.js b/public/scripts/tokenizers.js
new file mode 100644
index 000000000..2714c12ef
--- /dev/null
+++ b/public/scripts/tokenizers.js
@@ -0,0 +1,342 @@
+import { characters, main_api, nai_settings, this_chid } from "../script.js";
+import { power_user } from "./power-user.js";
+import { encode } from "../lib/gpt-2-3-tokenizer/mod.js";
+import { GPT3BrowserTokenizer } from "../lib/gpt-3-tokenizer/gpt3-tokenizer.js";
+import { chat_completion_sources, oai_settings } from "./openai.js";
+import { groups, selected_group } from "./group-chats.js";
+import { getStringHash } from "./utils.js";
+
+// Rough average characters-per-token ratio used by the estimation ("NONE") tokenizer.
+export const CHARACTERS_PER_TOKEN_RATIO = 3.35;
+
+// Enumeration of the tokenizer backends selectable by the user or chosen per API.
+export const tokenizers = {
+    NONE: 0,        // character-count estimate (see CHARACTERS_PER_TOKEN_RATIO)
+    GPT3: 1,        // in-browser GPT-3 BPE tokenizer
+    CLASSIC: 2,     // in-browser GPT-2/3 tokenizer
+    LLAMA: 3,       // server-side LLaMA tokenizer endpoint
+    NERD: 4,        // server-side NovelAI nerdstash tokenizer endpoint
+    NERD2: 5,       // server-side NovelAI nerdstash v2 tokenizer endpoint
+    API: 6,         // tokenize via the currently connected API
+    BEST_MATCH: 99, // auto-select based on the active main_api (see getTokenizerBestMatch)
+};
+
+const objectStore = new localforage.createInstance({ name: "SillyTavern_ChatCompletions" });
+const gpt3 = new GPT3BrowserTokenizer({ type: 'gpt3' });
+
+let tokenCache = {};
+
+/**
+ * Hydrates the in-memory token cache from persistent storage.
+ * Falls back to an empty cache when storage is unavailable or empty.
+ */
+async function loadTokenCache() {
+    try {
+        console.debug('Chat Completions: loading token cache')
+        tokenCache = await objectStore.getItem('tokenCache') || {};
+    } catch (e) {
+        console.log('Chat Completions: unable to load token cache, using default value', e);
+        tokenCache = {};
+    }
+}
+
+/**
+ * Persists the in-memory token cache to storage so counts survive page reloads.
+ * Failures are logged but not rethrown; the cache is only an optimization.
+ */
+export async function saveTokenCache() {
+    try {
+        console.debug('Chat Completions: saving token cache')
+        await objectStore.setItem('tokenCache', tokenCache);
+    } catch (e) {
+        console.log('Chat Completions: unable to save token cache', e);
+    }
+}
+
+/**
+ * Clears the token cache both in memory and in persistent storage.
+ * Intended for manual use from the browser console (exposed on `window`).
+ */
+async function resetTokenCache() {
+    try {
+        console.debug('Chat Completions: resetting token cache');
+        // Delete keys in place so existing references to tokenCache stay valid.
+        Object.keys(tokenCache).forEach(key => delete tokenCache[key]);
+        await objectStore.removeItem('tokenCache');
+    } catch (e) {
+        console.log('Chat Completions: unable to reset token cache', e);
+    }
+}
+
+// Debug hook: lets users run `resetTokenCache()` from the browser dev console.
+window['resetTokenCache'] = resetTokenCache;
+
+function getTokenizerBestMatch() {
+ if (main_api === 'novel') {
+ if (nai_settings.model_novel.includes('krake') || nai_settings.model_novel.includes('euterpe')) {
+ return tokenizers.CLASSIC;
+ }
+ if (nai_settings.model_novel.includes('clio')) {
+ return tokenizers.NERD;
+ }
+ if (nai_settings.model_novel.includes('kayra')) {
+ return tokenizers.NERD2;
+ }
+ }
+ if (main_api === 'kobold' || main_api === 'textgenerationwebui' || main_api === 'koboldhorde') {
+ return tokenizers.LLAMA;
+ }
+
+ return tokenizers.NONE;
+}
+
+/**
+ * Gets the token count for a string using the current model tokenizer.
+ * Results are cached per chat, keyed by tokenizer type and string hash.
+ * @param {string} str String to tokenize
+ * @param {number | undefined} padding Optional padding tokens. Defaults to 0.
+ * @returns {number} Token count.
+ */
+export function getTokenCount(str, padding = undefined) {
+    /**
+     * Calculates the token count for a string.
+     * @param {number} [type] Tokenizer type.
+     * @returns {number} Token count.
+     */
+    function calculate(type) {
+        switch (type) {
+            case tokenizers.NONE:
+                // Cheap estimate: character count divided by the average chars-per-token ratio.
+                return Math.ceil(str.length / CHARACTERS_PER_TOKEN_RATIO) + padding;
+            case tokenizers.GPT3:
+                return gpt3.encode(str).bpe.length + padding;
+            case tokenizers.CLASSIC:
+                return encode(str).length + padding;
+            case tokenizers.LLAMA:
+                // Server-side tokenizers: synchronous round-trip (see countTokensRemote).
+                return countTokensRemote('/tokenize_llama', str, padding);
+            case tokenizers.NERD:
+                return countTokensRemote('/tokenize_nerdstash', str, padding);
+            case tokenizers.NERD2:
+                return countTokensRemote('/tokenize_nerdstash_v2', str, padding);
+            case tokenizers.API:
+                return countTokensRemote('/tokenize_via_api', str, padding);
+            default:
+                console.warn("Unknown tokenizer type", type);
+                return calculate(tokenizers.NONE);
+        }
+    }
+
+    if (typeof str !== 'string' || !str?.length) {
+        return 0;
+    }
+
+    let tokenizerType = power_user.tokenizer;
+
+    if (main_api === 'openai') {
+        // NOTE: `padding` doubles as a sentinel here — callers that pass
+        // power_user.token_padding are the main prompt builder; this comparison
+        // must happen BEFORE padding is defaulted to 0 below.
+        if (padding === power_user.token_padding) {
+            // For main "shadow" prompt building
+            tokenizerType = tokenizers.NONE;
+        } else {
+            // For extensions and WI
+            return counterWrapperOpenAI(str);
+        }
+    }
+
+    if (tokenizerType === tokenizers.BEST_MATCH) {
+        tokenizerType = getTokenizerBestMatch();
+    }
+
+    if (padding === undefined) {
+        padding = 0;
+    }
+
+    const cacheObject = getTokenCacheObject();
+    const hash = getStringHash(str);
+    const cacheKey = `${tokenizerType}-${hash}`;
+
+    if (typeof cacheObject[cacheKey] === 'number') {
+        return cacheObject[cacheKey];
+    }
+
+    const result = calculate(tokenizerType);
+
+    // A failed remote call or a bad tokenizer result must not poison the cache.
+    if (isNaN(result)) {
+        console.warn("Token count calculation returned NaN");
+        return 0;
+    }
+
+    cacheObject[cacheKey] = result;
+    return result;
+}
+
+/**
+ * Gets the token count for a string using the OpenAI tokenizer.
+ * @param {string} text Text to tokenize.
+ * @returns {number} Token count.
+ */
+function counterWrapperOpenAI(text) {
+ const message = { role: 'system', content: text };
+ return countTokensOpenAI(message, true);
+}
+
+export function getTokenizerModel() {
+ // OpenAI models always provide their own tokenizer
+ if (oai_settings.chat_completion_source == chat_completion_sources.OPENAI) {
+ return oai_settings.openai_model;
+ }
+
+ const turboTokenizer = 'gpt-3.5-turbo';
+ const gpt4Tokenizer = 'gpt-4';
+ const gpt2Tokenizer = 'gpt2';
+ const claudeTokenizer = 'claude';
+
+ // Assuming no one would use it for different models.. right?
+ if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) {
+ return gpt4Tokenizer;
+ }
+
+ // Select correct tokenizer for WindowAI proxies
+ if (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI && oai_settings.windowai_model) {
+ if (oai_settings.windowai_model.includes('gpt-4')) {
+ return gpt4Tokenizer;
+ }
+ else if (oai_settings.windowai_model.includes('gpt-3.5-turbo')) {
+ return turboTokenizer;
+ }
+ else if (oai_settings.windowai_model.includes('claude')) {
+ return claudeTokenizer;
+ }
+ else if (oai_settings.windowai_model.includes('GPT-NeoXT')) {
+ return gpt2Tokenizer;
+ }
+ }
+
+ // And for OpenRouter (if not a site model, then it's impossible to determine the tokenizer)
+ if (oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER && oai_settings.openrouter_model) {
+ if (oai_settings.openrouter_model.includes('gpt-4')) {
+ return gpt4Tokenizer;
+ }
+ else if (oai_settings.openrouter_model.includes('gpt-3.5-turbo')) {
+ return turboTokenizer;
+ }
+ else if (oai_settings.openrouter_model.includes('claude')) {
+ return claudeTokenizer;
+ }
+ else if (oai_settings.openrouter_model.includes('GPT-NeoXT')) {
+ return gpt2Tokenizer;
+ }
+ }
+
+ if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) {
+ return claudeTokenizer;
+ }
+
+ // Default to Turbo 3.5
+ return turboTokenizer;
+}
+
+/**
+ * Counts tokens for one or more chat messages using a remote tokenizer endpoint.
+ * Per-message results are cached, keyed by tokenizer model and message hash.
+ * @param {any[] | Object} messages A single message object or an array of them.
+ * @param {boolean} [full=false] When true, skips the reply-primer adjustment below.
+ * @returns {number} Token count.
+ */
+export function countTokensOpenAI(messages, full = false) {
+    const shouldTokenizeAI21 = oai_settings.chat_completion_source === chat_completion_sources.AI21 && oai_settings.use_ai21_tokenizer;
+    const cacheObject = getTokenCacheObject();
+
+    if (!Array.isArray(messages)) {
+        messages = [messages];
+    }
+
+    // NOTE(review): the count starts at -1 and 2 more is subtracted when !full —
+    // presumably compensating for chat-format wrapper tokens; confirm against the prompt builder.
+    let token_count = -1;
+
+    for (const message of messages) {
+        const model = getTokenizerModel();
+
+        if (model === 'claude' || shouldTokenizeAI21) {
+            // These endpoints return exact counts, so the adjustment below is skipped.
+            full = true;
+        }
+
+        const hash = getStringHash(JSON.stringify(message));
+        const cacheKey = `${model}-${hash}`;
+        const cachedCount = cacheObject[cacheKey];
+
+        if (typeof cachedCount === 'number') {
+            token_count += cachedCount;
+        }
+
+        else {
+            jQuery.ajax({
+                async: false, // synchronous on purpose: callers expect an immediate result
+                type: 'POST',
+                url: shouldTokenizeAI21 ? '/tokenize_ai21' : `/tokenize_openai?model=${model}`,
+                data: JSON.stringify([message]),
+                dataType: "json",
+                contentType: "application/json",
+                success: function (data) {
+                    token_count += Number(data.token_count);
+                    cacheObject[cacheKey] = Number(data.token_count);
+                }
+            });
+        }
+    }
+
+    if (!full) token_count -= 2; // see NOTE above
+
+    return token_count;
+}
+
+/**
+ * Gets the token cache object for the current chat.
+ * Falls back to a shared 'undefined' bucket when no chat can be resolved.
+ * @returns {Object} Token cache object for the current chat.
+ */
+function getTokenCacheObject() {
+    let chatId = 'undefined';
+
+    try {
+        if (selected_group) {
+            chatId = groups.find(x => x.id == selected_group)?.chat_id;
+        }
+        else if (this_chid !== undefined) {
+            chatId = characters[this_chid].chat;
+        }
+    } catch {
+        console.log('No character / group selected. Using default cache item');
+    }
+
+    // Property keys are strings; normalize once and use the same key throughout.
+    const cacheKey = String(chatId);
+
+    if (typeof tokenCache[cacheKey] !== 'object') {
+        tokenCache[cacheKey] = {};
+    }
+
+    return tokenCache[cacheKey];
+}
+
+function countTokensRemote(endpoint, str, padding) {
+ let tokenCount = 0;
+ jQuery.ajax({
+ async: false,
+ type: 'POST',
+ url: endpoint,
+ data: JSON.stringify({ text: str }),
+ dataType: "json",
+ contentType: "application/json",
+ success: function (data) {
+ tokenCount = data.count;
+ }
+ });
+ return tokenCount + padding;
+}
+
+function getTextTokensRemote(endpoint, str) {
+ let ids = [];
+ jQuery.ajax({
+ async: false,
+ type: 'POST',
+ url: endpoint,
+ data: JSON.stringify({ text: str }),
+ dataType: "json",
+ contentType: "application/json",
+ success: function (data) {
+ ids = data.ids;
+ }
+ });
+ return ids;
+}
+
+export function getTextTokens(tokenizerType, str) {
+ switch (tokenizerType) {
+ case tokenizers.LLAMA:
+ return getTextTokensRemote('/tokenize_llama', str);
+ case tokenizers.NERD:
+ return getTextTokensRemote('/tokenize_nerdstash', str);
+ case tokenizers.NERD2:
+ return getTextTokensRemote('/tokenize_nerdstash_v2', str);
+ default:
+ console.warn("Calling getTextTokens with unsupported tokenizer type", tokenizerType);
+ return [];
+ }
+}
+
+// Hydrate the persisted token cache as soon as the document is ready.
+jQuery(async () => {
+    await loadTokenCache();
+});
diff --git a/public/scripts/utils.js b/public/scripts/utils.js
index aba6f2153..48140cbe6 100644
--- a/public/scripts/utils.js
+++ b/public/scripts/utils.js
@@ -1,21 +1,56 @@
import { getContext } from "./extensions.js";
import { getRequestHeaders } from "../script.js";
+/**
+ * Pagination status string template.
+ * @type {string}
+ */
export const PAGINATION_TEMPLATE = '<%= rangeStart %>-<%= rangeEnd %> of <%= totalNumber %>';
+/**
+ * Navigation options for pagination.
+ * @enum {number}
+ */
+export const navigation_option = { none: 0, previous: 1, last: 2, };
+
+/**
+ * Determines if a value is unique in an array.
+ * @param {any} value Current value.
+ * @param {number} index Current index.
+ * @param {any} array The array being processed.
+ * @returns {boolean} True if the value is unique, false otherwise.
+ */
export function onlyUnique(value, index, array) {
return array.indexOf(value) === index;
}
+/**
+ * Checks if a string only contains digits.
+ * @param {string} str The string to check.
+ * @returns {boolean} True if the string only contains digits, false otherwise.
+ * @example
+ * isDigitsOnly('123'); // true
+ * isDigitsOnly('abc'); // false
+ */
export function isDigitsOnly(str) {
return /^\d+$/.test(str);
}
-// Increase delay on touch screens
+/**
+ * Gets a drag delay for sortable elements. This is to prevent accidental drags when scrolling.
+ * @returns {number} The delay in milliseconds. 100ms for desktop, 750ms for mobile.
+ */
export function getSortableDelay() {
return navigator.maxTouchPoints > 0 ? 750 : 100;
}
+/**
+ * Rearranges an array in a random order.
+ * @param {any[]} array The array to shuffle.
+ * @returns {any[]} The shuffled array.
+ * @example
+ * shuffle([1, 2, 3]); // [2, 3, 1]
+ */
export function shuffle(array) {
let currentIndex = array.length,
randomIndex;
@@ -31,6 +66,12 @@ export function shuffle(array) {
return array;
}
+/**
+ * Downloads a file to the user's devices.
+ * @param {BlobPart} content File content to download.
+ * @param {string} fileName File name.
+ * @param {string} contentType File content type.
+ */
export function download(content, fileName, contentType) {
const a = document.createElement("a");
const file = new Blob([content], { type: contentType });
@@ -39,22 +80,38 @@ export function download(content, fileName, contentType) {
a.click();
}
+/**
+ * Fetches a file by URL and parses its contents as data URI.
+ * @param {string} url The URL to fetch.
+ * @param {any} params Fetch parameters.
+ * @returns {Promise} A promise that resolves to the data URI.
+ */
export async function urlContentToDataUri(url, params) {
const response = await fetch(url, params);
const blob = await response.blob();
- return await new Promise(callback => {
- let reader = new FileReader();
- reader.onload = function () { callback(this.result); };
+ return await new Promise((resolve, reject) => {
+ const reader = new FileReader();
+ reader.onload = function () {
+ resolve(String(reader.result));
+ };
+ reader.onerror = function (error) {
+ reject(error);
+ };
reader.readAsDataURL(blob);
});
}
+/**
+ * Returns a promise that resolves to the file's text.
+ * @param {Blob} file The file to read.
+ * @returns {Promise} A promise that resolves to the file's text.
+ */
export function getFileText(file) {
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.readAsText(file);
reader.onload = function () {
- resolve(reader.result);
+ resolve(String(reader.result));
};
reader.onerror = function (error) {
reject(error);
@@ -62,6 +119,10 @@ export function getFileText(file) {
});
}
+/**
+ * Returns a promise that resolves to the file's array buffer.
+ * @param {Blob} file The file to read.
+ */
export function getFileBuffer(file) {
return new Promise((resolve, reject) => {
const reader = new FileReader();
@@ -75,12 +136,17 @@ export function getFileBuffer(file) {
});
}
+/**
+ * Returns a promise that resolves to the base64 encoded string of a file.
+ * @param {Blob} file The file to read.
+ * @returns {Promise} A promise that resolves to the base64 encoded string.
+ */
export function getBase64Async(file) {
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.readAsDataURL(file);
reader.onload = function () {
- resolve(reader.result);
+ resolve(String(reader.result));
};
reader.onerror = function (error) {
reject(error);
@@ -88,15 +154,26 @@ export function getBase64Async(file) {
});
}
+/**
+ * Parses a file blob as a JSON object.
+ * @param {Blob} file The file to read.
+ * @returns {Promise} A promise that resolves to the parsed JSON object.
+ */
export async function parseJsonFile(file) {
return new Promise((resolve, reject) => {
const fileReader = new FileReader();
- fileReader.onload = event => resolve(JSON.parse(event.target.result));
- fileReader.onerror = error => reject(error);
fileReader.readAsText(file);
+ fileReader.onload = event => resolve(JSON.parse(String(event.target.result)));
+ fileReader.onerror = error => reject(error);
});
}
+/**
+ * Calculates a hash code for a string.
+ * @param {string} str The string to hash.
+ * @param {number} [seed=0] The seed to use for the hash.
+ * @returns {number} The hash code.
+ */
export function getStringHash(str, seed = 0) {
if (typeof str !== 'string') {
return 0;
@@ -116,6 +193,12 @@ export function getStringHash(str, seed = 0) {
return 4294967296 * (2097151 & h2) + (h1 >>> 0);
};
+/**
+ * Creates a debounced function that delays invoking func until after wait milliseconds have elapsed since the last time the debounced function was invoked.
+ * @param {function} func The function to debounce.
+ * @param {number} [timeout=300] The timeout in milliseconds.
+ * @returns {function} The debounced function.
+ */
export function debounce(func, timeout = 300) {
let timer;
return (...args) => {
@@ -124,6 +207,12 @@ export function debounce(func, timeout = 300) {
};
}
+/**
+ * Creates a throttled function that only invokes func at most once per every limit milliseconds.
+ * @param {function} func The function to throttle.
+ * @param {number} [limit=300] The limit in milliseconds.
+ * @returns {function} The throttled function.
+ */
export function throttle(func, limit = 300) {
let lastCall;
return (...args) => {
@@ -135,6 +224,11 @@ export function throttle(func, limit = 300) {
};
}
+/**
+ * Checks if an element is in the viewport.
+ * @param {Element} el The element to check.
+ * @returns {boolean} True if the element is in the viewport, false otherwise.
+ */
export function isElementInViewport(el) {
if (typeof jQuery === "function" && el instanceof jQuery) {
el = el[0];
@@ -148,6 +242,12 @@ export function isElementInViewport(el) {
);
}
+/**
+ * Returns a name that is unique among the names that exist.
+ * @param {string} name The name to check.
+ * @param {{ (y: any): boolean; }} exists Function to check if name exists.
+ * @returns {string} A unique name.
+ */
export function getUniqueName(name, exists) {
let i = 1;
let baseName = name;
@@ -158,18 +258,48 @@ export function getUniqueName(name, exists) {
return name;
}
-export const delay = (ms) => new Promise((res) => setTimeout(res, ms));
-export const isSubsetOf = (a, b) => (Array.isArray(a) && Array.isArray(b)) ? b.every(val => a.includes(val)) : false;
+/**
+ * Returns a promise that resolves after the specified number of milliseconds.
+ * @param {number} ms The number of milliseconds to wait.
+ * @returns {Promise} A promise that resolves after the specified number of milliseconds.
+ */
+export function delay(ms) {
+ return new Promise((res) => setTimeout(res, ms));
+}
+/**
+ * Checks if an array is a subset of another array.
+ * @param {any[]} a Array A
+ * @param {any[]} b Array B
+ * @returns {boolean} True if B is a subset of A, false otherwise.
+ */
+export function isSubsetOf(a, b) {
+ return (Array.isArray(a) && Array.isArray(b)) ? b.every(val => a.includes(val)) : false;
+}
+
+/**
+ * Increments the trailing number in a string.
+ * @param {string} str The string to process.
+ * @returns {string} The string with the trailing number incremented by 1.
+ * @example
+ * incrementString('Hello, world! 1'); // 'Hello, world! 2'
+ */
export function incrementString(str) {
// Find the trailing number or it will match the empty string
const count = str.match(/\d*$/);
// Take the substring up until where the integer was matched
// Concatenate it to the matched count incremented by 1
- return str.substr(0, count.index) + (++count[0]);
+ return str.substring(0, count.index) + (Number(count[0]) + 1);
};
+/**
+ * Formats a string using the specified arguments.
+ * @param {string} format The format string.
+ * @returns {string} The formatted string.
+ * @example
+ * stringFormat('Hello, {0}!', 'world'); // 'Hello, world!'
+ */
export function stringFormat(format) {
const args = Array.prototype.slice.call(arguments, 1);
return format.replace(/{(\d+)}/g, function (match, number) {
@@ -180,7 +310,11 @@ export function stringFormat(format) {
});
};
-// Save the caret position in a contenteditable element
+/**
+ * Save the caret position in a contenteditable element.
+ * @param {Element} element The element to save the caret position of.
+ * @returns {{ start: number, end: number }} An object with the start and end offsets of the caret.
+ */
export function saveCaretPosition(element) {
// Get the current selection
const selection = window.getSelection();
@@ -209,7 +343,11 @@ export function saveCaretPosition(element) {
return position;
}
-// Restore the caret position in a contenteditable element
+/**
+ * Restore the caret position in a contenteditable element.
+ * @param {Element} element The element to restore the caret position of.
+ * @param {{ start: any; end: any; }} position An object with the start and end offsets of the caret.
+ */
export function restoreCaretPosition(element, position) {
// If the position is null, do nothing
if (!position) {
@@ -236,6 +374,11 @@ export async function resetScrollHeight(element) {
$(element).css('height', $(element).prop('scrollHeight') + 3 + 'px');
}
+/**
+ * Sets the height of an element to its scroll height.
+ * @param {JQuery} element The element to initialize the scroll height of.
+ * @returns {Promise} A promise that resolves when the scroll height has been initialized.
+ */
export async function initScrollHeight(element) {
await delay(1);
@@ -252,15 +395,27 @@ export async function initScrollHeight(element) {
//resetScrollHeight(element);
}
+/**
+ * Compares elements by their CSS order property. Used for sorting.
+ * @param {any} a The first element.
+ * @param {any} b The second element.
+ * @returns {number} A negative number if a is before b, a positive number if a is after b, or 0 if they are equal.
+ */
export function sortByCssOrder(a, b) {
const _a = Number($(a).css('order'));
const _b = Number($(b).css('order'));
return _a - _b;
}
+/**
+ * Trims a string to the end of a nearest sentence.
+ * @param {string} input The string to trim.
+ * @param {boolean} include_newline Whether to include a newline character in the trimmed string.
+ * @returns {string} The trimmed string.
+ * @example
+ * end_trim_to_sentence('Hello, world! I am from'); // 'Hello, world!'
+ */
export function end_trim_to_sentence(input, include_newline = false) {
- // inspired from https://github.com/kaihordewebui/kaihordewebui.github.io/blob/06b95e6b7720eb85177fbaf1a7f52955d7cdbc02/index.html#L4853-L4867
-
const punctuation = new Set(['.', '!', '?', '*', '"', ')', '}', '`', ']', '$', '。', '!', '?', '”', ')', '】', '】', '’', '」', '】']); // extend this as you see fit
let last = -1;
@@ -285,6 +440,15 @@ export function end_trim_to_sentence(input, include_newline = false) {
return input.substring(0, last + 1).trimEnd();
}
+/**
+ * Counts the number of occurrences of a character in a string.
+ * @param {string} string The string to count occurrences in.
+ * @param {string} character The character to count occurrences of.
+ * @returns {number} The number of occurrences of the character in the string.
+ * @example
+ * countOccurrences('Hello, world!', 'l'); // 3
+ * countOccurrences('Hello, world!', 'x'); // 0
+ */
export function countOccurrences(string, character) {
let count = 0;
@@ -297,6 +461,14 @@ export function countOccurrences(string, character) {
return count;
}
+/**
+ * Checks if a number is odd.
+ * @param {number} number The number to check.
+ * @returns {boolean} True if the number is odd, false otherwise.
+ * @example
+ * isOdd(3); // true
+ * isOdd(4); // false
+ */
export function isOdd(number) {
return number % 2 !== 0;
}
@@ -337,6 +509,12 @@ export function timestampToMoment(timestamp) {
return moment.invalid();
}
+/**
+ * Compare two moment objects for sorting.
+ * @param {*} a The first moment object.
+ * @param {*} b The second moment object.
+ * @returns {number} A negative number if a is before b, a positive number if a is after b, or 0 if they are equal.
+ */
export function sortMoments(a, b) {
if (a.isBefore(b)) {
return 1;
@@ -347,14 +525,21 @@ export function sortMoments(a, b) {
}
}
-/** Split string to parts no more than length in size */
-export function splitRecursive(input, length, delimitiers = ['\n\n', '\n', ' ', '']) {
- const delim = delimitiers[0] ?? '';
+/** Split string to parts no more than length in size.
+ * @param {string} input The string to split.
+ * @param {number} length The maximum length of each part.
+ * @param {string[]} delimiters The delimiters to use when splitting the string.
+ * @returns {string[]} The split string.
+ * @example
+ * splitRecursive('Hello, world!', 3); // ['Hel', 'lo,', 'wor', 'ld!']
+*/
+export function splitRecursive(input, length, delimiters = ['\n\n', '\n', ' ', '']) {
+ const delim = delimiters[0] ?? '';
const parts = input.split(delim);
const flatParts = parts.flatMap(p => {
if (p.length < length) return p;
- return splitRecursive(input, length, delimitiers.slice(1));
+ return splitRecursive(input, length, delimiters.slice(1));
});
// Merge short chunks
@@ -378,6 +563,13 @@ export function splitRecursive(input, length, delimitiers = ['\n\n', '\n', ' ',
return result;
}
+/**
+ * Checks if a string is a valid data URL.
+ * @param {string} str The string to check.
+ * @returns {boolean} True if the string is a valid data URL, false otherwise.
+ * @example
+ * isDataURL('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUA...'); // true
+ */
export function isDataURL(str) {
const regex = /^data:([a-z]+\/[a-z0-9-+.]+(;[a-z-]+=[a-z0-9-]+)*;?)?(base64)?,([a-z0-9!$&',()*+;=\-_%.~:@\/?#]+)?$/i;
return regex.test(str);
@@ -392,6 +584,13 @@ export function getCharaFilename(chid) {
}
}
+/**
+ * Extracts words from a string.
+ * @param {string} value The string to extract words from.
+ * @returns {string[]} The extracted words.
+ * @example
+ * extractAllWords('Hello, world!'); // ['hello', 'world']
+ */
export function extractAllWords(value) {
const words = [];
@@ -406,21 +605,45 @@ export function extractAllWords(value) {
return words;
}
+/**
+ * Escapes a string for use in a regular expression.
+ * @param {string} string The string to escape.
+ * @returns {string} The escaped string.
+ * @example
+ * escapeRegex('^Hello$'); // '\\^Hello\\$'
+ */
export function escapeRegex(string) {
return string.replace(/[/\-\\^$*+?.()|[\]{}]/g, '\\$&');
}
+/**
+ * Provides an interface for rate limiting function calls.
+ */
export class RateLimiter {
- constructor(intervalMillis) {
- this._intervalMillis = intervalMillis;
- this._lastResolveTime = 0;
- this._pendingResolve = Promise.resolve();
+ /**
+ * Creates a new RateLimiter.
+ * @param {number} interval The interval in milliseconds.
+ * @example
+ * const rateLimiter = new RateLimiter(1000);
+ * rateLimiter.waitForResolve().then(() => {
+ * console.log('Waited 1000ms');
+ * });
+ */
+ constructor(interval) {
+ this.interval = interval;
+ this.lastResolveTime = 0;
+ this.pendingResolve = Promise.resolve();
}
+ /**
+ * Waits for the remaining time in the interval.
+ * @param {AbortSignal} abortSignal An optional AbortSignal to abort the wait.
+ * @returns {Promise} A promise that resolves when the remaining time has elapsed.
+ */
_waitRemainingTime(abortSignal) {
const currentTime = Date.now();
- const elapsedTime = currentTime - this._lastResolveTime;
- const remainingTime = Math.max(0, this._intervalMillis - elapsedTime);
+ const elapsedTime = currentTime - this.lastResolveTime;
+ const remainingTime = Math.max(0, this.interval - elapsedTime);
return new Promise((resolve, reject) => {
const timeoutId = setTimeout(() => {
@@ -436,19 +659,29 @@ export class RateLimiter {
});
}
+ /**
+ * Waits for the next interval to elapse.
+ * @param {AbortSignal} abortSignal An optional AbortSignal to abort the wait.
+ * @returns {Promise} A promise that resolves when the next interval has elapsed.
+ */
async waitForResolve(abortSignal) {
- await this._pendingResolve;
- this._pendingResolve = this._waitRemainingTime(abortSignal);
+ await this.pendingResolve;
+ this.pendingResolve = this._waitRemainingTime(abortSignal);
// Update the last resolve time
- this._lastResolveTime = Date.now() + this._intervalMillis;
- console.debug(`RateLimiter.waitForResolve() ${this._lastResolveTime}`);
+ this.lastResolveTime = Date.now() + this.interval;
+ console.debug(`RateLimiter.waitForResolve() ${this.lastResolveTime}`);
}
}
-// Taken from https://github.com/LostRuins/lite.koboldai.net/blob/main/index.html
-//import tavern png data. adapted from png-chunks-extract under MIT license
-//accepts png input data, and returns the extracted JSON
+/**
+ * Extracts a JSON object from a PNG file.
+ * Taken from https://github.com/LostRuins/lite.koboldai.net/blob/main/index.html
+ * Adapted from png-chunks-extract under MIT license
+ * @param {Uint8Array} data The PNG data to extract the JSON from.
+ * @param {string} identifier The identifier to look for in the PNG tEXT data.
+ * @returns {object} The extracted JSON object.
+ */
export function extractDataFromPng(data, identifier = 'chara') {
console.log("Attempting PNG import...");
let uint8 = new Uint8Array(4);
@@ -599,6 +832,13 @@ export async function saveBase64AsFile(base64Data, characterName, filename = "",
}
}
+/**
+ * Creates a thumbnail from a data URL.
+ * @param {string} dataUrl The data URL encoded data of the image.
+ * @param {number} maxWidth The maximum width of the thumbnail.
+ * @param {number} maxHeight The maximum height of the thumbnail.
+ * @returns {Promise} A promise that resolves to the thumbnail data URL.
+ */
export function createThumbnail(dataUrl, maxWidth, maxHeight) {
return new Promise((resolve, reject) => {
const img = new Image();
@@ -634,6 +874,13 @@ export function createThumbnail(dataUrl, maxWidth, maxHeight) {
});
}
+/**
+ * Waits for a condition to be true. Throws an error if the condition is not true within the timeout.
+ * @param {{ (): boolean; }} condition The condition to wait for.
+ * @param {number} [timeout=1000] The timeout in milliseconds.
+ * @param {number} [interval=100] The interval in milliseconds.
+ * @returns {Promise} A promise that resolves when the condition is true.
+ */
export async function waitUntilCondition(condition, timeout = 1000, interval = 100) {
return new Promise((resolve, reject) => {
const timeoutId = setTimeout(() => {
@@ -651,6 +898,12 @@ export async function waitUntilCondition(condition, timeout = 1000, interval = 1
});
}
+/**
+ * Returns a UUID v4 string.
+ * @returns {string} A UUID v4 string.
+ * @example
+ * uuidv4(); // '3e2fd9e1-0a7a-4f6d-9aaf-8a7a4babe7eb'
+ */
export function uuidv4() {
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
const r = Math.random() * 16 | 0;
@@ -659,6 +912,11 @@ export function uuidv4() {
});
}
+/**
+ * Clones an object via JSON round-trip (drops functions/undefined; Dates become strings; cycles throw).
+ * @param {any} obj The plain-data object to clone.
+ * @returns {any} A deep clone of the object.
+ */
export function deepClone(obj) {
return JSON.parse(JSON.stringify(obj));
}
diff --git a/public/scripts/world-info.js b/public/scripts/world-info.js
index 347045913..9674914f6 100644
--- a/public/scripts/world-info.js
+++ b/public/scripts/world-info.js
@@ -1,10 +1,11 @@
-import { saveSettings, callPopup, substituteParams, getTokenCount, getRequestHeaders, chat_metadata, this_chid, characters, saveCharacterDebounced, menu_type, eventSource, event_types } from "../script.js";
-import { download, debounce, initScrollHeight, resetScrollHeight, parseJsonFile, extractDataFromPng, getFileBuffer, getCharaFilename, deepClone, getSortableDelay, escapeRegex, PAGINATION_TEMPLATE } from "./utils.js";
+import { saveSettings, callPopup, substituteParams, getRequestHeaders, chat_metadata, this_chid, characters, saveCharacterDebounced, menu_type, eventSource, event_types } from "../script.js";
+import { download, debounce, initScrollHeight, resetScrollHeight, parseJsonFile, extractDataFromPng, getFileBuffer, getCharaFilename, deepClone, getSortableDelay, escapeRegex, PAGINATION_TEMPLATE, navigation_option } from "./utils.js";
import { getContext } from "./extensions.js";
import { NOTE_MODULE_NAME, metadata_keys, shouldWIAddPrompt } from "./authors-note.js";
import { registerSlashCommand } from "./slash-commands.js";
import { deviceInfo } from "./RossAscends-mods.js";
import { FILTER_TYPES, FilterHelper } from "./filters.js";
+import { getTokenCount } from "./tokenizers.js";
export {
world_info,
@@ -46,7 +47,6 @@ const saveSettingsDebounced = debounce(() => {
saveSettings()
}, 1000);
const sortFn = (a, b) => b.order - a.order;
-const navigation_option = { none: 0, previous: 1, last: 2, };
let updateEditor = (navigation) => { navigation; };
// Do not optimize. updateEditor is a function that is updated by the displayWorldEntries with new data.
@@ -418,7 +418,7 @@ function getWorldEntry(name, data, entry) {
keyInput.on("input", function () {
const uid = $(this).data("uid");
- const value = $(this).val();
+ const value = String($(this).val());
resetScrollHeight(this);
data.entries[uid].key = value
.split(",")
@@ -454,7 +454,7 @@ function getWorldEntry(name, data, entry) {
keySecondaryInput.data("uid", entry.uid);
keySecondaryInput.on("input", function () {
const uid = $(this).data("uid");
- const value = $(this).val();
+ const value = String($(this).val());
resetScrollHeight(this);
data.entries[uid].keysecondary = value
.split(",")
@@ -1506,19 +1506,6 @@ jQuery(() => {
return;
}
- /*
- if (deviceInfo.device.type === 'desktop') {
- let selectScrollTop = null;
- e.preventDefault();
- const option = $(e.target);
- const selectElement = $(this)[0];
- selectScrollTop = selectElement.scrollTop;
- option.prop('selected', !option.prop('selected'));
- await delay(1);
- selectElement.scrollTop = selectScrollTop;
- }
- */
-
onWorldInfoChange('__notSlashCommand__');
});
diff --git a/server.js b/server.js
index 27eee499c..ac19479b7 100644
--- a/server.js
+++ b/server.js
@@ -3301,6 +3301,82 @@ async function sendScaleRequest(request, response) {
}
}
+app.post("/generate_altscale", jsonParser, async function (request, response_generate_scale) {
+    if (!request.body) return response_generate_scale.sendStatus(400);
+
+    // node-fetch is required lazily, matching the other handlers in this file (e.g. sendClaudeRequest).
+    const fetch = require('node-fetch').default;
+
+    // Request body for Scale's Spellbook tRPC endpoint; authenticated below via the stored _jwt cookie.
+    const payload = {
+        json: {
+            variant: {
+                name: 'New Variant',
+                appId: '',
+                taxonomy: null
+            },
+            prompt: {
+                id: '',
+                template: '{{input}}\n',
+                exampleVariables: {},
+                variablesSourceDataId: null,
+                systemMessage: request.body.sysprompt
+            },
+            modelParameters: {
+                id: '',
+                modelId: 'GPT4',
+                modelType: 'OpenAi',
+                maxTokens: request.body.max_tokens,
+                temperature: request.body.temp,
+                stop: "user:",
+                suffix: null,
+                topP: request.body.top_p,
+                logprobs: null,
+                logitBias: request.body.logit_bias
+            },
+            inputs: [
+                {
+                    index: '-1',
+                    valueByName: {
+                        input: request.body.prompt
+                    }
+                }
+            ]
+        },
+        meta: {
+            values: {
+                'variant.taxonomy': ['undefined'],
+                'prompt.variablesSourceDataId': ['undefined'],
+                'modelParameters.suffix': ['undefined'],
+                'modelParameters.logprobs': ['undefined'],
+            }
+        }
+    };
+
+    try {
+        const apiResult = await fetch('https://dashboard.scale.com/spellbook/api/trpc/v2.variant.run', {
+            method: 'POST',
+            headers: {
+                'Content-Type': 'application/json',
+                'cookie': `_jwt=${readSecret(SECRET_KEYS.SCALE_COOKIE)}`,
+            },
+            body: JSON.stringify(payload),
+        });
+        const data = await apiResult.json();
+        // Guard against unexpected response shapes before indexing into them.
+        const output = data?.result?.data?.json?.outputs?.[0];
+        if (output === undefined) {
+            console.warn('Scale response did not contain an output:', data);
+            return response_generate_scale.send({ error: true });
+        }
+        console.debug(output);
+        return response_generate_scale.send({ output });
+    } catch (error) {
+        console.error('Error:', error);
+        return response_generate_scale.send({ error: true });
+    }
+});
+
async function sendClaudeRequest(request, response) {
const fetch = require('node-fetch').default;
@@ -3989,7 +4055,8 @@ const SECRET_KEYS = {
DEEPL: 'deepl',
OPENROUTER: 'api_key_openrouter',
SCALE: 'api_key_scale',
- AI21: 'api_key_ai21'
+ AI21: 'api_key_ai21',
+ SCALE_COOKIE: 'scale_cookie',
}
function migrateSecrets() {