diff --git a/public/index.html b/public/index.html
index 33e4e3fb0..bbb003a2a 100644
--- a/public/index.html
+++ b/public/index.html
@@ -5,7 +5,7 @@
     SillyTavern
-    [changed line: markup not preserved]
+    [changed line: markup not preserved]
@@ -1278,6 +1278,28 @@
+    [22 added lines: new XTC sampler block with "Threshold" and "Probability" slider controls; markup not preserved]
@@ -1823,7 +1845,7 @@

-    [changed line: markup not preserved]
+    [changed line: markup not preserved]
@@ -2931,6 +2953,8 @@
+    [2 added lines: markup not preserved]
diff --git a/public/script.js b/public/script.js
index 49a552159..5e6bbdb85 100644
--- a/public/script.js
+++ b/public/script.js
@@ -488,14 +488,6 @@ let default_user_name = 'User';
 export let name1 = default_user_name;
 export let name2 = 'SillyTavern System';
 export let chat = [];
-let safetychat = [
-    {
-        name: systemUserName,
-        is_user: false,
-        create_date: 0,
-        mes: 'You deleted a character/chat and arrived back here for safety reasons! Pick another character!',
-    },
-];
 let chatSaveTimeout;
 let importFlashTimeout;
 export let isChatSaving = false;
@@ -594,6 +586,17 @@ export const extension_prompt_roles = {
 
 export const MAX_INJECTION_DEPTH = 1000;
 
+const SAFETY_CHAT = [
+    {
+        name: systemUserName,
+        force_avatar: system_avatar,
+        is_system: true,
+        is_user: false,
+        create_date: 0,
+        mes: 'You deleted a character/chat and arrived back here for safety reasons! Pick another character!',
+    },
+];
+
 export let system_messages = {};
 
 async function getSystemMessages() {
@@ -3760,7 +3763,7 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
     }
 
     let examplesString = '';
-    let chatString = '';
+    let chatString = addChatsPreamble(addChatsSeparator(''));
     let cyclePrompt = '';
 
     async function getMessagesTokenCount() {
@@ -3769,10 +3772,10 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
             storyString,
             afterScenarioAnchor,
             examplesString,
-            chatString,
-            quiet_prompt,
-            cyclePrompt,
             userAlignmentMessage,
+            chatString,
+            modifyLastPromptLine(''),
+            cyclePrompt,
         ].join('').replace(/\r/gm, '');
         return getTokenCountAsync(encodeString, power_user.token_padding);
     }
@@ -3803,8 +3806,8 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
        }

        tokenCount += await getTokenCountAsync(item.replace(/\r/gm, ''));
-       chatString = item + chatString;
        if (tokenCount < this_max_context) {
+           chatString = chatString + item;
            arrMes[index] = item;
            lastAddedIndex = Math.max(lastAddedIndex, index);
        } else {
@@ -3830,8 +3833,8 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
            }

            tokenCount += await getTokenCountAsync(item.replace(/\r/gm, ''));
-           chatString = item + chatString;
            if (tokenCount < this_max_context) {
+               chatString = chatString + item;
                arrMes[i] = item;
                lastAddedIndex = Math.max(lastAddedIndex, i);
            } else {
@@ -4028,15 +4031,16 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
    async function checkPromptSize() {
        console.debug('---checking Prompt size');
        setPromptString();
+       const jointMessages = mesSend.map((e) => `${e.extensionPrompts.join('')}${e.message}`).join('');
        const prompt = [
            beforeScenarioAnchor,
            storyString,
            afterScenarioAnchor,
            mesExmString,
-           mesSend.map((e) => `${e.extensionPrompts.join('')}${e.message}`).join(''),
+           addChatsPreamble(addChatsSeparator(jointMessages)),
            '\n',
+           modifyLastPromptLine(''),
            generatedPromptCache,
-           quiet_prompt,
        ].join('').replace(/\r/gm, '');
        let thisPromptContextSize = await getTokenCountAsync(prompt, power_user.token_padding);

@@ -5679,7 +5683,7 @@ export function resetChatState() {
     // replaces deleted charcter name with system user since it will be displayed next.
     name2 = systemUserName;
     // sets up system user to tell user about having deleted a character
-    chat = [...safetychat];
+    chat.splice(0, chat.length, ...SAFETY_CHAT);
     // resets chat metadata
     chat_metadata = {};
     // resets the characters array, forcing getcharacters to reset
@@ -8840,72 +8844,74 @@ export async function handleDeleteCharacter(this_chid, delete_chats) {
 
 /**
  * Deletes a character completely, including associated chats if specified
  *
- * @param {string} characterKey - The key (avatar) of the character to be deleted
+ * @param {string|string[]} characterKey - The key (avatar) of the character to be deleted
  * @param {Object} [options] - Optional parameters for the deletion
  * @param {boolean} [options.deleteChats=true] - Whether to delete associated chats or not
 * @return {Promise} - A promise that resolves when the character is successfully deleted
  */
 export async function deleteCharacter(characterKey, { deleteChats = true } = {}) {
-    const character = characters.find(x => x.avatar == characterKey);
-    if (!character) {
-        toastr.warning(`Character ${characterKey} not found. Cannot be deleted.`);
-        return;
+    if (!Array.isArray(characterKey)) {
+        characterKey = [characterKey];
     }
 
-    const chid = characters.indexOf(character);
-    const pastChats = await getPastCharacterChats(chid);
-
-    const msg = { avatar_url: character.avatar, delete_chats: deleteChats };
-
-    const response = await fetch('/api/characters/delete', {
-        method: 'POST',
-        headers: getRequestHeaders(),
-        body: JSON.stringify(msg),
-        cache: 'no-cache',
-    });
-
-    if (!response.ok) {
-        throw new Error(`Failed to delete character: ${response.status} ${response.statusText}`);
-    }
-
-    await removeCharacterFromUI(character.name, character.avatar);
-
-    if (deleteChats) {
-        for (const chat of pastChats) {
-            const name = chat.file_name.replace('.jsonl', '');
-            await eventSource.emit(event_types.CHAT_DELETED, name);
+    for (const key of characterKey) {
+        const character = characters.find(x => x.avatar == key);
+        if (!character) {
+            toastr.warning(`Character ${key} not found. Skipping deletion.`);
+            continue;
         }
+
+        const chid = characters.indexOf(character);
+        const pastChats = await getPastCharacterChats(chid);
+
+        const msg = { avatar_url: character.avatar, delete_chats: deleteChats };
+
+        const response = await fetch('/api/characters/delete', {
+            method: 'POST',
+            headers: getRequestHeaders(),
+            body: JSON.stringify(msg),
+            cache: 'no-cache',
+        });
+
+        if (!response.ok) {
+            toastr.error(`${response.status} ${response.statusText}`, 'Failed to delete character');
+            continue;
+        }
+
+        delete tag_map[character.avatar];
+        select_rm_info('char_delete', character.name);
+
+        if (deleteChats) {
+            for (const chat of pastChats) {
+                const name = chat.file_name.replace('.jsonl', '');
+                await eventSource.emit(event_types.CHAT_DELETED, name);
+            }
+        }
+
+        await eventSource.emit(event_types.CHARACTER_DELETED, { id: chid, character: character });
     }
 
-    eventSource.emit(event_types.CHARACTER_DELETED, { id: this_chid, character: characters[this_chid] });
+    await removeCharacterFromUI();
 }
 
 /**
  * Function to delete a character from UI after character deletion API success.
  * It manages necessary UI changes such as closing advanced editing popup, unsetting
  * character ID, resetting characters array and chat metadata, deselecting character's tab
- * panel, removing character name from navigation tabs, clearing chat, removing character's
- * avatar from tag_map, fetching updated list of characters and updating the 'deleted
- * character' message.
+ * panel, removing character name from navigation tabs, clearing chat, fetching updated list of characters.
  * It also ensures to save the settings after all the operations.
- *
- * @param {string} name - The name of the character to be deleted.
- * @param {string} avatar - The avatar URL of the character to be deleted.
- * @param {boolean} reloadCharacters - Whether the character list should be refreshed after deletion.
 */
-async function removeCharacterFromUI(name, avatar, reloadCharacters = true) {
+async function removeCharacterFromUI() {
     await clearChat();
     $('#character_cross').click();
     this_chid = undefined;
     characters.length = 0;
     name2 = systemUserName;
-    chat = [...safetychat];
+    chat.splice(0, chat.length, ...SAFETY_CHAT);
     chat_metadata = {};
     $(document.getElementById('rm_button_selected_ch')).children('h2').text('');
     this_chid = undefined;
-    delete tag_map[avatar];
-    if (reloadCharacters) await getCharacters();
-    select_rm_info('char_delete', name);
+    await getCharacters();
     await printMessages();
     saveSettingsDebounced();
 }
diff --git a/public/scripts/BulkEditOverlay.js b/public/scripts/BulkEditOverlay.js
index 28517d32f..dce7e61ab 100644
--- a/public/scripts/BulkEditOverlay.js
+++ b/public/scripts/BulkEditOverlay.js
@@ -108,14 +108,12 @@ class CharacterContextMenu {
      * Delete one or more characters,
      * opens a popup.
      *
-     * @param {number} characterId
+     * @param {string|string[]} characterKey
      * @param {boolean} [deleteChats]
     * @returns {Promise}
      */
-    static delete = async (characterId, deleteChats = false) => {
-        const character = CharacterContextMenu.#getCharacter(characterId);
-
-        await deleteCharacter(character.avatar, { deleteChats: deleteChats });
+    static delete = async (characterKey, deleteChats = false) => {
+        await deleteCharacter(characterKey, { deleteChats: deleteChats });
     };
 
     static #getCharacter = (characterId) => characters[characterId] ?? null;
@@ -344,7 +342,7 @@ class BulkTagPopupHandler {
         const mutualTags = this.getMutualTags();
 
         for (const characterId of this.characterIds) {
-            for(const tag of mutualTags) {
+            for (const tag of mutualTags) {
                 removeTagFromMap(tag.id, characterId);
             }
         }
@@ -599,8 +597,7 @@ class BulkEditOverlay {
             this.container.removeEventListener('mouseup', cancelHold);
             this.container.removeEventListener('touchend', cancelHold);
-        },
-        BulkEditOverlay.longPressDelay);
+        }, BulkEditOverlay.longPressDelay);
     };
 
     handleLongPressEnd = (event) => {
@@ -847,11 +844,14 @@ class BulkEditOverlay {
            const deleteChats = document.getElementById('del_char_checkbox').checked ?? false;

            showLoader();
-           toastr.info('We\'re deleting your characters, please wait...', 'Working on it');
-           return Promise.allSettled(characterIds.map(async characterId => CharacterContextMenu.delete(characterId, deleteChats)))
-               .then(() => getCharacters())
+           const toast = toastr.info('We\'re deleting your characters, please wait...', 'Working on it');
+           const avatarList = characterIds.map(id => characters[id]?.avatar).filter(a => a);
+           return CharacterContextMenu.delete(avatarList, deleteChats)
                .then(() => this.browseState())
-               .finally(() => hideLoader());
+               .finally(() => {
+                   toastr.clear(toast);
+                   hideLoader();
+               });
        });

        // At this moment the popup is already changed in the dom, but not yet closed/resolved. We build the avatar list here
diff --git a/public/scripts/nai-settings.js b/public/scripts/nai-settings.js
index edc69d70b..6b59033a8 100644
--- a/public/scripts/nai-settings.js
+++ b/public/scripts/nai-settings.js
@@ -78,6 +78,19 @@ export function getKayraMaxContextTokens() {
     return null;
 }
 
+export function getKayraMaxResponseTokens() {
+    switch (novel_data?.tier) {
+        case 1:
+            return 100;
+        case 2:
+            return 100;
+        case 3:
+            return 150;
+    }
+
+    return maximum_output_length;
+}
+
 export function getNovelTier() {
     return nai_tiers[novel_data?.tier] ?? 'no_connection';
 }
@@ -438,12 +451,14 @@ export function getNovelGenerationData(finalPrompt, settings, maxLength, isImper
         console.log(finalPrompt);
     }
 
+    const adjustedMaxLength = nai_settings.model_novel.includes('kayra') ? getKayraMaxResponseTokens() : maximum_output_length;
+
     return {
         'input': finalPrompt,
         'model': nai_settings.model_novel,
         'use_string': true,
         'temperature': Number(nai_settings.temperature),
-        'max_length': maxLength < maximum_output_length ? maxLength : maximum_output_length,
+        'max_length': maxLength < adjustedMaxLength ? maxLength : adjustedMaxLength,
         'min_length': Number(nai_settings.min_length),
         'tail_free_sampling': Number(nai_settings.tail_free_sampling),
         'repetition_penalty': Number(nai_settings.repetition_penalty),
diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index 5b5c99e0b..913b9c06b 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -133,7 +133,7 @@ const max_2mil = 2000 * 1000;
 const scale_max = 8191;
 const claude_max = 9000; // We have a proper tokenizer, so theoretically could be larger (up to 9k)
 const claude_100k_max = 99000;
-const unlocked_max = max_200k;
+const unlocked_max = max_2mil;
 const oai_max_temp = 2.0;
 const claude_max_temp = 1.0;
 const openrouter_website_model = 'OR_Website';
@@ -4191,7 +4191,7 @@ async function onModelChange() {
     else if (['command-light-nightly', 'command-nightly'].includes(oai_settings.cohere_model)) {
         $('#openai_max_context').attr('max', max_8k);
     }
-    else if (['command-r', 'command-r-plus'].includes(oai_settings.cohere_model)) {
+    else if (oai_settings.cohere_model.includes('command-r')) {
         $('#openai_max_context').attr('max', max_128k);
     }
     else if (['c4ai-aya-23'].includes(oai_settings.cohere_model)) {
diff --git a/public/scripts/textgen-settings.js b/public/scripts/textgen-settings.js
index 6b0b41fb4..161182381 100644
--- a/public/scripts/textgen-settings.js
+++ b/public/scripts/textgen-settings.js
@@ -188,6 +188,8 @@ const settings = {
     custom_model: '',
     bypass_status_check: false,
     openrouter_allow_fallbacks: true,
+    xtc_threshold: 0.1,
+    xtc_probability: 0,
 };
 
 export let textgenerationwebui_banned_in_macros = [];
@@ -263,6 +265,8 @@ export const setting_names = [
     'custom_model',
     'bypass_status_check',
     'openrouter_allow_fallbacks',
+    'xtc_threshold',
+    'xtc_probability',
 ];
 
 const DYNATEMP_BLOCK = document.getElementById('dynatemp_block_ooba');
@@ -718,6 +722,8 @@ jQuery(function () {
        'dry_multiplier_textgenerationwebui': 0,
        'dry_base_textgenerationwebui': 1.75,
        'dry_penalty_last_n_textgenerationwebui': 0,
+       'xtc_threshold_textgenerationwebui': 0.1,
+       'xtc_probability_textgenerationwebui': 0,
    };

    for (const [id, value] of Object.entries(inputs)) {
@@ -1156,6 +1162,8 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
        'api_server': getTextGenServer(),
        'legacy_api': settings.legacy_api && (settings.type === OOBA || settings.type === APHRODITE),
        'sampler_order': settings.type === textgen_types.KOBOLDCPP ? settings.sampler_order : undefined,
+       'xtc_threshold': settings.xtc_threshold,
+       'xtc_probability': settings.xtc_probability,
    };
    const nonAphroditeParams = {
        'rep_pen': settings.rep_pen,
diff --git a/public/scripts/world-info.js b/public/scripts/world-info.js
index 29cb62a1a..8fc8a19a2 100644
--- a/public/scripts/world-info.js
+++ b/public/scripts/world-info.js
@@ -4777,8 +4777,10 @@ jQuery(() => {
         world_info_min_activations = Number($(this).val());
         $('#world_info_min_activations_counter').val(world_info_min_activations);
 
-        if (world_info_min_activations !== 0) {
+        if (world_info_min_activations !== 0 && world_info_max_recursion_steps !== 0) {
             $('#world_info_max_recursion_steps').val(0).trigger('input');
+            flashHighlight($('#world_info_max_recursion_steps').parent()); // flash the other control to show it has changed
+            console.info('[WI] Max recursion steps set to 0, as min activations is set to', world_info_min_activations);
         } else {
             saveSettings();
         }
@@ -4840,8 +4842,10 @@ jQuery(() => {
     $('#world_info_max_recursion_steps').on('input', function () {
         world_info_max_recursion_steps = Number($(this).val());
         $('#world_info_max_recursion_steps_counter').val(world_info_max_recursion_steps);
-        if (world_info_max_recursion_steps !== 0) {
+        if (world_info_max_recursion_steps !== 0 && world_info_min_activations !== 0) {
             $('#world_info_min_activations').val(0).trigger('input');
+            flashHighlight($('#world_info_min_activations').parent()); // flash the other control to show it has changed
+            console.info('[WI] Min activations set to 0, as max recursion steps is set to', world_info_max_recursion_steps);
         } else {
             saveSettings();
         }
diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js
index 58bbbb062..a7689fa1c 100644
--- a/src/endpoints/backends/chat-completions.js
+++ b/src/endpoints/backends/chat-completions.js
@@ -572,6 +572,11 @@ async function sendCohereRequest(request, response) {
            search_queries_only: false,
        };

+       const canDoSafetyMode = String(request.body.model).endsWith('08-2024');
+       if (canDoSafetyMode) {
+           requestBody.safety_mode = 'NONE';
+       }
+
        console.log('Cohere request:', requestBody);

        const config = {
diff --git a/src/endpoints/novelai.js b/src/endpoints/novelai.js
index abbfe3ef6..d21602cc2 100644
--- a/src/endpoints/novelai.js
+++ b/src/endpoints/novelai.js
@@ -6,6 +6,7 @@ const { readAllChunks, extractFileFromZipBuffer, forwardFetchResponse } = requir
 const { jsonParser } = require('../express-common');
 
 const API_NOVELAI = 'https://api.novelai.net';
+const TEXT_NOVELAI = 'https://text.novelai.net';
 const IMAGE_NOVELAI = 'https://image.novelai.net';
 
 // Ban bracket generation, plus defaults
@@ -155,7 +156,7 @@ router.post('/generate', jsonParser, async function (req, res) {
            'repetition_penalty_slope': req.body.repetition_penalty_slope,
            'repetition_penalty_frequency': req.body.repetition_penalty_frequency,
            'repetition_penalty_presence': req.body.repetition_penalty_presence,
-           'repetition_penalty_whitelist': isNewModel ? repPenaltyAllowList : null,
+           'repetition_penalty_whitelist': isNewModel ? repPenaltyAllowList.flat() : null,
            'top_a': req.body.top_a,
            'top_p': req.body.top_p,
            'top_k': req.body.top_k,
@@ -178,9 +179,7 @@ router.post('/generate', jsonParser, async function (req, res) {
    };

    // Tells the model to stop generation at '>'
-   if ('theme_textadventure' === req.body.prefix &&
-       (true === req.body.model.includes('clio') ||
-           true === req.body.model.includes('kayra'))) {
+   if ('theme_textadventure' === req.body.prefix && isNewModel) {
        data.parameters.eos_token_id = 49405;
    }

@@ -193,7 +192,8 @@ router.post('/generate', jsonParser, async function (req, res) {
    };

    try {
-       const url = req.body.streaming ? `${API_NOVELAI}/ai/generate-stream` : `${API_NOVELAI}/ai/generate`;
+       const baseURL = req.body.model.includes('kayra') ? TEXT_NOVELAI : API_NOVELAI;
+       const url = req.body.streaming ? `${baseURL}/ai/generate-stream` : `${baseURL}/ai/generate`;
        const response = await fetch(url, { method: 'POST', timeout: 0, ...args });

        if (req.body.streaming) {