Merge branch 'neo-server' into parser-v2

LenAnderson
2024-04-18 16:08:19 -04:00
57 changed files with 2909 additions and 492 deletions

View File

@ -0,0 +1,9 @@
<div id="attachFile" class="list-group-item flex-container flexGap5" title="Attach a file or image to a current chat.">
<div class="fa-fw fa-solid fa-paperclip extensionsMenuExtensionButton"></div>
<span data-i18n="Attach a File">Attach a File</span>
</div>
<div id="manageAttachments" class="list-group-item flex-container flexGap5" title="View global, character, or data files.">
<div class="fa-fw fa-solid fa-book-open-reader extensionsMenuExtensionButton"></div>
<span data-i18n="Open Data Bank">Open Data Bank</span>
</div>

View File

@ -0,0 +1,51 @@
<div>
<div class="flex-container flexFlowColumn">
<label for="fandomScrapeInput" data-i18n="Enter a URL or the ID of a Fandom wiki page to scrape:">
Enter a URL or the ID of a Fandom wiki page to scrape:
</label>
<small>
<span data-i18n=Examples:">Examples:</span>
<code>https://harrypotter.fandom.com/</code>
<span data-i18n="or">or</span>
<code>harrypotter</code>
</small>
<input type="text" id="fandomScrapeInput" name="fandomScrapeInput" class="text_pole" placeholder="">
</div>
<div class="flex-container flexFlowColumn">
<label for="fandomScrapeFilter">
Optional regex to filter the scraped content by its title:
</label>
<small>
<span data-i18n="Example:">Example:</span>
<code>/(Azkaban|Weasley)/gi</code>
</small>
<input type="text" id="fandomScrapeFilter" name="fandomScrapeFilter" class="text_pole" placeholder="">
</div>
<div class="flex-container flexFlowColumn">
<label>
Output format:
</label>
<label class="checkbox_label justifyLeft" for="fandomScrapeOutputSingle">
<input id="fandomScrapeOutputSingle" type="radio" name="fandomScrapeOutput" value="single" checked>
<div class="flex-container flexFlowColumn flexNoGap">
<span data-i18n="Single file">
Single file
</span>
<small data-i18n="All articles will be concatenated into a single file.">
All articles will be concatenated into a single file.
</small>
</div>
</label>
<label class="checkbox_label justifyLeft" for="fandomScrapeOutputMulti">
<input id="fandomScrapeOutputMulti" type="radio" name="fandomScrapeOutput" value="multi">
<div class="flex-container flexFlowColumn flexNoGap">
<span data-i18n="File per article">
File per article
</span>
<small data-i18n="Each article will be saved as a separate file.">
Not recommended. Each article will be saved as a separate file.
</small>
</div>
</label>
</div>
</div>

View File

@ -0,0 +1,6 @@
import { renderExtensionTemplateAsync } from '../../extensions.js';
jQuery(async () => {
const buttons = await renderExtensionTemplateAsync('attachments', 'buttons', {});
$('#extensionsMenu').prepend(buttons);
});
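This index.js only renders buttons.html into the extensions wand menu; the click handlers for the two ids it adds are registered elsewhere in the application. A minimal sketch of how such wiring could look, assuming only that jQuery is global and that the #attachFile / #manageAttachments ids from buttons.html are present (illustrative, not part of this commit):

// Hypothetical delegated handlers, shown for orientation only; the real
// handlers for these menu items are registered outside this extension.
$(document).on('click', '#attachFile', () => {
    console.debug('attachments: "Attach a File" clicked');
});
$(document).on('click', '#manageAttachments', () => {
    console.debug('attachments: "Open Data Bank" clicked');
});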

View File

@ -0,0 +1,117 @@
<div class="wide100p padding5">
<h2 class="marginBot5">
<span data-i18n="Data Bank">
Data Bank
</span>
</h2>
<div data-i18n="These files will be available for extensions that support attachments (e.g. Vector Storage).">
These files will be available for extensions that support attachments (e.g. Vector Storage).
</div>
<div data-i18n="Supported file types: Plain Text, PDF, Markdown, HTML." class="marginTopBot5">
Supported file types: Plain Text, PDF, Markdown, HTML.
</div>
<div class="flex-container marginTopBot5">
<input type="search" id="attachmentSearch" class="attachmentSearch text_pole margin0 flex1" placeholder="Search...">
<select id="attachmentSort" class="attachmentSort text_pole margin0 flex1">
<option data-sort-field="created" data-sort-order="desc" data-i18n="Date (Newest First)">
Date (Newest First)
</option>
<option data-sort-field="created" data-sort-order="asc" data-i18n="Date (Oldest First)">
Date (Oldest First)
</option>
<option data-sort-field="name" data-sort-order="asc" data-i18n="Name (A-Z)">
Name (A-Z)
</option>
<option data-sort-field="name" data-sort-order="desc" data-i18n="Name (Z-A)">
Name (Z-A)
</option>
<option data-sort-field="size" data-sort-order="asc" data-i18n="Size (Smallest First)">
Size (Smallest First)
</option>
<option data-sort-field="size" data-sort-order="desc" data-i18n="Size (Largest First)">
Size (Largest First)
</option>
</select>
</div>
<div class="justifyLeft globalAttachmentsBlock marginBot10">
<h3 class="globalAttachmentsTitle margin0 title_restorable">
<span data-i18n="Global Attachments">
Global Attachments
</span>
<div class="openActionModalButton menu_button menu_button_icon">
<i class="fa-solid fa-plus"></i>
<span data-i18n="Add">Add</span>
</div>
</h3>
<small data-i18n="These files are available for all characters in all chats.">
These files are available for all characters in all chats.
</small>
<div class="globalAttachmentsList attachmentsList"></div>
<hr>
</div>
<div class="justifyLeft characterAttachmentsBlock marginBot10">
<h3 class="characterAttachmentsTitle margin0 title_restorable">
<span data-i18n="Character Attachments">
Character Attachments
</span>
<div class="openActionModalButton menu_button menu_button_icon">
<i class="fa-solid fa-plus"></i>
<span data-i18n="Add">Add</span>
</div>
</h3>
<div class="flex-container flexFlowColumn">
<strong><small class="characterAttachmentsName"></small></strong>
<small>
<span data-i18n="These files are available the current character in all chats they are in.">
These files are available the current character in all chats they are in.
</span>
<span>
<span data-i18n="Saved locally. Not exported.">
Saved locally. Not exported.
</span>
</span>
</small>
</div>
<div class="characterAttachmentsList attachmentsList"></div>
<hr>
</div>
<div class="justifyLeft chatAttachmentsBlock marginBot10">
<h3 class="chatAttachmentsTitle margin0 title_restorable">
<span data-i18n="Chat Attachments">
Chat Attachments
</span>
<div class="openActionModalButton menu_button menu_button_icon">
<i class="fa-solid fa-plus"></i>
<span data-i18n="Add">Add</span>
</div>
</h3>
<div class="flex-container flexFlowColumn">
<strong><small class="chatAttachmentsName"></small></strong>
<small data-i18n="These files are available to all characters in the current chat.">
These files are available to all characters in the current chat.
</small>
</div>
<div class="chatAttachmentsList attachmentsList"></div>
</div>
<div class="attachmentListItemTemplate template_element">
<div class="attachmentListItem flex-container alignItemsCenter flexGap10">
<div class="attachmentFileIcon fa-solid fa-file-alt"></div>
<div class="attachmentListItemName flex1"></div>
<small class="attachmentListItemCreated"></small>
<small class="attachmentListItemSize"></small>
<div class="viewAttachmentButton right_menu_button fa-solid fa-magnifying-glass" title="View attachment content"></div>
<div class="deleteAttachmentButton right_menu_button fa-solid fa-trash" title="Delete attachment"></div>
</div>
</div>
<div class="actionButtonTemplate">
<div class="actionButton list-group-item flex-container flexGap5" title="">
<i class="actionButtonIcon"></i>
<span class="actionButtonText"></span>
</div>
</div>
<div class="actionButtonsModal popper-modal options-content list-group"></div>
</div>

View File

@ -0,0 +1,11 @@
{
"display_name": "Chat Attachments",
"loading_order": 3,
"requires": [],
"optional": [],
"js": "index.js",
"css": "style.css",
"author": "Cohee1207",
"version": "1.0.0",
"homePage": "https://github.com/SillyTavern/SillyTavern"
}

View File

@ -0,0 +1,29 @@
.attachmentsList:empty {
width: 100%;
height: 100%;
}
.attachmentsList:empty::before {
display: flex;
align-items: center;
justify-content: center;
content: "No data";
font-weight: bolder;
width: 100%;
height: 100%;
opacity: 0.8;
min-height: 3rem;
}
.attachmentListItem {
padding: 10px;
}
.attachmentListItemSize {
min-width: 4em;
text-align: right;
}
.attachmentListItemCreated {
text-align: right;
}

View File

@ -0,0 +1,3 @@
<div data-i18n="Enter web URLs to scrape (one per line):">
Enter web URLs to scrape (one per line):
</div>

View File

@ -1,10 +1,11 @@
import { getBase64Async, saveBase64AsFile } from '../../utils.js';
import { getBase64Async, isTrueBoolean, saveBase64AsFile } from '../../utils.js';
import { getContext, getApiUrl, doExtrasFetch, extension_settings, modules } from '../../extensions.js';
import { callPopup, getRequestHeaders, saveSettingsDebounced, substituteParams } from '../../../script.js';
import { getMessageTimeStamp } from '../../RossAscends-mods.js';
import { SECRET_KEYS, secret_state } from '../../secrets.js';
import { getMultimodalCaption } from '../shared.js';
import { textgen_types, textgenerationwebui_settings } from '../../textgen-settings.js';
import { registerSlashCommand } from '../../slash-commands.js';
export { MODULE_NAME };
const MODULE_NAME = 'caption';
@ -124,9 +125,10 @@ async function sendCaptionedMessage(caption, image) {
* Generates a caption for an image using a selected source.
* @param {string} base64Img Base64 encoded image without the data:image/...;base64, prefix
* @param {string} fileData Base64 encoded image with the data:image/...;base64, prefix
* @param {string} externalPrompt Caption prompt
* @returns {Promise<{caption: string}>} Generated caption
*/
async function doCaptionRequest(base64Img, fileData) {
async function doCaptionRequest(base64Img, fileData, externalPrompt) {
switch (extension_settings.caption.source) {
case 'local':
return await captionLocal(base64Img);
@ -135,7 +137,7 @@ async function doCaptionRequest(base64Img, fileData) {
case 'horde':
return await captionHorde(base64Img);
case 'multimodal':
return await captionMultimodal(fileData);
return await captionMultimodal(fileData, externalPrompt);
default:
throw new Error('Unknown caption source.');
}
@ -214,12 +216,13 @@ async function captionHorde(base64Img) {
/**
* Generates a caption for an image using a multimodal model.
* @param {string} base64Img Base64 encoded image with the data:image/...;base64, prefix
* @param {string} externalPrompt Caption prompt
* @returns {Promise<{caption: string}>} Generated caption
*/
async function captionMultimodal(base64Img) {
let prompt = extension_settings.caption.prompt || PROMPT_DEFAULT;
async function captionMultimodal(base64Img, externalPrompt) {
let prompt = externalPrompt || extension_settings.caption.prompt || PROMPT_DEFAULT;
if (extension_settings.caption.prompt_ask) {
if (!externalPrompt && extension_settings.caption.prompt_ask) {
const customPrompt = await callPopup('<h3>Enter a comment or question:</h3>', 'input', prompt, { rows: 2 });
if (!customPrompt) {
throw new Error('User aborted the caption sending.');
@ -231,29 +234,46 @@ async function captionMultimodal(base64Img) {
return { caption };
}
async function onSelectImage(e) {
setSpinnerIcon();
/**
* Handles the image selection event.
* @param {Event} e Input event
* @param {string} prompt Caption prompt
* @param {boolean} quiet Suppresses sending a message
* @returns {Promise<string>} Generated caption
*/
async function onSelectImage(e, prompt, quiet) {
if (!(e.target instanceof HTMLInputElement)) {
return '';
}
const file = e.target.files[0];
const form = e.target.form;
if (!file || !(file instanceof File)) {
return;
form && form.reset();
return '';
}
try {
setSpinnerIcon();
const context = getContext();
const fileData = await getBase64Async(file);
const base64Format = fileData.split(',')[0].split(';')[0].split('/')[1];
const base64Data = fileData.split(',')[1];
const { caption } = await doCaptionRequest(base64Data, fileData);
const imagePath = await saveBase64AsFile(base64Data, context.name2, '', base64Format);
await sendCaptionedMessage(caption, imagePath);
const { caption } = await doCaptionRequest(base64Data, fileData, prompt);
if (!quiet) {
const imagePath = await saveBase64AsFile(base64Data, context.name2, '', base64Format);
await sendCaptionedMessage(caption, imagePath);
}
return caption;
}
catch (error) {
toastr.error('Failed to caption image.');
console.log(error);
return '';
}
finally {
e.target.form.reset();
form && form.reset();
setImageIcon();
}
}
@ -263,6 +283,26 @@ function onRefineModeInput() {
saveSettingsDebounced();
}
/**
* Callback for the /caption command.
* @param {object} args Named parameters
* @param {string} prompt Caption prompt
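* @returns {Promise<string>} Generated caption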
*/
function captionCommandCallback(args, prompt) {
return new Promise(resolve => {
const quiet = isTrueBoolean(args?.quiet);
const input = document.createElement('input');
input.type = 'file';
input.accept = 'image/*';
input.onchange = async (e) => {
const caption = await onSelectImage(e, prompt, quiet);
resolve(caption);
};
input.oncancel = () => resolve('');
input.click();
});
}
jQuery(function () {
function addSendPictureButton() {
const sendButton = $(`
@ -270,14 +310,8 @@ jQuery(function () {
<div class="fa-solid fa-image extensionsMenuExtensionButton"></div>
Generate Caption
</div>`);
const attachFileButton = $(`
<div id="attachFile" class="list-group-item flex-container flexGap5">
<div class="fa-solid fa-paperclip extensionsMenuExtensionButton"></div>
Attach a File
</div>`);
$('#extensionsMenu').prepend(sendButton);
$('#extensionsMenu').prepend(attachFileButton);
$(sendButton).on('click', () => {
const hasCaptionModule =
(modules.includes('caption') && extension_settings.caption.source === 'extras') ||
@ -308,7 +342,7 @@ jQuery(function () {
$(imgForm).append(inputHtml);
$(imgForm).hide();
$('#form_sheld').append(imgForm);
$('#img_file').on('change', onSelectImage);
$('#img_file').on('change', (e) => onSelectImage(e.originalEvent, '', false));
}
function switchMultimodalBlocks() {
const isMultimodal = extension_settings.caption.source === 'multimodal';
@ -457,4 +491,6 @@ jQuery(function () {
extension_settings.caption.prompt_ask = $('#caption_prompt_ask').prop('checked');
saveSettingsDebounced();
});
registerSlashCommand('caption', captionCommandCallback, [], '<span class="monospace">quiet=true/false [prompt]</span> - captions an image with an optional prompt and passes the caption down the pipe. Only multimodal sources support custom prompts. Set the "quiet" argument to true to suppress sending a captioned message (default: false).', true, true);
});
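Taken together, the reworked onSelectImage and the new captionCommandCallback let the /caption command open a file picker, caption the chosen image with an optional custom prompt, and return the caption down the pipe without posting a chat message when quiet=true. A hedged sketch of driving the callback directly, using only names added in this diff (illustrative; the slash command dispatcher normally does this):

(async () => {
    // Named slash-command arguments arrive as strings, so 'quiet' is parsed
    // with isTrueBoolean() inside the callback.
    const caption = await captionCommandCallback({ quiet: 'true' }, 'Describe the image in one sentence.');
    // With quiet=true no captioned message is sent; the caption is only returned.
    console.log(caption || 'Cancelled or captioning failed.');
})();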

View File

@ -1,17 +1,19 @@
import { callPopup, eventSource, event_types, getRequestHeaders, saveSettingsDebounced } from '../../../script.js';
import { callPopup, eventSource, event_types, generateQuietPrompt, getRequestHeaders, saveSettingsDebounced, substituteParams } from '../../../script.js';
import { dragElement, isMobile } from '../../RossAscends-mods.js';
import { getContext, getApiUrl, modules, extension_settings, ModuleWorkerWrapper, doExtrasFetch, renderExtensionTemplateAsync } from '../../extensions.js';
import { loadMovingUIState, power_user } from '../../power-user.js';
import { registerSlashCommand } from '../../slash-commands.js';
import { onlyUnique, debounce, getCharaFilename, trimToEndSentence, trimToStartSentence } from '../../utils.js';
import { hideMutedSprites } from '../../group-chats.js';
import { isJsonSchemaSupported } from '../../textgen-settings.js';
export { MODULE_NAME };
const MODULE_NAME = 'expressions';
const UPDATE_INTERVAL = 2000;
const STREAMING_UPDATE_INTERVAL = 6000;
const STREAMING_UPDATE_INTERVAL = 10000;
const TALKINGCHECK_UPDATE_INTERVAL = 500;
const DEFAULT_FALLBACK_EXPRESSION = 'joy';
const DEFAULT_LLM_PROMPT = 'Pause your roleplay. Classify the emotion of the last message. Output just one word, e.g. "joy" or "anger". Choose only one of the following labels: {{labels}}';
const DEFAULT_EXPRESSIONS = [
'talkinghead',
'admiration',
@ -43,6 +45,11 @@ const DEFAULT_EXPRESSIONS = [
'surprise',
'neutral',
];
const EXPRESSION_API = {
local: 0,
extras: 1,
llm: 2,
};
let expressionsList = null;
let lastCharacter = undefined;
@ -55,7 +62,7 @@ let lastServerResponseTime = 0;
export let lastExpression = {};
function isTalkingHeadEnabled() {
return extension_settings.expressions.talkinghead && !extension_settings.expressions.local;
return extension_settings.expressions.talkinghead && extension_settings.expressions.api == EXPRESSION_API.extras;
}
/**
@ -500,6 +507,10 @@ async function loadTalkingHead() {
},
body: JSON.stringify(emotionsSettings),
});
if (!apiResult.ok) {
throw new Error(apiResult.statusText);
}
}
catch (error) {
// it's ok if not supported
@ -532,6 +543,10 @@ async function loadTalkingHead() {
},
body: JSON.stringify(animatorSettings),
});
if (!apiResult.ok) {
throw new Error(apiResult.statusText);
}
}
catch (error) {
// it's ok if not supported
@ -585,10 +600,10 @@ function handleImageChange() {
async function moduleWorker() {
const context = getContext();
// Hide and disable Talkinghead while in local mode
$('#image_type_block').toggle(!extension_settings.expressions.local);
// Hide and disable Talkinghead while not in extras
$('#image_type_block').toggle(extension_settings.expressions.api == EXPRESSION_API.extras);
if (extension_settings.expressions.local && extension_settings.expressions.talkinghead) {
if (extension_settings.expressions.api != EXPRESSION_API.extras && extension_settings.expressions.talkinghead) {
$('#image_type_toggle').prop('checked', false);
setTalkingHeadState(false);
}
@ -628,7 +643,7 @@ async function moduleWorker() {
}
const offlineMode = $('.expression_settings .offline_mode');
if (!modules.includes('classify') && !extension_settings.expressions.local) {
if (!modules.includes('classify') && extension_settings.expressions.api == EXPRESSION_API.extras) {
$('#open_chat_expressions').show();
$('#no_chat_expressions').hide();
offlineMode.css('display', 'block');
@ -821,7 +836,7 @@ function setTalkingHeadState(newState) {
extension_settings.expressions.talkinghead = newState; // Store setting
saveSettingsDebounced();
if (extension_settings.expressions.local) {
if (extension_settings.expressions.api == EXPRESSION_API.local || extension_settings.expressions.api == EXPRESSION_API.llm) {
return;
}
@ -900,7 +915,7 @@ async function classifyCommand(_, text) {
return '';
}
if (!modules.includes('classify') && !extension_settings.expressions.local) {
if (!modules.includes('classify') && extension_settings.expressions.api == EXPRESSION_API.extras) {
toastr.warning('Text classification is disabled or not available');
return '';
}
@ -971,9 +986,76 @@ function sampleClassifyText(text) {
return result.trim();
}
/**
* Gets the classification prompt for the LLM API.
* @param {string[]} labels A list of labels to search for.
* @returns {Promise<string>} Prompt for the LLM API.
*/
async function getLlmPrompt(labels) {
if (isJsonSchemaSupported()) {
return '';
}
const labelsString = labels.map(x => `"${x}"`).join(', ');
const prompt = substituteParams(String(extension_settings.expressions.llmPrompt))
.replace(/{{labels}}/gi, labelsString);
return prompt;
}
/**
* Parses the emotion response from the LLM API.
* @param {string} emotionResponse The response from the LLM API.
* @param {string[]} labels A list of labels to search for.
* @returns {string} The parsed emotion or the fallback expression.
*/
function parseLlmResponse(emotionResponse, labels) {
const fallbackExpression = getFallbackExpression();
try {
const parsedEmotion = JSON.parse(emotionResponse);
return parsedEmotion?.emotion ?? fallbackExpression;
} catch {
const fuse = new Fuse([emotionResponse]);
for (const label of labels) {
const result = fuse.search(label);
if (result.length > 0) {
return label;
}
}
}
throw new Error('Could not parse emotion response ' + emotionResponse);
}
function onTextGenSettingsReady(args) {
// Only call if inside an API call
if (inApiCall && extension_settings.expressions.api === EXPRESSION_API.llm && isJsonSchemaSupported()) {
const emotions = DEFAULT_EXPRESSIONS.filter((e) => e != 'talkinghead');
Object.assign(args, {
top_k: 1,
stop: [],
stopping_strings: [],
custom_token_bans: [],
json_schema: {
$schema: 'http://json-schema.org/draft-04/schema#',
type: 'object',
properties: {
emotion: {
type: 'string',
enum: emotions,
},
},
required: [
'emotion',
],
},
});
}
}
async function getExpressionLabel(text) {
// Return if text is undefined, saving a costly fetch request
if ((!modules.includes('classify') && !extension_settings.expressions.local) || !text) {
if ((!modules.includes('classify') && extension_settings.expressions.api == EXPRESSION_API.extras) || !text) {
return getFallbackExpression();
}
@ -984,39 +1066,50 @@ async function getExpressionLabel(text) {
text = sampleClassifyText(text);
try {
if (extension_settings.expressions.local) {
// Local transformers pipeline
const apiResult = await fetch('/api/extra/classify', {
method: 'POST',
headers: getRequestHeaders(),
body: JSON.stringify({ text: text }),
});
switch (extension_settings.expressions.api) {
// Local BERT pipeline
case EXPRESSION_API.local: {
const localResult = await fetch('/api/extra/classify', {
method: 'POST',
headers: getRequestHeaders(),
body: JSON.stringify({ text: text }),
});
if (apiResult.ok) {
const data = await apiResult.json();
return data.classification[0].label;
if (localResult.ok) {
const data = await localResult.json();
return data.classification[0].label;
}
} break;
// Using LLM
case EXPRESSION_API.llm: {
const expressionsList = await getExpressionsList();
const prompt = await getLlmPrompt(expressionsList);
const emotionResponse = await generateQuietPrompt(prompt, false, false);
return parseLlmResponse(emotionResponse, expressionsList);
}
} else {
// Extras
const url = new URL(getApiUrl());
url.pathname = '/api/classify';
default: {
const url = new URL(getApiUrl());
url.pathname = '/api/classify';
const apiResult = await doExtrasFetch(url, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Bypass-Tunnel-Reminder': 'bypass',
},
body: JSON.stringify({ text: text }),
});
const extrasResult = await doExtrasFetch(url, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Bypass-Tunnel-Reminder': 'bypass',
},
body: JSON.stringify({ text: text }),
});
if (apiResult.ok) {
const data = await apiResult.json();
return data.classification[0].label;
}
if (extrasResult.ok) {
const data = await extrasResult.json();
return data.classification[0].label;
}
} break;
}
} catch (error) {
console.log(error);
toastr.info('Could not classify expression. Check the console or your backend for more information.');
console.error(error);
return getFallbackExpression();
}
}
@ -1177,23 +1270,12 @@ async function getExpressionsList() {
*/
async function resolveExpressionsList() {
// get something for offline mode (default images)
if (!modules.includes('classify') && !extension_settings.expressions.local) {
if (!modules.includes('classify') && extension_settings.expressions.api == EXPRESSION_API.extras) {
return DEFAULT_EXPRESSIONS;
}
try {
if (extension_settings.expressions.local) {
const apiResult = await fetch('/api/extra/classify/labels', {
method: 'POST',
headers: getRequestHeaders(),
});
if (apiResult.ok) {
const data = await apiResult.json();
expressionsList = data.labels;
return expressionsList;
}
} else {
if (extension_settings.expressions.api == EXPRESSION_API.extras) {
const url = new URL(getApiUrl());
url.pathname = '/api/classify/labels';
@ -1204,6 +1286,17 @@ async function getExpressionsList() {
if (apiResult.ok) {
const data = await apiResult.json();
expressionsList = data.labels;
return expressionsList;
}
} else {
const apiResult = await fetch('/api/extra/classify/labels', {
method: 'POST',
headers: getRequestHeaders(),
});
if (apiResult.ok) {
const data = await apiResult.json();
expressionsList = data.labels;
return expressionsList;
@ -1444,6 +1537,16 @@ async function onClickExpressionRemoveCustom() {
moduleWorker();
}
function onExpressionApiChanged() {
const tempApi = this.value;
if (tempApi) {
extension_settings.expressions.api = Number(tempApi);
$('.expression_llm_prompt_block').toggle(extension_settings.expressions.api === EXPRESSION_API.llm);
moduleWorker();
saveSettingsDebounced();
}
}
function onExpressionFallbackChanged() {
const expression = this.value;
if (expression) {
@ -1556,6 +1659,7 @@ async function onClickExpressionOverrideButton() {
// Refresh sprites list. Assume the override path has been properly handled.
try {
inApiCall = true;
$('#visual-novel-wrapper').empty();
await validateImages(overridePath.length === 0 ? currentLastMessage.name : overridePath, true);
const expression = await getExpressionLabel(currentLastMessage.mes);
@ -1563,6 +1667,8 @@ async function onClickExpressionOverrideButton() {
forceUpdateVisualNovelMode();
} catch (error) {
console.debug(`Setting expression override for ${avatarFileName} failed with error: ${error}`);
} finally {
inApiCall = false;
}
}
@ -1699,6 +1805,27 @@ async function fetchImagesNoCache() {
return await Promise.allSettled(promises);
}
function migrateSettings() {
if (extension_settings.expressions.api === undefined) {
extension_settings.expressions.api = EXPRESSION_API.extras;
saveSettingsDebounced();
}
if (Object.keys(extension_settings.expressions).includes('local')) {
if (extension_settings.expressions.local) {
extension_settings.expressions.api = EXPRESSION_API.local;
}
delete extension_settings.expressions.local;
saveSettingsDebounced();
}
if (extension_settings.expressions.llmPrompt === undefined) {
extension_settings.expressions.llmPrompt = DEFAULT_LLM_PROMPT;
saveSettingsDebounced();
}
}
(async function () {
function addExpressionImage() {
const html = `
@ -1730,11 +1857,6 @@ async function fetchImagesNoCache() {
extension_settings.expressions.translate = !!$(this).prop('checked');
saveSettingsDebounced();
});
$('#expression_local').prop('checked', extension_settings.expressions.local).on('input', function () {
extension_settings.expressions.local = !!$(this).prop('checked');
moduleWorker();
saveSettingsDebounced();
});
$('#expression_override_cleanup_button').on('click', onClickExpressionOverrideRemoveAllButton);
$(document).on('dragstart', '.expression', (e) => {
e.preventDefault();
@ -1753,10 +1875,23 @@ async function fetchImagesNoCache() {
});
await renderAdditionalExpressionSettings();
$('#expression_api').val(extension_settings.expressions.api ?? EXPRESSION_API.extras);
$('.expression_llm_prompt_block').toggle(extension_settings.expressions.api === EXPRESSION_API.llm);
$('#expression_llm_prompt').val(extension_settings.expressions.llmPrompt ?? '');
$('#expression_llm_prompt').on('input', function () {
extension_settings.expressions.llmPrompt = $(this).val();
saveSettingsDebounced();
});
$('#expression_llm_prompt_restore').on('click', function () {
$('#expression_llm_prompt').val(DEFAULT_LLM_PROMPT);
extension_settings.expressions.llmPrompt = DEFAULT_LLM_PROMPT;
saveSettingsDebounced();
});
$('#expression_custom_add').on('click', onClickExpressionAddCustom);
$('#expression_custom_remove').on('click', onClickExpressionRemoveCustom);
$('#expression_fallback').on('change', onExpressionFallbackChanged);
$('#expression_api').on('change', onExpressionApiChanged);
}
// Pause Talkinghead to save resources when the ST tab is not visible or the window is minimized.
@ -1789,6 +1924,7 @@ async function fetchImagesNoCache() {
addExpressionImage();
addVisualNovelMode();
migrateSettings();
await addSettings();
const wrapper = new ModuleWorkerWrapper(moduleWorker);
const updateFunction = wrapper.update.bind(wrapper);
@ -1828,6 +1964,7 @@ async function fetchImagesNoCache() {
});
eventSource.on(event_types.MOVABLE_PANELS_RESET, updateVisualNovelModeDebounced);
eventSource.on(event_types.GROUP_UPDATED, updateVisualNovelModeDebounced);
eventSource.on(event_types.TEXT_COMPLETION_SETTINGS_READY, onTextGenSettingsReady);
registerSlashCommand('sprite', setSpriteSlashCommand, ['emote'], '<span class="monospace">(spriteId)</span> force sets the sprite for the current character', true, true);
registerSlashCommand('spriteoverride', setSpriteSetCommand, ['costume'], '<span class="monospace">(optional folder)</span> sets an override sprite folder for the current character. If the name starts with a slash or a backslash, selects a sub-folder in the character-named folder. Empty value to reset to default.', true, true);
registerSlashCommand('lastsprite', (_, value) => lastExpression[value.trim()] ?? '', [], '<span class="monospace">(charName)</span> Returns the last set sprite / expression for the named character.', true, true);
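With the new LLM classifier there are two response paths: backends that pass isJsonSchemaSupported() are constrained by onTextGenSettingsReady to emit an object with a single 'emotion' property, while other backends receive the DEFAULT_LLM_PROMPT (with {{labels}} expanded by getLlmPrompt) and their free-form reply is recovered by parseLlmResponse, first as JSON and then by fuzzy-matching each label with Fuse.js. A small sketch of the shapes parseLlmResponse handles, using an illustrative label list (expected results, assuming default Fuse.js options):

const labels = ['joy', 'anger', 'fear', 'surprise'];

// 1) JSON-schema-constrained backend: structured output parses directly.
parseLlmResponse('{"emotion": "anger"}', labels); // -> 'anger'

// 2) Plain-text backend: JSON.parse throws, so each label is searched
//    against the raw reply with Fuse.js; the first label that matches wins.
parseLlmResponse('anger', labels); // expected: 'anger'

// 3) Nothing parses or matches: the function throws, and getExpressionLabel
//    returns getFallbackExpression() from its catch block instead.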

View File

@ -6,10 +6,6 @@
</div>
<div class="inline-drawer-content">
<label class="checkbox_label" for="expression_local" title="Use classification model without the Extras server.">
<input id="expression_local" type="checkbox" />
<span data-i18n="Local server classification">Local server classification</span>
</label>
<label class="checkbox_label" for="expression_translate" title="Use the selected API from Chat Translation extension settings.">
<input id="expression_translate" type="checkbox">
<span>Translate text to English before classification</span>
@ -22,6 +18,25 @@
<input id="image_type_toggle" type="checkbox">
<span>Image Type - talkinghead (extras)</span>
</label>
<div class="expression_api_block m-b-1 m-t-1">
<label for="expression_api">Classifier API</label>
<small>Select the API for classifying expressions.</small>
<select id="expression_api" class="flex1 margin0" data-i18n="Expression API" placeholder="Expression API">
<option value="0">Local</option>
<option value="1">Extras</option>
<option value="2">LLM</option>
</select>
</div>
<div class="expression_llm_prompt_block m-b-1 m-t-1">
<label for="expression_llm_prompt" class="title_restorable">
<span>LLM Prompt</span>
<div id="expression_llm_prompt_restore" title="Restore default value" class="right_menu_button">
<i class="fa-solid fa-clock-rotate-left fa-sm"></i>
</div>
</label>
<small>Will be used if the API doesn't support JSON schemas.</small>
<textarea id="expression_llm_prompt" type="text" class="text_pole textarea_compact" rows="2" placeholder="Use &lcub;&lcub;labels&rcub;&rcub; special macro."></textarea>
</div>
<div class="expression_fallback_block m-b-1 m-t-1">
<label for="expression_fallback">Default / Fallback Expression</label>
<small>Set the default and fallback expression being used when no matching expression is found.</small>

View File

@ -1,4 +1,4 @@
import { getStringHash, debounce, waitUntilCondition, extractAllWords, delay } from '../../utils.js';
import { getStringHash, debounce, waitUntilCondition, extractAllWords } from '../../utils.js';
import { getContext, getApiUrl, extension_settings, doExtrasFetch, modules, renderExtensionTemplateAsync } from '../../extensions.js';
import {
activateSendButtons,

View File

@ -23,7 +23,6 @@ export async function getMultimodalCaption(base64Img, prompt) {
// OpenRouter has a payload limit of ~2MB. Google is 4MB, but we love democracy.
// Ooba requires all images to be JPEGs. Koboldcpp just asked nicely.
const isGoogle = extension_settings.caption.multimodal_api === 'google';
const isClaude = extension_settings.caption.multimodal_api === 'anthropic';
const isOllama = extension_settings.caption.multimodal_api === 'ollama';
const isLlamaCpp = extension_settings.caption.multimodal_api === 'llamacpp';
const isCustom = extension_settings.caption.multimodal_api === 'custom';

View File

@ -433,8 +433,8 @@ class AllTalkTtsProvider {
updateLanguageDropdown() {
const languageSelect = document.getElementById('language_options');
if (languageSelect) {
// Ensure default language is set
this.settings.language = this.settings.language;
// Removed: the self-assignment below was a no-op and did not actually set a default language.
// this.settings.language = this.settings.language;
languageSelect.innerHTML = '';
for (let language in this.languageLabels) {

View File

@ -1,12 +1,33 @@
import { eventSource, event_types, extension_prompt_types, getCurrentChatId, getRequestHeaders, is_send_press, saveSettingsDebounced, setExtensionPrompt, substituteParams } from '../../../script.js';
import { ModuleWorkerWrapper, extension_settings, getContext, modules, renderExtensionTemplateAsync } from '../../extensions.js';
import {
eventSource,
event_types,
extension_prompt_types,
extension_prompt_roles,
getCurrentChatId,
getRequestHeaders,
is_send_press,
saveSettingsDebounced,
setExtensionPrompt,
substituteParams,
generateRaw,
} from '../../../script.js';
import {
ModuleWorkerWrapper,
extension_settings,
getContext,
modules,
renderExtensionTemplateAsync,
doExtrasFetch, getApiUrl,
} from '../../extensions.js';
import { collapseNewlines } from '../../power-user.js';
import { SECRET_KEYS, secret_state, writeSecret } from '../../secrets.js';
import { getDataBankAttachments, getFileAttachment } from '../../chats.js';
import { debounce, getStringHash as calculateHash, waitUntilCondition, onlyUnique, splitRecursive } from '../../utils.js';
const MODULE_NAME = 'vectors';
export const EXTENSION_PROMPT_TAG = '3_vectors';
export const EXTENSION_PROMPT_TAG_DB = '4_vectors_data_bank';
const settings = {
// For both
@ -14,10 +35,14 @@ const settings = {
include_wi: false,
togetherai_model: 'togethercomputer/m2-bert-80M-32k-retrieval',
openai_model: 'text-embedding-ada-002',
summarize: false,
summarize_sent: false,
summary_source: 'main',
summary_prompt: 'Pause your roleplay. Summarize the most important parts of the message. Limit yourself to 250 words or less. Your response should include nothing but the summary.',
// For chats
enabled_chats: false,
template: 'Past events: {{text}}',
template: 'Past events:\n{{text}}',
depth: 2,
position: extension_prompt_types.IN_PROMPT,
protect: 5,
@ -30,6 +55,15 @@ const settings = {
size_threshold: 10,
chunk_size: 5000,
chunk_count: 2,
// For Data Bank
size_threshold_db: 5,
chunk_size_db: 2500,
chunk_count_db: 5,
file_template_db: 'Related information:\n{{text}}',
file_position_db: extension_prompt_types.IN_PROMPT,
file_depth_db: 4,
file_depth_role_db: extension_prompt_roles.SYSTEM,
};
const moduleWorker = new ModuleWorkerWrapper(synchronizeChat);
@ -113,6 +147,56 @@ function splitByChunks(items) {
return chunkedItems;
}
async function summarizeExtra(hashedMessages) {
for (const element of hashedMessages) {
try {
const url = new URL(getApiUrl());
url.pathname = '/api/summarize';
const apiResult = await doExtrasFetch(url, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Bypass-Tunnel-Reminder': 'bypass',
},
body: JSON.stringify({
text: element.text,
params: {},
}),
});
if (apiResult.ok) {
const data = await apiResult.json();
element.text = data.summary;
}
}
catch (error) {
console.log(error);
}
}
return hashedMessages;
}
async function summarizeMain(hashedMessages) {
for (const element of hashedMessages) {
element.text = await generateRaw(element.text, '', false, false, settings.summary_prompt);
}
return hashedMessages;
}
async function summarize(hashedMessages, endpoint = 'main') {
switch (endpoint) {
case 'main':
return await summarizeMain(hashedMessages);
case 'extras':
return await summarizeExtra(hashedMessages);
default:
console.error('Unsupported endpoint', endpoint);
}
}
async function synchronizeChat(batchSize = 5) {
if (!settings.enabled_chats) {
return -1;
@ -135,14 +219,20 @@ async function synchronizeChat(batchSize = 5) {
return -1;
}
const hashedMessages = context.chat.filter(x => !x.is_system).map(x => ({ text: String(x.mes), hash: getStringHash(x.mes), index: context.chat.indexOf(x) }));
let hashedMessages = context.chat.filter(x => !x.is_system).map(x => ({ text: String(substituteParams(x.mes)), hash: getStringHash(substituteParams(x.mes)), index: context.chat.indexOf(x) }));
const hashesInCollection = await getSavedHashes(chatId);
if (settings.summarize) {
hashedMessages = await summarize(hashedMessages, settings.summary_source);
}
const newVectorItems = hashedMessages.filter(x => !hashesInCollection.includes(x.hash));
const deletedHashes = hashesInCollection.filter(x => !hashedMessages.some(y => y.hash === x));
if (newVectorItems.length > 0) {
const chunkedBatch = splitByChunks(newVectorItems.slice(0, batchSize));
console.log(`Vectors: Found ${newVectorItems.length} new items. Processing ${batchSize}...`);
await insertVectorItems(chatId, chunkedBatch);
}
@ -214,6 +304,34 @@ async function processFiles(chat) {
return;
}
const dataBank = getDataBankAttachments();
const dataBankCollectionIds = [];
for (const file of dataBank) {
const collectionId = `file_${getStringHash(file.url)}`;
const hashesInCollection = await getSavedHashes(collectionId);
dataBankCollectionIds.push(collectionId);
// File is already in the collection
if (hashesInCollection.length) {
continue;
}
// Download and process the file
file.text = await getFileAttachment(file.url);
console.log(`Vectors: Retrieved file ${file.name} from Data Bank`);
// Convert kilobytes to string length
const thresholdLength = settings.size_threshold_db * 1024;
// Use chunk size from settings if file is larger than threshold
const chunkSize = file.size > thresholdLength ? settings.chunk_size_db : -1;
await vectorizeFile(file.text, file.name, collectionId, chunkSize);
}
if (dataBankCollectionIds.length) {
const queryText = await getQueryText(chat);
await injectDataBankChunks(queryText, dataBankCollectionIds);
}
for (const message of chat) {
// Message has no file
if (!message?.extra?.file) {
@ -222,8 +340,7 @@ async function processFiles(chat) {
// Trim file inserted by the script
const fileText = String(message.mes)
.substring(0, message.extra.fileLength).trim()
.replace(/^```/, '').replace(/```$/, '').trim();
.substring(0, message.extra.fileLength).trim();
// Convert kilobytes to string length
const thresholdLength = settings.size_threshold * 1024;
@ -236,25 +353,55 @@ async function processFiles(chat) {
message.mes = message.mes.substring(message.extra.fileLength);
const fileName = message.extra.file.name;
const collectionId = `file_${getStringHash(fileName)}`;
const fileUrl = message.extra.file.url;
const collectionId = `file_${getStringHash(fileUrl)}`;
const hashesInCollection = await getSavedHashes(collectionId);
// File is already in the collection
if (!hashesInCollection.length) {
await vectorizeFile(fileText, fileName, collectionId);
await vectorizeFile(fileText, fileName, collectionId, settings.chunk_size);
}
const queryText = getQueryText(chat);
const queryText = await getQueryText(chat);
const fileChunks = await retrieveFileChunks(queryText, collectionId);
// Wrap it back in a code block
message.mes = `\`\`\`\n${fileChunks}\n\`\`\`\n\n${message.mes}`;
message.mes = `${fileChunks}\n\n${message.mes}`;
}
} catch (error) {
console.error('Vectors: Failed to retrieve files', error);
}
}
/**
* Inserts file chunks from the Data Bank into the prompt.
* @param {string} queryText Text to query
* @param {string[]} collectionIds File collection IDs
* @returns {Promise<void>}
*/
async function injectDataBankChunks(queryText, collectionIds) {
try {
const queryResults = await queryMultipleCollections(collectionIds, queryText, settings.chunk_count_db);
console.debug(`Vectors: Retrieved ${collectionIds.length} Data Bank collections`, queryResults);
let textResult = '';
for (const collectionId in queryResults) {
console.debug(`Vectors: Processing Data Bank collection ${collectionId}`, queryResults[collectionId]);
const metadata = queryResults[collectionId].metadata?.filter(x => x.text)?.sort((a, b) => a.index - b.index)?.map(x => x.text)?.filter(onlyUnique) || [];
textResult += metadata.join('\n') + '\n\n';
}
if (!textResult) {
console.debug('Vectors: No Data Bank chunks found');
return;
}
const insertedText = substituteParams(settings.file_template_db.replace(/{{text}}/i, textResult));
setExtensionPrompt(EXTENSION_PROMPT_TAG_DB, insertedText, settings.file_position_db, settings.file_depth_db, settings.include_wi, settings.file_depth_role_db);
} catch (error) {
console.error('Vectors: Failed to insert Data Bank chunks', error);
}
}
/**
* Retrieves file chunks from the vector index and inserts them into the chat.
* @param {string} queryText Text to query
@ -276,16 +423,18 @@ async function retrieveFileChunks(queryText, collectionId) {
* @param {string} fileText File text
* @param {string} fileName File name
* @param {string} collectionId File collection ID
* @param {number} chunkSize Chunk size
*/
async function vectorizeFile(fileText, fileName, collectionId) {
async function vectorizeFile(fileText, fileName, collectionId, chunkSize) {
try {
toastr.info('Vectorization may take some time, please wait...', `Ingesting file ${fileName}`);
const chunks = splitRecursive(fileText, settings.chunk_size);
const toast = toastr.info('Vectorization may take some time, please wait...', `Ingesting file ${fileName}`);
const chunks = splitRecursive(fileText, chunkSize);
console.debug(`Vectors: Split file ${fileName} into ${chunks.length} chunks`, chunks);
const items = chunks.map((chunk, index) => ({ hash: getStringHash(chunk), text: chunk, index: index }));
await insertVectorItems(collectionId, items);
toastr.clear(toast);
console.log(`Vectors: Inserted ${chunks.length} vector items for file ${fileName} into ${collectionId}`);
} catch (error) {
console.error('Vectors: Failed to vectorize file', error);
@ -299,7 +448,8 @@ async function vectorizeFile(fileText, fileName, collectionId) {
async function rearrangeChat(chat) {
try {
// Clear the extension prompt
setExtensionPrompt(EXTENSION_PROMPT_TAG, '', extension_prompt_types.IN_PROMPT, 0, settings.include_wi);
setExtensionPrompt(EXTENSION_PROMPT_TAG, '', settings.position, settings.depth, settings.include_wi);
setExtensionPrompt(EXTENSION_PROMPT_TAG_DB, '', settings.file_position_db, settings.file_depth_db, settings.include_wi, settings.file_depth_role_db);
if (settings.enabled_files) {
await processFiles(chat);
@ -321,7 +471,7 @@ async function rearrangeChat(chat) {
return;
}
const queryText = getQueryText(chat);
const queryText = await getQueryText(chat);
if (queryText.length === 0) {
console.debug('Vectors: No text to query');
@ -339,7 +489,7 @@ async function rearrangeChat(chat) {
if (retainMessages.includes(message) || !message.mes) {
continue;
}
const hash = getStringHash(message.mes);
const hash = getStringHash(substituteParams(message.mes));
if (queryHashes.includes(hash) && !insertedHashes.has(hash)) {
queriedMessages.push(message);
insertedHashes.add(hash);
@ -348,7 +498,7 @@ async function rearrangeChat(chat) {
// Rearrange queried messages to match query order
// Order is reversed because more relevant are at the lower indices
queriedMessages.sort((a, b) => queryHashes.indexOf(getStringHash(b.mes)) - queryHashes.indexOf(getStringHash(a.mes)));
queriedMessages.sort((a, b) => queryHashes.indexOf(getStringHash(substituteParams(b.mes))) - queryHashes.indexOf(getStringHash(substituteParams(a.mes))));
// Remove queried messages from the original chat array
for (const message of chat) {
@ -387,15 +537,21 @@ const onChatEvent = debounce(async () => await moduleWorker.update(), 500);
/**
* Gets the text to query from the chat
* @param {object[]} chat Chat messages
* @returns {string} Text to query
* @returns {Promise<string>} Text to query
*/
function getQueryText(chat) {
async function getQueryText(chat) {
let queryText = '';
let i = 0;
for (const message of chat.slice().reverse()) {
if (message.mes) {
queryText += message.mes + '\n';
let hashedMessages = chat.map(x => ({ text: String(substituteParams(x.mes)) }));
if (settings.summarize && settings.summarize_sent) {
hashedMessages = await summarize(hashedMessages, settings.summary_source);
}
for (const message of hashedMessages.slice().reverse()) {
if (message.text) {
queryText += message.text + '\n';
i++;
}
@ -565,6 +721,65 @@ async function queryCollection(collectionId, searchText, topK) {
return await response.json();
}
/**
* Queries multiple collections for a given text.
* @param {string[]} collectionIds - Collection IDs to query
* @param {string} searchText - Text to query
* @param {number} topK - Number of results to return
* @returns {Promise<Record<string, { hashes: number[], metadata: object[] }>>} - Results mapped to collection IDs
*/
async function queryMultipleCollections(collectionIds, searchText, topK) {
const headers = getVectorHeaders();
const response = await fetch('/api/vector/query-multi', {
method: 'POST',
headers: headers,
body: JSON.stringify({
collectionIds: collectionIds,
searchText: searchText,
topK: topK,
source: settings.source,
}),
});
if (!response.ok) {
throw new Error('Failed to query multiple collections');
}
return await response.json();
}
/**
* Purges the vector index for a file.
* @param {string} fileUrl File URL to purge
*/
async function purgeFileVectorIndex(fileUrl) {
try {
if (!settings.enabled_files) {
return;
}
console.log(`Vectors: Purging file vector index for ${fileUrl}`);
const collectionId = `file_${getStringHash(fileUrl)}`;
const response = await fetch('/api/vector/purge', {
method: 'POST',
headers: getRequestHeaders(),
body: JSON.stringify({
collectionId: collectionId,
}),
});
if (!response.ok) {
throw new Error(`Could not delete vector index for collection ${collectionId}`);
}
console.log(`Vectors: Purged vector index for collection ${collectionId}`);
} catch (error) {
console.error('Vectors: Failed to purge file', error);
}
}
/**
* Purges the vector index for a collection.
* @param {string} collectionId Collection ID to purge
@ -636,7 +851,7 @@ async function onViewStatsClick() {
const chat = getContext().chat;
for (const message of chat) {
if (hashesInCollection.includes(getStringHash(message.mes))) {
if (hashesInCollection.includes(getStringHash(substituteParams(message.mes)))) {
const messageElement = $(`.mes[mesid="${chat.indexOf(message)}"]`);
messageElement.addClass('vectorized');
}
@ -757,12 +972,79 @@ jQuery(async () => {
saveSettingsDebounced();
});
$('#vectors_summarize').prop('checked', settings.summarize).on('input', () => {
settings.summarize = !!$('#vectors_summarize').prop('checked');
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});
$('#vectors_summarize_user').prop('checked', settings.summarize_sent).on('input', () => {
settings.summarize_sent = !!$('#vectors_summarize_user').prop('checked');
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});
$('#vectors_summary_source').val(settings.summary_source).on('change', () => {
settings.summary_source = String($('#vectors_summary_source').val());
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});
$('#vectors_summary_prompt').val(settings.summary_prompt).on('input', () => {
settings.summary_prompt = String($('#vectors_summary_prompt').val());
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});
$('#vectors_message_chunk_size').val(settings.message_chunk_size).on('input', () => {
settings.message_chunk_size = Number($('#vectors_message_chunk_size').val());
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});
$('#vectors_size_threshold_db').val(settings.size_threshold_db).on('input', () => {
settings.size_threshold_db = Number($('#vectors_size_threshold_db').val());
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});
$('#vectors_chunk_size_db').val(settings.chunk_size_db).on('input', () => {
settings.chunk_size_db = Number($('#vectors_chunk_size_db').val());
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});
$('#vectors_chunk_count_db').val(settings.chunk_count_db).on('input', () => {
settings.chunk_count_db = Number($('#vectors_chunk_count_db').val());
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});
$('#vectors_file_template_db').val(settings.file_template_db).on('input', () => {
settings.file_template_db = String($('#vectors_file_template_db').val());
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});
$(`input[name="vectors_file_position_db"][value="${settings.file_position_db}"]`).prop('checked', true);
$('input[name="vectors_file_position_db"]').on('change', () => {
settings.file_position_db = Number($('input[name="vectors_file_position_db"]:checked').val());
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});
$('#vectors_file_depth_db').val(settings.file_depth_db).on('input', () => {
settings.file_depth_db = Number($('#vectors_file_depth_db').val());
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});
$('#vectors_file_depth_role_db').val(settings.file_depth_role_db).on('input', () => {
settings.file_depth_role_db = Number($('#vectors_file_depth_role_db').val());
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});
const validSecret = !!secret_state[SECRET_KEYS.NOMICAI];
const placeholder = validSecret ? '✔️ Key saved' : '❌ Missing key';
$('#api_key_nomicai').attr('placeholder', placeholder);
@ -775,4 +1057,5 @@ jQuery(async () => {
eventSource.on(event_types.MESSAGE_SWIPED, onChatEvent);
eventSource.on(event_types.CHAT_DELETED, purgeVectorIndex);
eventSource.on(event_types.GROUP_CHAT_DELETED, purgeVectorIndex);
eventSource.on(event_types.FILE_ATTACHMENT_DELETED, purgeFileVectorIndex);
});
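For the Data Bank path added above, each attachment gets its own vector collection keyed by a hash of its URL, and the kilobyte thresholds are compared against string length (1 KB treated as 1024 characters). A short worked sketch with the new defaults (size_threshold_db = 5, chunk_size_db = 2500); the helper names are the ones from this diff and the file object is hypothetical:

// Illustrative Data Bank attachment (hypothetical values).
const file = { url: '/user/files/lore.txt', name: 'lore.txt', size: 12000 };

// One collection per file, keyed by the hash of its URL (the same id that
// purgeFileVectorIndex uses when the attachment is deleted).
const collectionId = `file_${getStringHash(file.url)}`;

// Kilobytes -> character-length threshold: 5 * 1024 = 5120 characters.
const thresholdLength = settings.size_threshold_db * 1024;

// 12000 > 5120, so the text is split into ~2500-character chunks before
// embedding; files at or under the threshold get chunkSize = -1, exactly
// as in processFiles() above.
const chunkSize = file.size > thresholdLength ? settings.chunk_size_db : -1;
await vectorizeFile(await getFileAttachment(file.url), file.name, collectionId, chunkSize);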

View File

@ -73,10 +73,12 @@
<input type="number" id="vectors_query" class="text_pole widthUnset" min="1" max="99" />
</div>
<label class="checkbox_label" for="vectors_include_wi" title="Query results can activate World Info entries.">
<input id="vectors_include_wi" type="checkbox" class="checkbox">
Include in World Info Scanning
</label>
<div class="flex-container">
<label class="checkbox_label expander" for="vectors_include_wi" title="Query results can activate World Info entries.">
<input id="vectors_include_wi" type="checkbox" class="checkbox">
Include in World Info Scanning
</label>
</div>
<hr>
@ -89,8 +91,10 @@
Enabled for files
</label>
<div id="vectors_files_settings">
<div id="vectors_files_settings" class="marginTopBot5">
<div class="flex justifyCenter" title="These settings apply to files attached directly to messages.">
<span>Message attachments</span>
</div>
<div class="flex-container">
<div class="flex1" title="Only files past this size will be vectorized.">
<label for="vectors_size_threshold">
@ -111,6 +115,58 @@
<input id="vectors_chunk_count" type="number" class="text_pole widthUnset" min="1" max="99999" />
</div>
</div>
<div class="flex justifyCenter" title="These settings apply to files stored in the Data Bank.">
<span>Data Bank files</span>
</div>
<div class="flex-container">
<div class="flex1" title="Only files past this size will be vectorized.">
<label for="vectors_size_threshold_db">
<small>Size threshold (KB)</small>
</label>
<input id="vectors_size_threshold_db" type="number" class="text_pole widthUnset" min="1" max="99999" />
</div>
<div class="flex1" title="Chunk size for file splitting.">
<label for="vectors_chunk_size_db">
<small>Chunk size (chars)</small>
</label>
<input id="vectors_chunk_size_db" type="number" class="text_pole widthUnset" min="1" max="99999" />
</div>
<div class="flex1" title="How many chunks to retrieve when querying.">
<label for="vectors_chunk_count_db">
<small>Retrieve chunks</small>
</label>
<input id="vectors_chunk_count_db" type="number" class="text_pole widthUnset" min="1" max="99999" />
</div>
</div>
<div class="flex-container flexFlowColumn">
<label for="vectors_file_template_db">
<span>Injection Template</span>
</label>
<textarea id="vectors_file_template_db" class="margin0 text_pole textarea_compact" rows="3" placeholder="Use &lcub;&lcub;text&rcub;&rcub; macro to specify the position of retrieved text."></textarea>
<label for="vectors_file_position_db">Injection Position</label>
<div class="radio_group">
<label>
<input type="radio" name="vectors_file_position_db" value="2" />
<span>Before Main Prompt / Story String</span>
</label>
<!--Keep these as 0 and 1 to interface with the setExtensionPrompt function-->
<label>
<input type="radio" name="vectors_file_position_db" value="0" />
<span>After Main Prompt / Story String</span>
</label>
<label for="vectors_file_depth_db" title="How many messages before the current end of the chat." data-i18n="[title]How many messages before the current end of the chat.">
<input type="radio" name="vectors_file_position_db" value="1" />
<span>In-chat @ Depth</span>
<input id="vectors_file_depth_db" class="text_pole widthUnset" type="number" min="0" max="999" />
<span>as</span>
<select id="vectors_file_depth_role_db" class="text_pole widthNatural">
<option value="0">System</option>
<option value="1">User</option>
<option value="2">Assistant</option>
</select>
</label>
</div>
</div>
</div>
<hr>
@ -123,12 +179,14 @@
Enabled for chat messages
</label>
<hr>
<div id="vectors_chats_settings">
<div id="vectors_advanced_settings">
<label for="vectors_template">
Insertion Template
Injection Template
</label>
<textarea id="vectors_template" class="text_pole textarea_compact" rows="3" placeholder="Use {{text}} macro to specify the position of retrieved text."></textarea>
<textarea id="vectors_template" class="text_pole textarea_compact" rows="3" placeholder="Use &lcub;&lcub;text&rcub;&rcub; macro to specify the position of retrieved text."></textarea>
<label for="vectors_position">Injection Position</label>
<div class="radio_group">
<label>
@ -165,6 +223,34 @@
<input type="number" id="vectors_insert" class="text_pole widthUnset" min="1" max="9999" />
</div>
</div>
<hr class="m-b-1">
<div class="flex-container flexFlowColumn">
<div class="flex-container alignitemscenter justifyCenter">
<i class="fa-solid fa-flask" title="Summarization for vectors is an experimental feature that may improve vectors or may worsen them. Use at your own discretion."></i>
<span>Vector Summarization</span>
</div>
<label class="checkbox_label expander" for="vectors_summarize" title="Summarize chat messages before generating embeddings.">
<input id="vectors_summarize" type="checkbox" class="checkbox">
Summarize chat messages for vector generation
</label>
<i class="failure">Warning: This will slow down vector generation drastically, as all messages have to be summarized first.</i>
<label class="checkbox_label expander" for="vectors_summarize_user" title="Summarize sent chat messages before generating embeddings.">
<input id="vectors_summarize_user" type="checkbox" class="checkbox">
Summarize chat messages when sending
</label>
<i class="failure">Warning: This might cause your sent messages to take a bit to process and slow down response time.</i>
<label for="vectors_summary_source">Summarize with:</label>
<select id="vectors_summary_source" class="text_pole">
<option value="main">Main API</option>
<option value="extras">Extras API</option>
</select>
<label for="vectors_summary_prompt">Summary Prompt:</label>
<small>Only used when Main API is selected.</small>
<textarea id="vectors_summary_prompt" class="text_pole textarea_compact" rows="6" placeholder="This prompt will be sent to AI to request the summary generation."></textarea>
</div>
</div>
<small>
Old messages are vectorized gradually as you chat.