Merge branch 'staging' of https://github.com/Cohee1207/SillyTavern into staging
commit 862fc56b39
@@ -39,6 +3,9 @@
"#@markdown Allows to run Stable Diffusion pipeline on CPU (slow!)\n",
"use_sd_cpu = False #@param {type:\"boolean\"}\n",
"#@markdown ***\n",
"#@markdown Enables the WebSearch module\n",
"extras_enable_websearch = True #@param {type:\"boolean\"}\n",
"#@markdown ***\n",
"#@markdown Loads the image captioning module\n",
"extras_enable_caption = True #@param {type:\"boolean\"}\n",
"captioning_model = \"Salesforce/blip-image-captioning-large\" #@param [ \"Salesforce/blip-image-captioning-large\", \"Salesforce/blip-image-captioning-base\" ]\n",
@@ -124,6 +127,12 @@
" params.append('--rvc-save-file')\n",
"\n",
"\n",
"if extras_enable_websearch:\n",
" print(\"Enabling WebSearch module\")\n",
" modules.append('websearch')\n",
" !apt update\n",
" !apt install -y chromium-chromedriver\n",
"\n",
"params.append(f'--classification-model={classification_model}')\n",
"params.append(f'--summarization-model={summarization_model}')\n",
"params.append(f'--captioning-model={captioning_model}')\n",
@@ -142,7 +151,7 @@
"\n",
"if extras_enable_rvc:\n",
" print(\"Installing RVC requirements\")\n",
" !pip install -r requirements-rvc.txt\n",
" %pip install -r requirements-rvc.txt\n",
"\n",
"# Generate a random API key\n",
"api_key = secrets.token_hex(5)\n",
@@ -1,12 +1,12 @@
{
"name": "sillytavern",
"version": "1.10.10",
"version": "1.11.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "sillytavern",
"version": "1.10.10",
"version": "1.11.0",
"hasInstallScript": true,
"license": "AGPL-3.0",
"dependencies": {
@@ -52,7 +52,7 @@
"type": "git",
"url": "https://github.com/SillyTavern/SillyTavern.git"
},
"version": "1.10.10",
"version": "1.11.0",
"scripts": {
"start": "node server.js",
"start-multi": "node server.js --disableCsrf",
@ -737,6 +737,17 @@
|
|||
<div id="openai_proxy_password_show" title="Peek a password" class="menu_button fa-solid fa-eye-slash fa-fw"></div>
|
||||
</div>
|
||||
</div>
|
||||
<div data-newbie-hidden class="range-block" data-source="openai,openrouter">
|
||||
<div class="range-block-title justifyLeft" data-i18n="Seed">
|
||||
Seed
|
||||
</div>
|
||||
<div class="toggle-description justifyLeft" data-i18n="Set to get deterministic results. Use -1 for random seed.">
|
||||
Set to get deterministic results. Use -1 for random seed.
|
||||
</div>
|
||||
<div class="wide100p">
|
||||
<input type="number" id="seed_openai" name="seed_openai" class="text_pole" min="-1" max="2147483647" value="-1">
|
||||
</div>
|
||||
</div>
|
||||
<div data-newbie-hidden class="range-block" data-source="openai,claude">
|
||||
<div class="range-block-title justifyLeft">
|
||||
<label for="legacy_streaming" class="checkbox_label">
|
||||
|
@ -756,16 +767,16 @@
|
|||
<div id="kobold_api-settings">
|
||||
<div class="flex-container gap10h5v justifyCenter">
|
||||
<div class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
|
||||
<small data-i18n="temperature">
|
||||
Temperature
|
||||
<small>
|
||||
<span data-i18n="temperature"Temperature></span>
|
||||
<div class="fa-solid fa-circle-info opacity50p" title="Temperature controls the randomness in token selection: - low temperature (<1.0) leads to more predictable text, favoring higher probability tokens. - high temperature (>1.0) increases creativity and diversity in the output by giving lower probability tokens a better chance. Set to 1.0 for the original probabilities."></div>
|
||||
</small>
|
||||
<input class="neo-range-slider" type="range" id="temp" name="volume" min="0.0" max="4.0" step="0.01">
|
||||
<input class="neo-range-input" type="number" min="0.0" max="4.0" step="0.01" data-for="temp" id="temp_counter">
|
||||
</div>
|
||||
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
|
||||
<small data-i18n="Top K">
|
||||
Top K
|
||||
<small>
|
||||
<span data-i18n="Top K">Top K</span>
|
||||
<div class="fa-solid fa-circle-info opacity50p" title="Top K sets a maximum amount of top tokens that can be chosen from. E.g Top K is 20, this means only the 20 highest ranking tokens will be kept (regardless of their probabilities being diverse or limited). Set to 0 to disable."></div>
|
||||
</small>
|
||||
<input class="neo-range-slider" type="range" id="top_k" name="volume" min="0" max="100" step="1">
|
||||
|
@ -780,54 +791,54 @@
|
|||
<input class="neo-range-input" type="number" min="0" max="1" step="0.01" data-for="top_p" id="top_p_counter">
|
||||
</div>
|
||||
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
|
||||
<small data-i18n="Typical P">
|
||||
Typical P
|
||||
<small>
|
||||
<span data-i18n="Typical P">Typical P</span>
|
||||
<div class="fa-solid fa-circle-info opacity50p" title="Typical P Sampling prioritizes tokens based on their deviation from the average entropy of the set. It maintains tokens whose cumulative probability is close to a predefined threshold (e.g., 0.5), emphasizing those with average information content. Set to 1.0 to disable."></div>
|
||||
</small>
|
||||
<input class="neo-range-slider" type="range" id="typical_p" name="volume" min="0" max="1" step="0.001">
|
||||
<input class="neo-range-input" type="number" min="0" max="1" step="0.001" data-for="typical_p" id="typical_p_counter">
|
||||
</div>
|
||||
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
|
||||
<small data-i18n="Min P">
|
||||
Min P
|
||||
<div class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
|
||||
<small>
|
||||
<span data-i18n="Min P">Min P</span>
|
||||
<div class="fa-solid fa-circle-info opacity50p" title="Min P sets a base minimum probability. This is scaled according to the top token's probability. E.g If Top token is 80% probability, and Min P is 0.1, only tokens higher than 8% would be considered. Set to 0 to disable."></div>
|
||||
</small>
|
||||
<input class="neo-range-slider" type="range" id="min_p" name="volume" min="0" max="1" step="0.001">
|
||||
<input class="neo-range-input" type="number" min="0" max="1" step="0.001" data-for="min_p" id="min_p_counter">
|
||||
</div>
|
||||
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
|
||||
<small data-i18n="Top A">
|
||||
Top A
|
||||
<small>
|
||||
<span data-i18n="Top A">Top A</span>
|
||||
<div class="fa-solid fa-circle-info opacity50p" title="Top A sets a threshold for token selection based on the square of the highest token probability. E.g if the Top-A value is 0.2 and the top token's probability is 50%, tokens with probabilities below 5% (0.2 * 0.5^2) are excluded. Set to 0 to disable."></div>
|
||||
</small>
|
||||
<input class="neo-range-slider" type="range" id="top_a" name="volume" min="0" max="1" step="0.001">
|
||||
<input class="neo-range-input" type="number" min="0" max="1" step="0.001" data-for="top_a" id="top_a_counter">
|
||||
</div>
|
||||
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
|
||||
<small data-i18n="Tail Free Sampling">
|
||||
Tail Free Sampling
|
||||
<small>
|
||||
<span data-i18n="Tail Free Sampling">Tail Free Sampling</span>
|
||||
<div class="fa-solid fa-circle-info opacity50p" title="Tail-Free Sampling (TFS) analyzes the rate of change in token probabilities using derivatives. It retains tokens up to a threshold (e.g., 0.3) based on the normalized second derivative. Set to 1.0 to disable."></div>
|
||||
</small>
|
||||
<input class="neo-range-slider" type="range" id="tfs" name="volume" min="0" max="1" step="0.001">
|
||||
<input class="neo-range-input" type="number" min="0" max="1" step="0.001" data-for="tfs" id="tfs_counter">
|
||||
</div>
|
||||
<div class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
|
||||
<small data-i18n="rep.pen">
|
||||
Repetition Penalty
|
||||
<small>
|
||||
<span data-i18n="rep.pen">Repetition Penalty</span>
|
||||
</small>
|
||||
<input class="neo-range-slider" type="range" id="rep_pen" name="volume" min="1" max="1.5" step="0.01">
|
||||
<input class="neo-range-input" type="number" min="1" max="1.5" step="0.01" data-for="rep_pen" id="rep_pen_counter">
|
||||
</div>
|
||||
<div class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
|
||||
<small data-i18n="rep.pen range">
|
||||
Repetition Penalty Range
|
||||
<small>
|
||||
<span data-i18n="rep.pen range">Repetition Penalty Range</span>
|
||||
</small>
|
||||
<input class="neo-range-slider" type="range" id="rep_pen_range" name="volume" min="0" max="4096" step="1">
|
||||
<input class="neo-range-input" type="number" min="0" max="4096" step="1" data-for="rep_pen_range" id="rep_pen_range_counter">
|
||||
</div>
|
||||
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
|
||||
<small data-i18n="Rep. Pen. Slope">
|
||||
Repetition Penalty Slope
|
||||
<small>
|
||||
<span data-i18n="Rep. Pen. Slope">Repetition Penalty Slope</span>
|
||||
</small>
|
||||
<input class="neo-range-slider" type="range" id="rep_pen_slope" name="volume" min="0" max="10" step="0.01">
|
||||
<input class="neo-range-input" type="number" min="0" max="10" step="0.01" data-for="rep_pen_slope" id="rep_pen_slope_counter">
|
||||
|
@ -862,8 +873,8 @@
|
|||
<div data-newbie-hidden class="alignitemscenter justifyCenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
|
||||
<label class="checkbox_label alignItemsBaseline" for="use_default_badwordsids">
|
||||
<input id="use_default_badwordsids" type="checkbox" />
|
||||
<span data-i18n="Ban EOS Token">
|
||||
Ban EOS Token
|
||||
<span>
|
||||
<span data-i18n="Ban EOS Token">Ban EOS Token</span>
|
||||
<small class="fa-solid fa-circle-info opacity50p" title="Ban the End-of-Sequence (EOS) token with KoboldCpp (and possibly also other tokens with KoboldAI). Good for story writing, but should not be used for chat and instruct mode."></small>
|
||||
</span>
|
||||
</label>
|
||||
|
@ -876,7 +887,7 @@
|
|||
</div>
|
||||
<div data-newbie-hidden id="grammar_block" class="wide100p">
|
||||
<hr class="wide100p">
|
||||
<h4 class="wide100p textAlignCenter" data-i18n="GBNF Grammar">GBNF Grammar
|
||||
<h4 class="wide100p textAlignCenter"><span data-i18n="GBNF Grammar">GBNF Grammar</span>
|
||||
<a href="https://github.com/ggerganov/llama.cpp/blob/master/grammars/README.md" target="_blank">
|
||||
<small>
|
||||
<div class="fa-solid fa-circle-question note-link-span"></div>
|
||||
|
@ -1143,17 +1154,26 @@
|
|||
<div id="textgenerationwebui_api-settings">
|
||||
<div class="flex-container gap10h5v justifyCenter">
|
||||
<div class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
|
||||
<small data-i18n="temperature">Temperature</small>
|
||||
<input class="neo-range-slider" type="range" id="temp_textgenerationwebui" name="volume" min="0.0" max="2.0" step="0.01" x-setting-id="temp">
|
||||
<input class="neo-range-input" type="number" min="0.0" max="2.0" step="0.01" data-for="temp_textgenerationwebui" id="temp_counter_textgenerationwebui">
|
||||
<small>
|
||||
<span data-i18n="temperature">Temperature</span>
|
||||
<div class="fa-solid fa-circle-info opacity50p" title="Temperature controls the randomness in token selection: - low temperature (<1.0) leads to more predictable text, favoring higher probability tokens. - high temperature (>1.0) increases creativity and diversity in the output by giving lower probability tokens a better chance. Set to 1.0 for the original probabilities."></div>
|
||||
</small>
|
||||
<input class="neo-range-slider" type="range" id="temp_textgenerationwebui" name="volume" min="0.0" max="4.0" step="0.01" x-setting-id="temp">
|
||||
<input class="neo-range-input" type="number" min="0.0" max="4.0" step="0.01" data-for="temp_textgenerationwebui" id="temp_counter_textgenerationwebui">
|
||||
</div>
|
||||
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
|
||||
<small data-i18n="Top K">Top K</small>
|
||||
<small>
|
||||
<span data-i18n="Top K">Top K</span>
|
||||
<div class="fa-solid fa-circle-info opacity50p" title="Top K sets a maximum amount of top tokens that can be chosen from. E.g Top K is 20, this means only the 20 highest ranking tokens will be kept (regardless of their probabilities being diverse or limited). Set to 0 to disable."></div>
|
||||
</small>
|
||||
<input class="neo-range-slider" type="range" id="top_k_textgenerationwebui" name="volume" min="0" max="200" step="1">
|
||||
<input class="neo-range-input" type="number" min="0" max="200" step="1" data-for="top_k_textgenerationwebui" id="top_k_counter_textgenerationwebui">
|
||||
</div>
|
||||
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
|
||||
<small data-i18n="Top P">Top P</small>
|
||||
<small>
|
||||
<span data-i18n="Top P">Top P</span>
|
||||
<div class="fa-solid fa-circle-info opacity50p" title="Top P adds up all the top tokens required to add up to the target percentage. E.g If the Top 2 tokens are both 25%, and Top P is 0.50, only the Top 2 tokens are considered. Set to 1.0 to disable."></div>
|
||||
</small>
|
||||
<input class="neo-range-slider" type="range" id="top_p_textgenerationwebui" name="volume" min="0" max="1" step="0.01">
|
||||
<input class="neo-range-input" type="number" min="0" max="1" step="0.01" data-for="top_p_textgenerationwebui" id="top_p_counter_textgenerationwebui">
|
||||
</div>
|
||||
|
@ -1162,8 +1182,11 @@
|
|||
<input class="neo-range-slider" type="range" id="typical_p_textgenerationwebui" name="volume" min="0" max="1" step="0.01">
|
||||
<input class="neo-range-input" type="number" min="0" max="1" step="0.01" data-for="typical_p_textgenerationwebui" id="typical_p_counter_textgenerationwebui">
|
||||
</div>
|
||||
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
|
||||
<small data-i18n="Min P">Min P</small>
|
||||
<div class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
|
||||
<small>
|
||||
<span data-i18n="Min P">Min P</span>
|
||||
<div class="fa-solid fa-circle-info opacity50p" title="Min P sets a base minimum probability. This is scaled according to the top token's probability. E.g If Top token is 80% probability, and Min P is 0.1, only tokens higher than 8% would be considered. Set to 0 to disable."></div>
|
||||
</small>
|
||||
<input class="neo-range-slider" type="range" id="min_p_textgenerationwebui" name="volume" min="0" max="1" step="0.01">
|
||||
<input class="neo-range-input" type="number" min="0" max="1" step="0.05" data-for="min_p_textgenerationwebui" id="min_p_counter_textgenerationwebui">
|
||||
</div>
|
||||
|
@ -1357,17 +1380,17 @@
|
|||
<textarea id="banned_tokens_textgenerationwebui" class="text_pole textarea_compact" name="banned_tokens_textgenerationwebui" rows="3" placeholder="Example: some text [42, 69, 1337]"></textarea>
|
||||
</div>
|
||||
</div>
|
||||
<div data-forAphro=False class="wide100p">
|
||||
<hr data-newbie-hidden class="width100p">
|
||||
<div data-newbie-hidden data-forAphro=False class="wide100p">
|
||||
<hr class="width100p">
|
||||
<h4 data-i18n="CFG" class="textAlignCenter">CFG
|
||||
<div class="margin5 fa-solid fa-circle-info opacity50p " title="Helpful tip coming soon."></div>
|
||||
</h4>
|
||||
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexShrink gap0">
|
||||
<div class="alignitemscenter flex-container flexFlowColumn flexShrink gap0">
|
||||
<small>Scale</small>
|
||||
<input class="neo-range-slider" type="range" id="guidance_scale_textgenerationwebui" name="volume" min="0.1" max="4" step="0.05">
|
||||
<input class="neo-range-input" type="number" min="0.1" max="4" step="0.05" data-for="guidance_scale_textgenerationwebui" id="guidance_scale_counter_textgenerationwebui">
|
||||
</div>
|
||||
<div data-newbie-hidden class="range-block">
|
||||
<div class="range-block">
|
||||
<div class="range-block-title justifyLeft">
|
||||
<span data-i18n="Negative Prompt">Negative Prompt</span>
|
||||
<small>
|
||||
|
@ -3975,7 +3998,7 @@
|
|||
<span class="flex-container alignitemscenter wide100p">
|
||||
<div class="WIEntryTitleAndStatus flex-container flex1 alignitemscenter">
|
||||
<div class="flex-container flex1">
|
||||
<textarea class="text_pole" name="comment" maxlength="5000" data-i18n="[placeholder]Entry Title/Memo" placeholder="Entry Title/Memo"></textarea>
|
||||
<textarea class="text_pole autoSetHeight" name="comment" maxlength="5000" data-i18n="[placeholder]Entry Title/Memo" placeholder="Entry Title/Memo"></textarea>
|
||||
</div>
|
||||
<!-- <span class="world_entry_form_position_value"></span> -->
|
||||
<select title="WI Entry Status: 🔵 Constant 🟢 Normal ❌ Disabled" name="entryStateSelector" class="widthNatural margin0">
|
||||
|
|
|
@@ -22,7 +22,8 @@
"droll",
"handlebars",
"highlight.js",
"localforage"
"localforage",
"pdfjs-dist"
]
}
}
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@@ -195,7 +195,7 @@ import { getBackgrounds, initBackgrounds } from "./scripts/backgrounds.js";
import { hideLoader, showLoader } from "./scripts/loader.js";
import { CharacterContextMenu, BulkEditOverlay } from "./scripts/BulkEditOverlay.js";
import { loadMancerModels } from "./scripts/mancer-settings.js";
import { hasPendingFileAttachment, populateFileAttachment } from "./scripts/chats.js";
import { getFileAttachment, hasPendingFileAttachment, populateFileAttachment } from "./scripts/chats.js";
import { replaceVariableMacros } from "./scripts/variables.js";

//exporting functions and vars for mods
@@ -3005,9 +3005,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
const blockHeading = main_api === 'openai' ? '<START>\n' : exampleSeparator;
let mesExamplesArray = mesExamples.split(/<START>/gi).slice(1).map(block => `${blockHeading}${block.trim()}\n`);

if (power_user.strip_examples)
mesExamplesArray = []

// First message in fresh 1-on-1 chat reacts to user/character settings changes
if (chat.length) {
chat[0].mes = substituteParams(chat[0].mes);
@@ -3019,22 +3016,28 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
coreChat.pop();
}

coreChat = coreChat.map(chatItem => {
coreChat = await Promise.all(coreChat.map(async (chatItem) => {
let message = chatItem.mes;
let regexType = chatItem.is_user ? regex_placement.USER_INPUT : regex_placement.AI_OUTPUT;
let options = { isPrompt: true };

let regexedMessage = getRegexedString(message, regexType, options);

if (chatItem.extra?.file?.text) {
regexedMessage += `\n\n${chatItem.extra.file.text}`;
if (chatItem.extra?.file) {
const fileText = chatItem.extra.file.text || (await getFileAttachment(chatItem.extra.file.url));

if (fileText) {
const fileWrapped = `\`\`\`\n${fileText}\n\`\`\`\n\n`;
chatItem.extra.fileLength = fileWrapped.length;
regexedMessage = fileWrapped + regexedMessage;
}
}

return {
...chatItem,
mes: regexedMessage,
};
});
}));

// Determine token limit
let this_max_context = getMaxContextSize();
@@ -3153,6 +3156,11 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,

const storyString = renderStoryString(storyStringParams);

// Story string rendered, safe to remove
if (power_user.strip_examples) {
mesExamplesArray = [];
}

let oaiMessages = [];
let oaiMessageExamples = [];

@@ -9057,6 +9065,7 @@ jQuery(async function () {
hideStopButton();
}
eventSource.emit(event_types.GENERATION_STOPPED);
activateSendButtons();
});

$('.drawer-toggle').on('click', function () {
@@ -8,14 +8,33 @@ import {
eventSource,
event_types,
getCurrentChatId,
getRequestHeaders,
hideSwipeButtons,
name2,
saveChatDebounced,
showSwipeButtons,
} from "../script.js";
import { getBase64Async, humanFileSize, saveBase64AsFile } from "./utils.js";
import {
extractTextFromHTML,
extractTextFromMarkdown,
extractTextFromPDF,
getBase64Async,
getStringHash,
humanFileSize,
saveBase64AsFile,
} from "./utils.js";

const fileSizeLimit = 1024 * 1024 * 1; // 1 MB
const fileSizeLimit = 1024 * 1024 * 10; // 10 MB

const converters = {
'application/pdf': extractTextFromPDF,
'text/html': extractTextFromHTML,
'text/markdown': extractTextFromMarkdown,
}

function isConvertible(type) {
return Object.keys(converters).includes(type);
}

/**
* Mark message as hidden (system message).
@@ -70,7 +89,7 @@ export async function unhideChatMessage(messageId, messageBlock) {
/**
* Adds a file attachment to the message.
* @param {object} message Message object
* @returns {Promise<void>}
* @returns {Promise<void>} A promise that resolves when file is uploaded.
*/
export async function populateFileAttachment(message, inputId = 'file_form_input') {
try {
@@ -81,18 +100,38 @@ export async function populateFileAttachment(message, inputId = 'file_form_input
const file = fileInput.files[0];
if (!file) return;

const fileBase64 = await getBase64Async(file);
let base64Data = fileBase64.split(',')[1];

// If file is image
if (file.type.startsWith('image/')) {
const base64Img = await getBase64Async(file);
const base64ImgData = base64Img.split(',')[1];
const extension = file.type.split('/')[1];
const imageUrl = await saveBase64AsFile(base64ImgData, name2, file.name, extension);
const imageUrl = await saveBase64AsFile(base64Data, name2, file.name, extension);
message.extra.image = imageUrl;
message.extra.inline_image = true;
} else {
const fileText = await file.text();
const slug = getStringHash(file.name);
const uniqueFileName = `${Date.now()}_${slug}.txt`;

if (isConvertible(file.type)) {
try {
const converter = converters[file.type];
const fileText = await converter(file);
base64Data = window.btoa(unescape(encodeURIComponent(fileText)));
} catch (error) {
toastr.error(String(error), 'Could not convert file');
console.error('Could not convert file', error);
}
}

const fileUrl = await uploadFileAttachment(uniqueFileName, base64Data);

if (!fileUrl) {
return;
}

message.extra.file = {
text: fileText,
url: fileUrl,
size: file.size,
name: file.name,
};
@@ -105,6 +144,62 @@ export async function populateFileAttachment(message, inputId = 'file_form_input
}
}

/**
* Uploads file to the server.
* @param {string} fileName
* @param {string} base64Data
* @returns {Promise<string>} File URL
*/
export async function uploadFileAttachment(fileName, base64Data) {
try {
const result = await fetch('/api/file/upload', {
method: 'POST',
headers: getRequestHeaders(),
body: JSON.stringify({
name: fileName,
data: base64Data,
}),
});

if (!result.ok) {
const error = await result.text();
throw new Error(error);
}

const responseData = await result.json();
return responseData.path.replace(/\\/g, '/');
} catch (error) {
toastr.error(String(error), 'Could not upload file');
console.error('Could not upload file', error);
}
}

/**
* Downloads file from the server.
* @param {string} url File URL
* @returns {Promise<string>} File text
*/
export async function getFileAttachment(url) {
try {
const result = await fetch(url, {
method: 'GET',
cache: 'force-cache',
headers: getRequestHeaders(),
});

if (!result.ok) {
const error = await result.text();
throw new Error(error);
}

const text = await result.text();
return text;
} catch (error) {
toastr.error(error, 'Could not download file');
console.error('Could not download file', error);
}
}

/**
* Validates file to make sure it is not binary or not image.
* @param {File} file File object
@@ -121,7 +216,7 @@ async function validateFile(file) {
}

// If file is binary
if (isBinary && !isImage) {
if (isBinary && !isImage && !isConvertible(file.type)) {
toastr.error('Binary files are not supported. Select a text file or image.');
return false;
}
@@ -193,22 +288,23 @@ async function deleteMessageFile(messageId) {
* @param {number} messageId Message ID
*/
async function viewMessageFile(messageId) {
const messageText = chat[messageId]?.extra?.file?.text;
const messageFile = chat[messageId]?.extra?.file;

if (!messageText) {
if (!messageFile) {
console.debug('Message has no file or it is empty');
return;
}

const fileText = messageFile.text || (await getFileAttachment(messageFile.url));

const modalTemplate = $('<div><pre><code></code></pre></div>');
modalTemplate.find('code').addClass('txt').text(messageText);
modalTemplate.find('code').addClass('txt').text(fileText);
modalTemplate.addClass('file_modal');
addCopyToCodeBlocks(modalTemplate);

callPopup(modalTemplate, 'text');
}

/**
* Inserts a file embed into the message.
* @param {number} messageId
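Illustrative sketch (not introduced by this commit): how the new upload/download helpers above compose, assuming they are exported from chats.js as shown; the file name is hypothetical.

// Encodes text the same way populateFileAttachment() does, uploads it via
// /api/file/upload, then lazily fetches it back the way Generate() now does.
import { uploadFileAttachment, getFileAttachment } from "./scripts/chats.js";

async function roundTripAttachment(text) {
    const base64Data = window.btoa(unescape(encodeURIComponent(text)));
    const fileUrl = await uploadFileAttachment(`${Date.now()}_example.txt`, base64Data);
    return fileUrl ? await getFileAttachment(fileUrl) : null;
}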
@@ -852,7 +852,7 @@ async function autoUpdateExtensions() {
return;
}

toastr.info('Auto-updating extensions. This may take several minutes.', 'Please wait...', { timeOut: 10000, extendedTimeOut: 20000 });
const banner = toastr.info('Auto-updating extensions. This may take several minutes.', 'Please wait...', { timeOut: 10000, extendedTimeOut: 10000 });
const promises = [];
for (const [id, manifest] of Object.entries(manifests)) {
if (manifest.auto_update && id.startsWith('third-party')) {
@@ -861,6 +861,7 @@ async function autoUpdateExtensions() {
}
}
await Promise.allSettled(promises);
toastr.clear(banner);
}

/**
@@ -1,7 +1,9 @@
import { callPopup, getCurrentChatId, reloadCurrentChat, saveSettingsDebounced } from "../../../script.js";
import { extension_settings } from "../../extensions.js";
import { registerSlashCommand } from "../../slash-commands.js";
import { getSortableDelay, uuidv4 } from "../../utils.js";
import { regex_placement } from "./engine.js";
import { resolveVariable } from "../../variables.js";
import { regex_placement, runRegexScript } from "./engine.js";

async function saveRegexScript(regexScript, existingScriptIndex) {
// If not editing
@@ -242,6 +244,36 @@ function migrateSettings() {
}
}

/**
* /regex slash command callback
* @param {object} args Named arguments
* @param {string} value Unnamed argument
* @returns {string} The regexed string
*/
function runRegexCallback(args, value) {
if (!args.name) {
toastr.warning("No regex script name provided.");
return value;
}

const scriptName = String(resolveVariable(args.name));

for (const script of extension_settings.regex) {
if (String(script.scriptName).toLowerCase() === String(scriptName).toLowerCase()) {
if (script.disabled) {
toastr.warning(`Regex script "${scriptName}" is disabled.`);
return value;
}

console.debug(`Running regex callback for ${scriptName}`);
return runRegexScript(script, value);
}
}

toastr.warning(`Regex script "${scriptName}" not found.`);
return value;
}

// Workaround for loading in sequence with other extensions
// NOTE: Always puts extension at the top of the list, but this is fine since it's static
jQuery(async () => {
@@ -282,4 +314,6 @@ jQuery(async () => {

await loadRegexScripts();
$("#saved_regex_scripts").sortable("enable");

registerSlashCommand('regex', runRegexCallback, [], '(name=scriptName [input]) – runs a Regex extension script by name on the provided string. The script must be enabled.', true, true);
});
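Illustrative usage (not introduced by this commit): with the slash command registered above, an enabled script can be invoked from the chat input roughly like

/regex name="Title Case" some text to transform

where "Title Case" is a hypothetical script name; the command returns the transformed text, so it can feed the next step of an STscript chain.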
@@ -2387,7 +2387,7 @@ async function sdMessageButton(e) {
}
else {
console.log("doing /sd raw last");
await generatePicture('sd', 'raw_last', `${characterName} said: ${messageText}`, saveGeneratedImage);
await generatePicture('sd', 'raw_last', messageText, saveGeneratedImage);
}
}
catch (error) {
@@ -2,28 +2,37 @@ import { eventSource, event_types, extension_prompt_types, getCurrentChatId, get
import { ModuleWorkerWrapper, extension_settings, getContext, renderExtensionTemplate } from "../../extensions.js";
import { collapseNewlines, power_user, ui_mode } from "../../power-user.js";
import { SECRET_KEYS, secret_state } from "../../secrets.js";
import { debounce, getStringHash as calculateHash, waitUntilCondition, onlyUnique } from "../../utils.js";
import { debounce, getStringHash as calculateHash, waitUntilCondition, onlyUnique, splitRecursive } from "../../utils.js";

const MODULE_NAME = 'vectors';

export const EXTENSION_PROMPT_TAG = '3_vectors';

const settings = {
enabled: false,
// For both
source: 'transformers',

// For chats
enabled_chats: false,
template: `Past events: {{text}}`,
depth: 2,
position: extension_prompt_types.IN_PROMPT,
protect: 5,
insert: 3,
query: 2,

// For files
enabled_files: false,
size_threshold: 10,
chunk_size: 5000,
chunk_count: 2,
};

const moduleWorker = new ModuleWorkerWrapper(synchronizeChat);

async function onVectorizeAllClick() {
try {
if (!settings.enabled) {
if (!settings.enabled_chats) {
return;
}

@@ -78,7 +87,7 @@ async function onVectorizeAllClick() {
let syncBlocked = false;

async function synchronizeChat(batchSize = 5) {
if (!settings.enabled) {
if (!settings.enabled_chats) {
return -1;
}

@@ -99,7 +108,7 @@ async function synchronizeChat(batchSize = 5) {
return -1;
}

const hashedMessages = context.chat.filter(x => !x.is_system).map(x => ({ text: String(x.mes), hash: getStringHash(x.mes) }));
const hashedMessages = context.chat.filter(x => !x.is_system).map(x => ({ text: String(x.mes), hash: getStringHash(x.mes), index: context.chat.indexOf(x) }));
const hashesInCollection = await getSavedHashes(chatId);

const newVectorItems = hashedMessages.filter(x => !hashesInCollection.includes(x.hash));
@@ -149,6 +158,95 @@ function getStringHash(str) {
return hash;
}

/**
* Retrieves files from the chat and inserts them into the vector index.
* @param {object[]} chat Array of chat messages
* @returns {Promise<void>}
*/
async function processFiles(chat) {
try {
if (!settings.enabled_files) {
return;
}

for (const message of chat) {
// Message has no file
if (!message?.extra?.file) {
continue;
}

// Trim file inserted by the script
const fileText = String(message.mes)
.substring(0, message.extra.fileLength).trim()
.replace(/^```/, '').replace(/```$/, '').trim();

// Convert kilobytes to string length
const thresholdLength = settings.size_threshold * 1024;

// File is too small
if (fileText.length < thresholdLength) {
continue;
}

message.mes = message.mes.substring(message.extra.fileLength);

const fileName = message.extra.file.name;
const collectionId = `file_${getStringHash(fileName)}`;
const hashesInCollection = await getSavedHashes(collectionId);

// File is already in the collection
if (!hashesInCollection.length) {
await vectorizeFile(fileText, fileName, collectionId);
}

const queryText = getQueryText(chat);
const fileChunks = await retrieveFileChunks(queryText, collectionId);

// Wrap it back in a code block
message.mes = `\`\`\`\n${fileChunks}\n\`\`\`\n\n${message.mes}`;
}
} catch (error) {
console.error('Vectors: Failed to retrieve files', error);
}
}

/**
* Retrieves file chunks from the vector index and inserts them into the chat.
* @param {string} queryText Text to query
* @param {string} collectionId File collection ID
* @returns {Promise<string>} Retrieved file text
*/
async function retrieveFileChunks(queryText, collectionId) {
console.debug(`Vectors: Retrieving file chunks for collection ${collectionId}`, queryText);
const queryResults = await queryCollection(collectionId, queryText, settings.chunk_count);
console.debug(`Vectors: Retrieved ${queryResults.hashes.length} file chunks for collection ${collectionId}`, queryResults);
const metadata = queryResults.metadata.filter(x => x.text).sort((a, b) => a.index - b.index).map(x => x.text);
const fileText = metadata.join('\n');

return fileText;
}

/**
* Vectorizes a file and inserts it into the vector index.
* @param {string} fileText File text
* @param {string} fileName File name
* @param {string} collectionId File collection ID
*/
async function vectorizeFile(fileText, fileName, collectionId) {
try {
toastr.info("Vectorization may take some time, please wait...", `Ingesting file ${fileName}`);
const chunks = splitRecursive(fileText, settings.chunk_size);
console.debug(`Vectors: Split file ${fileName} into ${chunks.length} chunks`, chunks);

const items = chunks.map((chunk, index) => ({ hash: getStringHash(chunk), text: chunk, index: index }));
await insertVectorItems(collectionId, items);

console.log(`Vectors: Inserted ${chunks.length} vector items for file ${fileName} into ${collectionId}`);
} catch (error) {
console.error('Vectors: Failed to vectorize file', error);
}
}

/**
* Removes the most relevant messages from the chat and displays them in the extension prompt
* @param {object[]} chat Array of chat messages
@@ -158,7 +256,11 @@ async function rearrangeChat(chat) {
// Clear the extension prompt
setExtensionPrompt(EXTENSION_PROMPT_TAG, '', extension_prompt_types.IN_PROMPT, 0);

if (!settings.enabled) {
if (settings.enabled_files) {
await processFiles(chat);
}

if (!settings.enabled_chats) {
return;
}

@@ -182,7 +284,8 @@ async function rearrangeChat(chat) {
}

// Get the most relevant messages, excluding the last few
const queryHashes = (await queryCollection(chatId, queryText, settings.insert)).filter(onlyUnique);
const queryResults = await queryCollection(chatId, queryText, settings.query);
const queryHashes = queryResults.hashes.filter(onlyUnique);
const queriedMessages = [];
const insertedHashes = new Set();
const retainMessages = chat.slice(-settings.protect);
@@ -335,7 +438,7 @@ async function deleteVectorItems(collectionId, hashes) {
* @param {string} collectionId - The collection to query
* @param {string} searchText - The text to query
* @param {number} topK - The number of results to return
* @returns {Promise<number[]>} - Hashes of the results
* @returns {Promise<{ hashes: number[], metadata: object[]}>} - Hashes of the results
*/
async function queryCollection(collectionId, searchText, topK) {
const response = await fetch('/api/vector/query', {
@@ -359,7 +462,7 @@ async function queryCollection(collectionId, searchText, topK) {

async function purgeVectorIndex(collectionId) {
try {
if (!settings.enabled) {
if (!settings.enabled_chats) {
return;
}

@@ -382,19 +485,36 @@ async function purgeVectorIndex(collectionId) {
}
}

function toggleSettings() {
$('#vectors_files_settings').toggle(!!settings.enabled_files);
$('#vectors_chats_settings').toggle(!!settings.enabled_chats);
}

jQuery(async () => {
if (!extension_settings.vectors) {
extension_settings.vectors = settings;
}

// Migrate from old settings
if (settings['enabled']) {
settings.enabled_chats = true;
}

Object.assign(settings, extension_settings.vectors);
// Migrate from TensorFlow to Transformers
settings.source = settings.source !== 'local' ? settings.source : 'transformers';
$('#extensions_settings2').append(renderExtensionTemplate(MODULE_NAME, 'settings'));
$('#vectors_enabled').prop('checked', settings.enabled).on('input', () => {
settings.enabled = $('#vectors_enabled').prop('checked');
$('#vectors_enabled_chats').prop('checked', settings.enabled_chats).on('input', () => {
settings.enabled_chats = $('#vectors_enabled_chats').prop('checked');
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
toggleSettings();
});
$('#vectors_enabled_files').prop('checked', settings.enabled_files).on('input', () => {
settings.enabled_files = $('#vectors_enabled_files').prop('checked');
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
toggleSettings();
});
$('#vectors_source').val(settings.source).on('change', () => {
settings.source = String($('#vectors_source').val());
@@ -436,6 +556,25 @@ jQuery(async () => {

$('#vectors_vectorize_all').on('click', onVectorizeAllClick);

$('#vectors_size_threshold').val(settings.size_threshold).on('input', () => {
settings.size_threshold = Number($('#vectors_size_threshold').val());
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});

$('#vectors_chunk_size').val(settings.chunk_size).on('input', () => {
settings.chunk_size = Number($('#vectors_chunk_size').val());
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});

$('#vectors_chunk_count').val(settings.chunk_count).on('input', () => {
settings.chunk_count = Number($('#vectors_chunk_count').val());
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});

toggleSettings();
eventSource.on(event_types.MESSAGE_DELETED, onChatEvent);
eventSource.on(event_types.MESSAGE_EDITED, onChatEvent);
eventSource.on(event_types.MESSAGE_SENT, onChatEvent);
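Illustrative sketch (not introduced by this commit): how the file-vectorization helpers above fit together with the defaults in `settings`; the arguments are hypothetical.

// Mirrors what processFiles() does for a large attachment: one collection per file,
// chunks of settings.chunk_size characters, and the top settings.chunk_count
// chunks retrieved back for the current query text.
async function fileChunksForQuery(fileText, fileName, queryText) {
    const collectionId = `file_${getStringHash(fileName)}`;
    const hashesInCollection = await getSavedHashes(collectionId);
    if (!hashesInCollection.length) {
        await vectorizeFile(fileText, fileName, collectionId); // splitRecursive + insertVectorItems
    }
    return await retrieveFileChunks(queryText, collectionId);
}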
@ -5,72 +5,119 @@
|
|||
<div class="inline-drawer-icon fa-solid fa-circle-chevron-down down"></div>
|
||||
</div>
|
||||
<div class="inline-drawer-content">
|
||||
<label class="checkbox_label" for="vectors_enabled">
|
||||
<input id="vectors_enabled" type="checkbox" class="checkbox">
|
||||
Enabled
|
||||
</label>
|
||||
<label for="vectors_source">
|
||||
Vectorization Source
|
||||
</label>
|
||||
<select id="vectors_source" class="select">
|
||||
<option value="transformers">Local (Transformers)</option>
|
||||
<option value="openai">OpenAI</option>
|
||||
<option value="palm">Google MakerSuite (PaLM)</option>
|
||||
</select>
|
||||
<div id="vectors_advanced_settings" data-newbie-hidden>
|
||||
<label for="vectors_template">
|
||||
Insertion Template
|
||||
<div class="flex-container flexFlowColumn">
|
||||
<label for="vectors_source">
|
||||
Vectorization Source
|
||||
</label>
|
||||
<textarea id="vectors_template" class="text_pole textarea_compact autoSetHeight" rows="2" placeholder="Use {{text}} macro to specify the position of retrieved text."></textarea>
|
||||
<label for="vectors_position">Injection Position</label>
|
||||
<div class="radio_group">
|
||||
<label>
|
||||
<input type="radio" name="vectors_position" value="2" />
|
||||
Before Main Prompt / Story String
|
||||
</label>
|
||||
<!--Keep these as 0 and 1 to interface with the setExtensionPrompt function-->
|
||||
<label>
|
||||
<input type="radio" name="vectors_position" value="0" />
|
||||
After Main Prompt / Story String
|
||||
</label>
|
||||
<label>
|
||||
<input type="radio" name="vectors_position" value="1" />
|
||||
In-chat @ Depth <input id="vectors_depth" class="text_pole widthUnset" type="number" min="0" max="999" />
|
||||
</label>
|
||||
</div>
|
||||
<select id="vectors_source" class="text_pole">
|
||||
<option value="transformers">Local (Transformers)</option>
|
||||
<option value="openai">OpenAI</option>
|
||||
<option value="palm">Google MakerSuite (PaLM)</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<div class="flex-container flexFlowColumn" title="How many last messages will be matched for relevance.">
|
||||
<label for="vectors_query">
|
||||
<span>Query messages</span>
|
||||
</label>
|
||||
<input type="number" id="vectors_query" class="text_pole widthUnset" min="1" max="99" />
|
||||
</div>
|
||||
|
||||
<hr>
|
||||
|
||||
<h4>
|
||||
File vectorization settings
|
||||
</h4>
|
||||
|
||||
<label class="checkbox_label" for="vectors_enabled_files">
|
||||
<input id="vectors_enabled_files" type="checkbox" class="checkbox">
|
||||
Enabled for files
|
||||
</label>
|
||||
|
||||
<div id="vectors_files_settings">
|
||||
|
||||
<div class="flex-container">
|
||||
<div class="flex1" title="Prevents last N messages from being placed out of order.">
|
||||
<label for="vectors_protect">
|
||||
<small>Retain#</small>
|
||||
<div class="flex1" title="Only files past this size will be vectorized.">
|
||||
<label for="vectors_size_threshold">
|
||||
<small>Size threshold (KB)</small>
|
||||
</label>
|
||||
<input type="number" id="vectors_protect" class="text_pole widthUnset" min="1" max="99" />
|
||||
<input id="vectors_size_threshold" type="number" class="text_pole widthUnset" min="1" max="99999" />
|
||||
</div>
|
||||
<div class="flex1" title="How many last messages will be matched for relevance.">
|
||||
<label for="vectors_query">
|
||||
<small>Query#</small>
|
||||
<div class="flex1" title="Chunk size for file splitting.">
|
||||
<label for="vectors_chunk_size">
|
||||
<small>Chunk size (chars)</small>
|
||||
</label>
|
||||
<input type="number" id="vectors_query" class="text_pole widthUnset" min="1" max="99" />
|
||||
<input id="vectors_chunk_size" type="number" class="text_pole widthUnset" min="1" max="99999" />
|
||||
</div>
|
||||
<div class="flex1" title="How many past messages to insert as memories.">
|
||||
<label for="vectors_insert">
|
||||
<small>Insert#</small>
|
||||
<div class="flex1" title="How many chunks to retrieve when querying.">
|
||||
<label for="vectors_chunk_count">
|
||||
<small>Retrieve chunks</small>
|
||||
</label>
|
||||
<input type="number" id="vectors_insert" class="text_pole widthUnset" min="1" max="99" />
|
||||
<input id="vectors_chunk_count" type="number" class="text_pole widthUnset" min="1" max="99999" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<small>
|
||||
Old messages are vectorized gradually as you chat.
|
||||
To process all previous messages, click the button below.
|
||||
</small>
|
||||
<div id="vectors_vectorize_all" class="menu_button menu_button_icon">
|
||||
Vectorize All
|
||||
</div>
|
||||
<div id="vectorize_progress" style="display: none;">
|
||||
|
||||
<hr>
|
||||
|
||||
<h4>
|
||||
Chat vectorization settings
|
||||
</h4>
|
||||
<label class="checkbox_label" for="vectors_enabled_chats">
|
||||
<input id="vectors_enabled_chats" type="checkbox" class="checkbox">
|
||||
Enabled for chat messages
|
||||
</label>
|
||||
|
||||
<div id="vectors_chats_settings">
|
||||
<div id="vectors_advanced_settings" data-newbie-hidden>
|
||||
<label for="vectors_template">
|
||||
Insertion Template
|
||||
</label>
|
||||
<textarea id="vectors_template" class="text_pole textarea_compact" rows="3" placeholder="Use {{text}} macro to specify the position of retrieved text."></textarea>
|
||||
<label for="vectors_position">Injection Position</label>
|
||||
<div class="radio_group">
|
||||
<label>
|
||||
<input type="radio" name="vectors_position" value="2" />
|
||||
Before Main Prompt / Story String
|
||||
</label>
|
||||
<!--Keep these as 0 and 1 to interface with the setExtensionPrompt function-->
|
||||
<label>
|
||||
<input type="radio" name="vectors_position" value="0" />
|
||||
After Main Prompt / Story String
|
||||
</label>
|
||||
<label>
|
||||
<input type="radio" name="vectors_position" value="1" />
|
||||
In-chat @ Depth <input id="vectors_depth" class="text_pole widthUnset" type="number" min="0" max="999" />
|
||||
</label>
|
||||
</div>
|
||||
<div class="flex-container">
|
||||
<div class="flex1" title="Prevents last N messages from being placed out of order.">
|
||||
<label for="vectors_protect">
|
||||
<small>Retain#</small>
|
||||
</label>
|
||||
<input type="number" id="vectors_protect" class="text_pole widthUnset" min="1" max="99" />
|
||||
</div>
|
||||
<div class="flex1" title="How many past messages to insert as memories.">
|
||||
<label for="vectors_insert">
|
||||
<small>Insert#</small>
|
||||
</label>
|
||||
<input type="number" id="vectors_insert" class="text_pole widthUnset" min="1" max="99" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<small>
|
||||
Processed <span id="vectorize_progress_percent">0</span>% of messages.
|
||||
ETA: <span id="vectorize_progress_eta">...</span> seconds.
|
||||
Old messages are vectorized gradually as you chat.
|
||||
To process all previous messages, click the button below.
|
||||
</small>
|
||||
<div id="vectors_vectorize_all" class="menu_button menu_button_icon">
|
||||
Vectorize All
|
||||
</div>
|
||||
<div id="vectorize_progress" style="display: none;">
|
||||
<small>
|
||||
Processed <span id="vectorize_progress_percent">0</span>% of messages.
|
||||
ETA: <span id="vectorize_progress_eta">...</span> seconds.
|
||||
</small>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@@ -114,7 +114,7 @@ const max_128k = 128 * 1000;
const max_200k = 200 * 1000;
const scale_max = 8191;
const claude_max = 9000; // We have a proper tokenizer, so theoretically could be larger (up to 9k)
const palm2_max = 7500; // The real context window is 8192, spare some for padding due to using turbo tokenizer
const palm2_max = 7400; // The real context window is 8192, spare some for padding due to using turbo tokenizer
const claude_100k_max = 99000;
let ai21_max = 9200; //can easily fit 9k gpt tokens because j2's tokenizer is efficient af
const unlocked_max = 100 * 1024;
@@ -229,6 +229,7 @@ const default_settings = {
squash_system_messages: false,
image_inlining: false,
bypass_status_check: false,
seed: -1,
};

const oai_settings = {
@@ -282,6 +283,7 @@ const oai_settings = {
squash_system_messages: false,
image_inlining: false,
bypass_status_check: false,
seed: -1,
};

let openai_setting_names;
@@ -1557,6 +1559,10 @@ async function sendOpenAIRequest(type, messages, signal) {
generate_data['stop_tokens'] = [name1 + ':', oai_settings.new_chat_prompt, oai_settings.new_group_chat_prompt];
}

if ((isOAI || isOpenRouter) && oai_settings.seed >= 0) {
generate_data['seed'] = oai_settings.seed;
}

const generate_url = '/generate_openai';
const response = await fetch(generate_url, {
method: 'POST',
@@ -2415,6 +2421,7 @@ function loadOpenAISettings(data, settings) {

$('#top_k_openai').val(oai_settings.top_k_openai);
$('#top_k_counter_openai').val(Number(oai_settings.top_k_openai).toFixed(0));
$('#seed_openai').val(oai_settings.seed);

if (settings.reverse_proxy !== undefined) oai_settings.reverse_proxy = settings.reverse_proxy;
$('#openai_reverse_proxy').val(oai_settings.reverse_proxy);
@@ -2594,6 +2601,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
use_alt_scale: settings.use_alt_scale,
squash_system_messages: settings.squash_system_messages,
image_inlining: settings.image_inlining,
seed: settings.seed,
};

const savePresetSettings = await fetch(`/api/presets/save-openai?name=${name}`, {
@@ -2953,6 +2961,7 @@ function onSettingsPresetChange() {
use_alt_scale: ['#use_alt_scale', 'use_alt_scale', true],
squash_system_messages: ['#squash_system_messages', 'squash_system_messages', true],
image_inlining: ['#openai_image_inlining', 'image_inlining', true],
seed: ['#seed_openai', 'seed', false],
};

const presetName = $('#settings_preset_openai').find(":selected").text();
@@ -2961,7 +2970,7 @@ function onSettingsPresetChange() {
const preset = structuredClone(openai_settings[openai_setting_names[oai_settings.preset_settings_openai]]);

const updateInput = (selector, value) => $(selector).val(value).trigger('input');
const updateCheckbox = (selector, value) => $(selector).prop('checked', value).trigger('input');
const updateCheckbox = (selector, value) => $(selector).prop('checked', value).trigger('input').trigger('change');

// Allow subscribers to alter the preset before applying deltas
eventSource.emit(event_types.OAI_PRESET_CHANGED_BEFORE, {
@@ -3768,6 +3777,11 @@ $(document).ready(async function () {
saveSettingsDebounced();
});

$('#seed_openai').on('input', function () {
    oai_settings.seed = Number($(this).val());
    saveSettingsDebounced();
});

$(document).on('input', '#openai_settings .autoSetHeight', function () {
resetScrollHeight($(this));
});
@@ -59,8 +59,12 @@
<ul>
<li><tt>{{getvar::name}}</tt> – replaced with the value of the local variable "name"</li>
<li><tt>{{setvar::name::value}}</tt> – replaced with empty string, sets the local variable "name" to "value"</li>
<li><tt>{{addvar::name::increment}}</tt> – replaced with the result of addition numeric value of "increment" to the local variable "name"</li>
<li><tt>{{addvar::name::increment}}</tt> – replaced with empty string, adds a numeric value of "increment" to the local variable "name"</li>
<li><tt>{{incvar::name}}</tt> – replaced with the result of the increment of value of the variable "name" by 1</li>
<li><tt>{{decvar::name}}</tt> – replaced with the result of the decrement of value of the variable "name" by 1</li>
<li><tt>{{getglobalvar::name}}</tt> – replaced with the value of the global variable "name"</li>
<li><tt>{{setglobalvar::name::value}}</tt> – replaced with empty string, sets the global variable "name" to "value"</li>
<li><tt>{{addglobalvar::name::value}}</tt> – replaced with the result of addition numeric value of "increment" to the global variable "name"</li>
<li><tt>{{addglobalvar::name::value}}</tt> – replaced with empty string, adds a numeric value of "increment" to the global variable "name"</li>
<li><tt>{{incglobalvar::name}}</tt> – replaced with the result of the increment of value of the global variable "name" by 1</li>
<li><tt>{{decglobalvar::name}}</tt> – replaced with the result of the decrement of value of the global variable "name" by 1</li>
</ul>
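Illustrative usage (not introduced by this commit), with a hypothetical variable named score: a message containing {{setvar::score::10}} sets it and renders nothing, a later {{addvar::score::5}} bumps it to 15, and {{getvar::score}} then renders 15; the *globalvar variants behave the same way but persist across chats.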
@ -1,6 +1,7 @@
|
|||
import { getContext } from "./extensions.js";
|
||||
import { getRequestHeaders } from "../script.js";
|
||||
import { isMobile } from "./RossAscends-mods.js";
|
||||
import { collapseNewlines } from "./power-user.js";
|
||||
|
||||
/**
|
||||
* Pagination status string template.
|
||||
|
@ -1066,3 +1067,99 @@ export function uuidv4() {
|
|||
return v.toString(16);
|
||||
});
|
||||
}
|
||||
|
||||
function postProcessText(text) {
|
||||
// Collapse multiple newlines into one
|
||||
text = collapseNewlines(text);
|
||||
// Trim leading and trailing whitespace, and remove empty lines
|
||||
text = text.split('\n').map(l => l.trim()).filter(Boolean).join('\n');
|
||||
// Remove carriage returns
|
||||
text = text.replace(/\r/g, '');
|
||||
// Normalize unicode spaces
|
||||
text = text.replace(/\u00A0/g, ' ');
|
||||
// Collapse multiple spaces into one (except for newlines)
|
||||
text = text.replace(/ {2,}/g, ' ');
|
||||
// Remove leading and trailing spaces
|
||||
text = text.trim();
|
||||
return text;
|
||||
}
|
||||
|
||||
/**
 * Use pdf.js to load and parse text from PDF pages
 * @param {Blob} blob PDF file blob
 * @returns {Promise<string>} A promise that resolves to the parsed text.
 */
export async function extractTextFromPDF(blob) {
    async function initPdfJs() {
        const promises = [];

        const workerPromise = new Promise((resolve, reject) => {
            const workerScript = document.createElement('script');
            workerScript.type = 'module';
            workerScript.async = true;
            workerScript.src = 'lib/pdf.worker.mjs';
            workerScript.onload = resolve;
            workerScript.onerror = reject;
            document.head.appendChild(workerScript);
        });

        promises.push(workerPromise);

        const pdfjsPromise = new Promise((resolve, reject) => {
            const pdfjsScript = document.createElement('script');
            pdfjsScript.type = 'module';
            pdfjsScript.async = true;
            pdfjsScript.src = 'lib/pdf.mjs';
            pdfjsScript.onload = resolve;
            pdfjsScript.onerror = reject;
            document.head.appendChild(pdfjsScript);
        });

        promises.push(pdfjsPromise);

        return Promise.all(promises);
    }

    if (!('pdfjsLib' in window)) {
        await initPdfJs();
    }

    const buffer = await getFileBuffer(blob);
    const pdf = await pdfjsLib.getDocument(buffer).promise;
    const pages = [];
    for (let i = 1; i <= pdf.numPages; i++) {
        const page = await pdf.getPage(i);
        const textContent = await page.getTextContent();
        const text = textContent.items.map(item => item.str).join(' ');
        pages.push(text);
    }
    return postProcessText(pages.join('\n'));
}

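For illustration only (not part of the diff): a minimal call site for the exported extractTextFromPDF() helper. The import path and the "pdf_upload" element id are assumptions made for this sketch.

// Hypothetical usage sketch; only extractTextFromPDF() comes from the code above.
import { extractTextFromPDF } from './utils.js';

const pdfInput = document.getElementById('pdf_upload');
pdfInput?.addEventListener('change', async () => {
    const file = pdfInput.files?.[0]; // a File is a Blob, matching the JSDoc
    if (!file) return;
    const text = await extractTextFromPDF(file);
    console.log(text.slice(0, 200)); // first 200 characters of the cleaned text
});
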
/**
 * Use DOMParser to load and parse text from HTML
 * @param {Blob} blob HTML content blob
 * @returns {Promise<string>} A promise that resolves to the parsed text.
 */
export async function extractTextFromHTML(blob) {
    const html = await blob.text();
    const domParser = new DOMParser();
    const document = domParser.parseFromString(DOMPurify.sanitize(html), 'text/html');
    const text = postProcessText(document.body.textContent);
    return text;
}

/**
 * Use showdown to load and parse text from Markdown
 * @param {Blob} blob Markdown content blob
 * @returns {Promise<string>} A promise that resolves to the parsed text.
 */
export async function extractTextFromMarkdown(blob) {
    const markdown = await blob.text();
    const converter = new showdown.Converter();
    const html = converter.makeHtml(markdown);
    const domParser = new DOMParser();
    const document = domParser.parseFromString(DOMPurify.sanitize(html), 'text/html');
    const text = postProcessText(document.body.textContent);
    return text;
}

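Both extractors take the same Blob-in, string-out shape. Another illustrative sketch (again not from the commit; it assumes the same utils.js import path and that DOMPurify and showdown are already loaded as globals, as they are in the SillyTavern frontend):

import { extractTextFromHTML, extractTextFromMarkdown } from './utils.js';

// Wrap raw strings in Blobs to exercise both helpers.
const htmlBlob = new Blob(['<p>Hello&nbsp;<b>world</b></p>'], { type: 'text/html' });
const mdBlob = new Blob(['# Title\n\nSome *markdown* text.'], { type: 'text/markdown' });

const htmlText = await extractTextFromHTML(htmlBlob); // expected roughly: "Hello world"
const mdText = await extractTextFromMarkdown(mdBlob); // expected roughly: "Title\nSome markdown text."
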
@ -95,6 +95,10 @@ function decrementGlobalVariable(name) {
 * @returns {string} Variable value or the string literal
 */
export function resolveVariable(name) {
    if (name === undefined) {
        return '';
    }

    if (existsLocalVariable(name)) {
        return getLocalVariable(name);
    }

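The world-info commands later in this diff lean on this helper (e.g. const file = resolveVariable(args.file)). A hedged reading of its behaviour, since the hunk above truncates the function body:

// resolveVariable('chatLore') returns the chat-local variable "chatLore" if one
// exists; otherwise it presumably falls back to a global variable of that name
// and finally to the literal string itself, matching the JSDoc's
// "Variable value or the string literal".
const file = resolveVariable('chatLore');
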
@ -137,6 +141,18 @@ export function replaceVariableMacros(input) {
            return '';
        });

        // Replace {{incvar::name}} with the result of incrementing the variable "name" by 1
        line = line.replace(/{{incvar::([^}]+)}}/gi, (_, name) => {
            name = name.trim();
            return incrementLocalVariable(name);
        });

        // Replace {{decvar::name}} with the result of decrementing the variable "name" by 1
        line = line.replace(/{{decvar::([^}]+)}}/gi, (_, name) => {
            name = name.trim();
            return decrementLocalVariable(name);
        });

        // Replace {{getglobalvar::name}} with the value of the global variable name
        line = line.replace(/{{getglobalvar::([^}]+)}}/gi, (_, name) => {
            name = name.trim();

@ -157,6 +173,18 @@ export function replaceVariableMacros(input) {
            return '';
        });

        // Replace {{incglobalvar::name}} with the result of incrementing the global variable "name" by 1
        line = line.replace(/{{incglobalvar::([^}]+)}}/gi, (_, name) => {
            name = name.trim();
            return incrementGlobalVariable(name);
        });

        // Replace {{decglobalvar::name}} with the result of decrementing the global variable "name" by 1
        line = line.replace(/{{decglobalvar::([^}]+)}}/gi, (_, name) => {
            name = name.trim();
            return decrementGlobalVariable(name);
        });

        lines[i] = line;
    }

@ -8,6 +8,7 @@ import { FILTER_TYPES, FilterHelper } from "./filters.js";
import { getTokenCount } from "./tokenizers.js";
import { power_user } from "./power-user.js";
import { getTagKeyForCharacter } from "./tags.js";
import { resolveVariable } from "./variables.js";

export {
    world_info,

@ -189,6 +190,154 @@ function setWorldInfoSettings(settings, data) {
        const hasWorldInfo = !!chat_metadata[METADATA_KEY] && world_names.includes(chat_metadata[METADATA_KEY]);
        $('.chat_lorebook_button').toggleClass('world_set', hasWorldInfo);
    });

    // Add slash commands
    registerWorldInfoSlashCommands();
}

function registerWorldInfoSlashCommands() {
    async function getEntriesFromFile(file) {
        if (!file || !world_names.includes(file)) {
            toastr.warning('Valid World Info file name is required');
            return '';
        }

        const data = await loadWorldInfoData(file);

        if (!data || !("entries" in data)) {
            toastr.warning('World Info file has an invalid format');
            return '';
        }

        const entries = Object.values(data.entries);

        if (!entries || entries.length === 0) {
            toastr.warning('World Info file has no entries');
            return '';
        }

        return entries;
    }

    async function getChatBookCallback() {
        const chatId = getCurrentChatId();

        if (!chatId) {
            toastr.warning('Open a chat to get the name of the chat-bound lorebook');
            return '';
        }

        if (chat_metadata[METADATA_KEY] && world_names.includes(chat_metadata[METADATA_KEY])) {
            return chat_metadata[METADATA_KEY];
        }

        // Replace non-alphanumeric characters with underscores, cut to 64 characters
        const name = `Chat Book ${getCurrentChatId()}`.replace(/[^a-z0-9]/gi, '_').replace(/_{2,}/g, '_').substring(0, 64);
        await createNewWorldInfo(name);

        chat_metadata[METADATA_KEY] = name;
        await saveMetadata();
        $('.chat_lorebook_button').addClass('world_set');
        return name;
    }

    async function findBookEntryCallback(args, value) {
        const file = resolveVariable(args.file);
        const field = args.field || 'key';

        const entries = await getEntriesFromFile(file);

        if (!entries) {
            return '';
        }

        const fuse = new Fuse(entries, {
            keys: [{ name: field, weight: 1 }],
            includeScore: true,
            threshold: 0.3,
        });

        const results = fuse.search(value);

        if (!results || results.length === 0) {
            return '';
        }

        const result = results[0]?.item?.uid;

        if (result === undefined) {
            return '';
        }

        return result;
    }

    async function getEntryFieldCallback(args, uid) {
        const file = resolveVariable(args.file);
        const field = args.field || 'content';

        const entries = await getEntriesFromFile(file);

        if (!entries) {
            return '';
        }

        const entry = entries.find(x => x.uid === uid);

        if (!entry) {
            return '';
        }

        const fieldValue = entry[field];

        if (fieldValue === undefined) {
            return '';
        }

        if (Array.isArray(fieldValue)) {
            return fieldValue.map(x => substituteParams(x)).join(', ');
        }

        return substituteParams(fieldValue);
    }

    async function createEntryCallback(args, content) {
        const file = resolveVariable(args.file);
        const key = args.key;

        const data = await loadWorldInfoData(file);

        if (!data || !("entries" in data)) {
            toastr.warning('Valid World Info file name is required');
            return '';
        }

        const entry = createWorldInfoEntry(file, data, true);

        if (key) {
            entry.key.push(key);
            entry.addMemo = true;
            entry.comment = key;
        }

        if (content) {
            entry.content = content;
        }

        await saveWorldInfo(file, data, true);

        const selectedIndex = world_names.indexOf(file);
        if (selectedIndex !== -1) {
            $('#world_editor_select').val(selectedIndex).trigger('change');
        }

        return entry.uid;
    }

    registerSlashCommand('getchatbook', getChatBookCallback, ['getchatlore', 'getchatwi'], '– get the name of the chat-bound lorebook or create a new one if it was unbound, and pass it down the pipe', true, true);
    registerSlashCommand('findentry', findBookEntryCallback, ['findlore', 'findwi'], `<span class="monospace">(file=bookName field=field [texts])</span> – find a UID of the record from the specified book using the fuzzy match of a field value (default: key) and pass it down the pipe, e.g. <tt>/findentry file=chatLore field=key Shadowfang</tt>`, true, true);
    registerSlashCommand('getentryfield', getEntryFieldCallback, ['getlorefield', 'getwifield'], '<span class="monospace">(file=bookName field=field [UID])</span> – get a field value (default: content) of the record with the UID from the specified book and pass it down the pipe, e.g. <tt>/getentryfield file=chatLore field=content 123</tt>', true, true);
    registerSlashCommand('createentry', createEntryCallback, ['createlore', 'createwi'], '<span class="monospace">(file=bookName key=key [content])</span> – create a new record in the specified book with the key and content (both are optional) and pass the UID down the pipe, e.g. <tt>/createentry file=chatLore key=Shadowfang The sword of the king</tt>', true, true);
}

// World Info Editor

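Because each of the four commands passes its result down the pipe, they compose. A hypothetical chain, reusing the names from the built-in examples above and the {{pipe}} macro for the previous result (illustrative only, not part of the commit):

    /getchatbook | /createentry file={{pipe}} key=Shadowfang The sword of the king
    /findentry file=chatLore field=key Shadowfang | /getentryfield file=chatLore field=content {{pipe}}
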
@ -1134,7 +1283,7 @@ async function deleteWorldInfoEntry(data, uid) {
    delete data.entries[uid];
}

function createWorldInfoEntry(name, data) {
function createWorldInfoEntry(name, data, fromSlashCommand = false) {
    const newEntryTemplate = {
        key: [],
        keysecondary: [],

@ -1161,7 +1310,11 @@ function createWorldInfoEntry(name, data) {
    const newEntry = { uid: newUid, ...newEntryTemplate };
    data.entries[newUid] = newEntry;

    updateEditor(newUid);
    if (!fromSlashCommand) {
        updateEditor(newUid);
    }

    return newEntry;
}

async function _save(name, data) {

@ -213,8 +213,8 @@ var chatsPath = 'public/chats/';
const SETTINGS_FILE = './public/settings.json';
const AVATAR_WIDTH = 400;
const AVATAR_HEIGHT = 600;
const jsonParser = express.json({ limit: '100mb' });
const urlencodedParser = express.urlencoded({ extended: true, limit: '100mb' });
const jsonParser = express.json({ limit: '200mb' });
const urlencodedParser = express.urlencoded({ extended: true, limit: '200mb' });
const { DIRECTORIES, UPLOADS_PATH, PALM_SAFETY } = require('./src/constants');
const { TavernCardValidator } = require("./src/validator/TavernCardValidator");

@ -3303,6 +3303,7 @@ app.post("/generate_openai", jsonParser, function (request, response_generate_op
                "top_k": request.body.top_k,
                "stop": isTextCompletion === false ? request.body.stop : undefined,
                "logit_bias": request.body.logit_bias,
                "seed": request.body.seed,
                ...bodyParams,
            }),
            signal: controller.signal,

@ -3,6 +3,7 @@ const fs = require('fs');
const sanitize = require('sanitize-filename');
const fetch = require('node-fetch').default;
const { finished } = require('stream/promises');
const writeFileSyncAtomic = require('write-file-atomic').sync;
const { DIRECTORIES, UNSAFE_EXTENSIONS } = require('./constants');

const VALID_CATEGORIES = ["bgm", "ambient", "blip", "live2d"];

@ -297,6 +298,32 @@ function registerEndpoints(app, jsonParser) {
            return response.sendStatus(500);
        }
    });

    app.post('/api/file/upload', jsonParser, async (request, response) => {
        try {
            if (!request.body.name) {
                return response.status(400).send("No upload name specified");
            }

            if (!request.body.data) {
                return response.status(400).send("No upload data specified");
            }

            const safeInput = checkAssetFileName(request.body.name);

            if (!safeInput) {
                return response.status(400).send("Invalid upload name");
            }

            const pathToUpload = path.join(DIRECTORIES.files, safeInput);
            writeFileSyncAtomic(pathToUpload, request.body.data, 'base64');
            const url = path.normalize(pathToUpload.replace('public' + path.sep, ''));
            return response.send({ path: url });
        } catch (error) {
            console.log(error);
            return response.sendStatus(500);
        }
    });
}

module.exports = {

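For illustration, a client-side sketch of how something might call the new /api/file/upload route. The helper names are invented, and in SillyTavern itself the request would also carry the usual CSRF headers (e.g. via getRequestHeaders()):

// Hypothetical browser-side helpers, not part of the commit.
function blobToBase64(blob) {
    return new Promise((resolve, reject) => {
        const reader = new FileReader();
        reader.onload = () => resolve(String(reader.result).split(',')[1]); // strip the "data:" prefix
        reader.onerror = reject;
        reader.readAsDataURL(blob);
    });
}

async function uploadFile(file) {
    const response = await fetch('/api/file/upload', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ name: file.name, data: await blobToBase64(file) }),
    });
    const { path } = await response.json();
    return path; // e.g. "user/files/<name>", relative to the public folder
}
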
@ -24,6 +24,7 @@ const DIRECTORIES = {
    quickreplies: 'public/QuickReplies',
    assets: 'public/assets',
    comfyWorkflows: 'public/user/workflows',
    files: 'public/user/files',
};

const UNSAFE_EXTENSIONS = [

@ -30,34 +30,35 @@ async function getVector(source, text) {
 * @returns {Promise<vectra.LocalIndex>} - The index for the collection
 */
async function getIndex(collectionId, source, create = true) {
    const index = new vectra.LocalIndex(path.join(process.cwd(), 'vectors', sanitize(source), sanitize(collectionId)));
    const store = new vectra.LocalIndex(path.join(process.cwd(), 'vectors', sanitize(source), sanitize(collectionId)));

    if (create && !await index.isIndexCreated()) {
        await index.createIndex();
    if (create && !await store.isIndexCreated()) {
        await store.createIndex();
    }

    return index;
    return store;
}

/**
 * Inserts items into the vector collection
 * @param {string} collectionId - The collection ID
 * @param {string} source - The source of the vector
 * @param {{ hash: number; text: string; }[]} items - The items to insert
 * @param {{ hash: number; text: string; index: number; }[]} items - The items to insert
 */
async function insertVectorItems(collectionId, source, items) {
    const index = await getIndex(collectionId, source);
    const store = await getIndex(collectionId, source);

    await index.beginUpdate();
    await store.beginUpdate();

    for (const item of items) {
        const text = item.text;
        const hash = item.hash;
        const index = item.index;
        const vector = await getVector(source, text);
        await index.upsertItem({ vector: vector, metadata: { hash, text } });
        await store.upsertItem({ vector: vector, metadata: { hash, text, index } });
    }

    await index.endUpdate();
    await store.endUpdate();
}

/**

@ -67,9 +68,9 @@ async function insertVectorItems(collectionId, source, items) {
 * @returns {Promise<number[]>} - The hashes of the items in the collection
 */
async function getSavedHashes(collectionId, source) {
    const index = await getIndex(collectionId, source);
    const store = await getIndex(collectionId, source);

    const items = await index.listItems();
    const items = await store.listItems();
    const hashes = items.map(x => Number(x.metadata.hash));

    return hashes;

@ -82,16 +83,16 @@ async function getSavedHashes(collectionId, source) {
 * @param {number[]} hashes - The hashes of the items to delete
 */
async function deleteVectorItems(collectionId, source, hashes) {
    const index = await getIndex(collectionId, source);
    const items = await index.listItemsByMetadata({ hash: { '$in': hashes } });
    const store = await getIndex(collectionId, source);
    const items = await store.listItemsByMetadata({ hash: { '$in': hashes } });

    await index.beginUpdate();
    await store.beginUpdate();

    for (const item of items) {
        await index.deleteItem(item.id);
        await store.deleteItem(item.id);
    }

    await index.endUpdate();
    await store.endUpdate();
}

/**

@ -100,15 +101,16 @@ async function deleteVectorItems(collectionId, source, hashes) {
 * @param {string} source - The source of the vector
 * @param {string} searchText - The text to search for
 * @param {number} topK - The number of results to return
 * @returns {Promise<number[]>} - The hashes of the items that match the search text
 * @returns {Promise<{hashes: number[], metadata: object[]}>} - The metadata of the items that match the search text
 */
async function queryCollection(collectionId, source, searchText, topK) {
    const index = await getIndex(collectionId, source);
    const store = await getIndex(collectionId, source);
    const vector = await getVector(source, searchText);

    const result = await index.queryItems(vector, topK);
    const result = await store.queryItems(vector, topK);
    const metadata = result.map(x => x.item.metadata);
    const hashes = result.map(x => Number(x.item.metadata.hash));
    return hashes;
    return { metadata, hashes };
}

/**

@ -143,7 +145,7 @@ async function registerEndpoints(app, jsonParser) {
        }

        const collectionId = String(req.body.collectionId);
        const items = req.body.items.map(x => ({ hash: x.hash, text: x.text }));
        const items = req.body.items.map(x => ({ hash: x.hash, text: x.text, index: x.index }));
        const source = String(req.body.source) || 'transformers';

        await insertVectorItems(collectionId, source, items);

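With these changes queryCollection() returns both the matching hashes and the stored metadata, and insertVectorItems() carries the extra index field through into that metadata. A short sketch of how a caller inside this module might use the new shape (the wrapper function name is invented for illustration):

async function findNeighbours(collectionId, source, text) {
    const { hashes, metadata } = await queryCollection(collectionId, source, text, 10);
    // Each metadata entry now looks like { hash, text, index }, so results can be
    // sorted back into their original message order instead of relying on hashes alone.
    const ordered = metadata.slice().sort((a, b) => a.index - b.index);
    return { hashes, ordered };
}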