Merge branch 'staging' into release

This commit is contained in:
anachronos
2023-12-17 10:38:04 +01:00
committed by GitHub
78 changed files with 4592 additions and 2836 deletions

View File

@ -12,7 +12,7 @@ module.exports = {
overrides: [ overrides: [
{ {
// Server-side files (plus this configuration file) // Server-side files (plus this configuration file)
files: ['src/**/*.js', './*.js'], files: ['src/**/*.js', './*.js', 'plugins/**/*.js'],
env: { env: {
node: true, node: true,
}, },

4
.gitignore vendored
View File

@ -27,7 +27,8 @@ public/stats.json
public/settings.json public/settings.json
/thumbnails /thumbnails
whitelist.txt whitelist.txt
.vscode .vscode/**
!.vscode/extensions.json
.idea/ .idea/
secrets.json secrets.json
/dist /dist
@ -41,3 +42,4 @@ access.log
/vectors/ /vectors/
/cache/ /cache/
public/css/user.css public/css/user.css
/plugins/

11
.vscode/extensions.json vendored Normal file
View File

@ -0,0 +1,11 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
// Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
// List of extensions which should be recommended for users of this workspace.
"recommendations": [
"dbaeumer.vscode-eslint",
"EditorConfig.EditorConfig"
],
// List of extensions recommended by VS Code that should not be recommended for users of this workspace.
"unwantedRecommendations": []
}

View File

@ -31,10 +31,9 @@ RUN \
echo "*** Create symbolic links to config directory ***" && \ echo "*** Create symbolic links to config directory ***" && \
for R in $RESOURCES; do ln -s "../config/$R" "public/$R"; done || true && \ for R in $RESOURCES; do ln -s "../config/$R" "public/$R"; done || true && \
\ \
rm -f "config.yaml" "public/settings.json" "public/css/bg_load.css" || true && \ rm -f "config.yaml" "public/settings.json" || true && \
ln -s "./config/config.yaml" "config.yaml" || true && \ ln -s "./config/config.yaml" "config.yaml" || true && \
ln -s "../config/settings.json" "public/settings.json" || true && \ ln -s "../config/settings.json" "public/settings.json" || true && \
ln -s "../../config/bg_load.css" "public/css/bg_load.css" || true && \
mkdir "config" || true mkdir "config" || true
# Cleanup unnecessary files # Cleanup unnecessary files

View File

@ -1 +0,0 @@
#bg1 {background-image: url(../backgrounds/__transparent.png);}

View File

@ -58,3 +58,5 @@ openai:
deepl: deepl:
# Available options: default, more, less, prefer_more, prefer_less # Available options: default, more, less, prefer_more, prefer_less
formality: default formality: default
# -- SERVER PLUGIN CONFIGURATION --
enableServerPlugins: false

View File

@ -596,7 +596,6 @@
"openrouter_model": "OR_Website", "openrouter_model": "OR_Website",
"jailbreak_system": true, "jailbreak_system": true,
"reverse_proxy": "", "reverse_proxy": "",
"legacy_streaming": false,
"chat_completion_source": "openai", "chat_completion_source": "openai",
"max_context_unlocked": false, "max_context_unlocked": false,
"api_url_scale": "", "api_url_scale": "",

View File

@ -19,11 +19,6 @@ if [ ! -e "config/settings.json" ]; then
cp -r "default/settings.json" "config/settings.json" cp -r "default/settings.json" "config/settings.json"
fi fi
if [ ! -e "config/bg_load.css" ]; then
echo "Resource not found, copying from defaults: bg_load.css"
cp -r "default/bg_load.css" "config/bg_load.css"
fi
CONFIG_FILE="config.yaml" CONFIG_FILE="config.yaml"
echo "Starting with the following config:" echo "Starting with the following config:"

13
package-lock.json generated
View File

@ -1,12 +1,12 @@
{ {
"name": "sillytavern", "name": "sillytavern",
"version": "1.11.0", "version": "1.11.1",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "sillytavern", "name": "sillytavern",
"version": "1.11.0", "version": "1.11.1",
"hasInstallScript": true, "hasInstallScript": true,
"license": "AGPL-3.0", "license": "AGPL-3.0",
"dependencies": { "dependencies": {
@ -20,7 +20,6 @@
"cookie-parser": "^1.4.6", "cookie-parser": "^1.4.6",
"cors": "^2.8.5", "cors": "^2.8.5",
"csrf-csrf": "^2.2.3", "csrf-csrf": "^2.2.3",
"device-detector-js": "^3.0.3",
"express": "^4.18.2", "express": "^4.18.2",
"form-data": "^4.0.0", "form-data": "^4.0.0",
"google-translate-api-browser": "^3.0.1", "google-translate-api-browser": "^3.0.1",
@ -1777,14 +1776,6 @@
"node": ">=8" "node": ">=8"
} }
}, },
"node_modules/device-detector-js": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/device-detector-js/-/device-detector-js-3.0.3.tgz",
"integrity": "sha512-jM89LJAvP6uOd84at8OlD9dWP8KeYCCHUde0RT0HQo/stdoRH4b54Xl/fntx2nEXCmqiFhmo+/cJetS2VGUHPw==",
"engines": {
"node": ">= 8.11.4"
}
},
"node_modules/digest-fetch": { "node_modules/digest-fetch": {
"version": "1.3.0", "version": "1.3.0",
"resolved": "https://registry.npmjs.org/digest-fetch/-/digest-fetch-1.3.0.tgz", "resolved": "https://registry.npmjs.org/digest-fetch/-/digest-fetch-1.3.0.tgz",

View File

@ -10,7 +10,6 @@
"cookie-parser": "^1.4.6", "cookie-parser": "^1.4.6",
"cors": "^2.8.5", "cors": "^2.8.5",
"csrf-csrf": "^2.2.3", "csrf-csrf": "^2.2.3",
"device-detector-js": "^3.0.3",
"express": "^4.18.2", "express": "^4.18.2",
"form-data": "^4.0.0", "form-data": "^4.0.0",
"google-translate-api-browser": "^3.0.1", "google-translate-api-browser": "^3.0.1",
@ -52,7 +51,7 @@
"type": "git", "type": "git",
"url": "https://github.com/SillyTavern/SillyTavern.git" "url": "https://github.com/SillyTavern/SillyTavern.git"
}, },
"version": "1.11.0", "version": "1.11.1",
"scripts": { "scripts": {
"start": "node server.js", "start": "node server.js",
"start-multi": "node server.js --disableCsrf", "start-multi": "node server.js --disableCsrf",

0
plugins/.gitkeep Normal file
View File

View File

@ -107,7 +107,6 @@ function addMissingConfigValues() {
function createDefaultFiles() { function createDefaultFiles() {
const files = { const files = {
settings: './public/settings.json', settings: './public/settings.json',
bg_load: './public/css/bg_load.css',
config: './config.yaml', config: './config.yaml',
user: './public/css/user.css', user: './public/css/user.css',
}; };
@ -168,6 +167,29 @@ function copyWasmFiles() {
} }
} }
/**
* Moves the custom background into settings.json.
*/
function migrateBackground() {
if (!fs.existsSync('./public/css/bg_load.css')) return;
const bgCSS = fs.readFileSync('./public/css/bg_load.css', 'utf-8');
const bgMatch = /url\('([^']*)'\)/.exec(bgCSS);
if (!bgMatch) return;
const bgFilename = bgMatch[1].replace('../backgrounds/', '');
const settings = fs.readFileSync('./public/settings.json', 'utf-8');
const settingsJSON = JSON.parse(settings);
if (Object.hasOwn(settingsJSON, 'background')) {
console.log(color.yellow('Both bg_load.css and the "background" setting exist. Please delete bg_load.css manually.'));
return;
}
settingsJSON.background = { name: bgFilename, url: `url('backgrounds/${bgFilename}')` };
fs.writeFileSync('./public/settings.json', JSON.stringify(settingsJSON, null, 4));
fs.rmSync('./public/css/bg_load.css');
}
try { try {
// 0. Convert config.conf to config.yaml // 0. Convert config.conf to config.yaml
convertConfig(); convertConfig();
@ -177,6 +199,8 @@ try {
copyWasmFiles(); copyWasmFiles();
// 3. Add missing config values // 3. Add missing config values
addMissingConfigValues(); addMissingConfigValues();
// 4. Migrate bg_load.css to settings.json
migrateBackground();
} catch (error) { } catch (error) {
console.error(error); console.error(error);
} }

View File

@ -1,4 +1,4 @@
#loader { #loader, #preloader {
position: fixed; position: fixed;
margin: 0; margin: 0;
padding: 0; padding: 0;
@ -22,4 +22,4 @@
#load-spinner { #load-spinner {
transition: all 300ms ease-out; transition: all 300ms ease-out;
opacity: 1; opacity: 1;
} }

View File

@ -1,5 +1,9 @@
/*will apply to anything 1000px or less. this catches ipads, horizontal phones, and vertical phones)*/ /*will apply to anything 1000px or less. this catches ipads, horizontal phones, and vertical phones)*/
@media screen and (max-width: 1000px) { @media screen and (max-width: 1000px) {
#send_form.compact #leftSendForm, #send_form.compact #rightSendForm {
flex-wrap: nowrap;
width: unset;
}
.bg_button { .bg_button {
font-size: 15px; font-size: 15px;

38
public/img/makersuite.svg Normal file
View File

@ -0,0 +1,38 @@
<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN"
"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
<svg version="1.0" xmlns="http://www.w3.org/2000/svg"
width="590.000000pt" height="589.000000pt" viewBox="0 0 590.000000 589.000000"
preserveAspectRatio="xMidYMid meet">
<g transform="translate(0.000000,589.000000) scale(0.100000,-0.100000)" stroke="none">
<path d="M4436 5263 c-22 -2 -49 -11 -60 -19 -11 -8 -30 -14 -43 -14 -12 0
-32 -4 -45 -9 -42 -16 -113 -43 -133 -51 -61 -23 -191 -92 -267 -143 -181
-122 -327 -258 -472 -441 -71 -88 -216 -297 -216 -309 0 -4 -15 -30 -33 -59
-73 -115 -212 -419 -256 -558 -6 -19 -21 -64 -32 -100 -12 -36 -25 -81 -30
-100 -22 -96 -28 -122 -47 -215 -46 -228 -66 -517 -52 -743 15 -221 48 -473
71 -530 5 -13 9 -33 9 -44 0 -12 4 -35 9 -52 6 -17 17 -58 27 -91 73 -263 189
-540 299 -714 31 -50 136 -145 217 -197 38 -24 70 -44 73 -44 3 0 22 -10 43
-23 42 -26 202 -92 247 -102 16 -4 33 -12 37 -18 5 -8 58 -12 170 -12 l163 0
97 48 c76 37 111 62 169 120 71 70 179 221 179 249 0 8 4 18 9 23 30 33 61
177 61 282 0 95 -4 121 -41 241 -60 197 -199 365 -390 475 -24 14 -80 45 -124
70 -327 184 -411 248 -422 319 -9 60 2 80 81 145 148 124 337 231 541 308 55
20 108 41 117 46 10 5 27 9 37 9 11 0 23 5 26 10 3 6 14 10 22 10 9 0 55 12
102 26 47 14 102 29 121 34 19 4 62 15 95 24 69 19 117 32 178 47 82 20 263
81 317 107 187 88 391 273 468 427 41 81 63 136 78 197 9 35 21 66 27 70 15 9
14 341 -1 346 -6 2 -14 20 -18 40 -7 40 -28 113 -44 150 -6 12 -15 34 -21 50
-118 283 -358 521 -639 634 -82 32 -117 45 -155 53 -16 4 -33 12 -37 18 -7 12
-425 20 -512 10z"/>
<path d="M740 4521 c-14 -5 -50 -14 -80 -21 -157 -36 -350 -175 -454 -325 -70
-102 -145 -276 -166 -385 -12 -59 -12 -365 -1 -410 18 -69 57 -166 96 -235 40
-71 183 -225 209 -225 7 0 19 -6 25 -14 15 -17 110 -64 171 -83 59 -19 136
-33 235 -43 44 -5 114 -13 155 -19 41 -6 109 -16 150 -21 121 -17 191 -30 250
-46 30 -8 73 -20 95 -26 98 -25 297 -115 335 -150 25 -23 34 -74 27 -154 -6
-81 -77 -230 -173 -363 -86 -121 -93 -129 -191 -231 -84 -88 -139 -172 -162
-250 -7 -25 -17 -55 -22 -68 -13 -32 -11 -208 2 -251 50 -164 174 -284 347
-337 122 -37 290 6 442 112 127 89 252 228 352 396 15 24 57 143 97 273 17 55
40 159 51 235 6 41 16 107 23 145 16 94 16 485 -1 600 -46 318 -109 525 -241
795 -55 114 -184 322 -243 395 -168 206 -299 341 -413 425 -33 25 -62 48 -65
51 -3 3 -30 21 -60 41 -139 88 -288 150 -453 187 -54 12 -299 13 -337 2z"/>
</g>
</svg>

After

Width:  |  Height:  |  Size: 2.4 KiB

15
public/img/mistralai.svg Normal file
View File

@ -0,0 +1,15 @@
<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN"
"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
<svg version="1.0" xmlns="http://www.w3.org/2000/svg"
width="64.000000pt" height="64.000000pt" viewBox="0 0 53.000000 60.000000"
preserveAspectRatio="xMidYMid meet">
<g transform="translate(0.000000,63.000000) scale(0.100000,-0.100000)"
fill="#000000" stroke="none">
<path d="M40 320 l0 -240 70 0 70 0 0 95 c0 95 0 95 25 95 23 0 25 -3 25 -50
l0 -50 70 0 70 0 0 50 c0 47 2 50 25 50 25 0 25 0 25 -95 l0 -95 70 0 70 0 0
240 0 240 -70 0 -70 0 0 -44 0 -45 -47 -3 -48 -3 -3 -47 c-3 -43 -5 -48 -28
-48 -22 0 -24 4 -24 50 l0 50 -45 0 -45 0 0 45 0 45 -70 0 -70 0 0 -240z"/>
</g>
</svg>

After

Width:  |  Height:  |  Size: 731 B

View File

@ -61,7 +61,6 @@
<link rel="stylesheet" type="text/css" href="css/extensions-panel.css"> <link rel="stylesheet" type="text/css" href="css/extensions-panel.css">
<link rel="stylesheet" type="text/css" href="css/select2-overrides.css"> <link rel="stylesheet" type="text/css" href="css/select2-overrides.css">
<link rel="stylesheet" type="text/css" href="css/mobile-styles.css"> <link rel="stylesheet" type="text/css" href="css/mobile-styles.css">
<link rel="stylesheet" href="css/bg_load.css">
<link rel="stylesheet" type="text/css" href="css/user.css"> <link rel="stylesheet" type="text/css" href="css/user.css">
<link rel="icon" type="image/x-icon" href="favicon.ico"> <link rel="icon" type="image/x-icon" href="favicon.ico">
<script type="module" src="scripts/i18n.js"></script> <script type="module" src="scripts/i18n.js"></script>
@ -91,6 +90,7 @@
</head> </head>
<body class="no-blur"> <body class="no-blur">
<div id="preloader"></div>
<div id="bg_custom"></div> <div id="bg_custom"></div>
<div id="bg1"></div> <div id="bg1"></div>
<div id="character_context_menu" class="hidden"> <div id="character_context_menu" class="hidden">
@ -177,7 +177,7 @@
<div> <div>
<h4 class="margin0"><span data-i18n="openaipresets">Chat Completion Presets</span></h4> <h4 class="margin0"><span data-i18n="openaipresets">Chat Completion Presets</span></h4>
<div class="flex-container flexNoGap"> <div class="flex-container flexNoGap">
<select id="settings_preset_openai" class="flex1 text_pole"> <select id="settings_preset_openai" class="flex1 text_pole" data-preset-manager-for="openai">
<option value="gui" data-i18n="default">Default</option> <option value="gui" data-i18n="default">Default</option>
</select> </select>
<div class="flex-container flexBasis100p justifyCenter"> <div class="flex-container flexBasis100p justifyCenter">
@ -444,7 +444,7 @@
complete.</span> complete.</span>
</div> </div>
</div> </div>
<div class="range-block" data-source="openai,claude,windowai,openrouter,ai21,scale,palm"> <div class="range-block" data-source="openai,claude,windowai,openrouter,ai21,scale,makersuite,mistralai">
<div class="range-block-title" data-i18n="Temperature"> <div class="range-block-title" data-i18n="Temperature">
Temperature Temperature
</div> </div>
@ -496,7 +496,7 @@
</div> </div>
</div> </div>
</div> </div>
<div data-newbie-hidden class="range-block" data-source="claude,openrouter,ai21,palm"> <div data-newbie-hidden class="range-block" data-source="claude,openrouter,ai21,makersuite">
<div class="range-block-title" data-i18n="Top K"> <div class="range-block-title" data-i18n="Top K">
Top K Top K
</div> </div>
@ -509,7 +509,7 @@
</div> </div>
</div> </div>
</div> </div>
<div data-newbie-hidden class="range-block" data-source="openai,claude,openrouter,ai21,scale,palm,togetherai"> <div data-newbie-hidden class="range-block" data-source="openai,claude,openrouter,ai21,scale,makersuite,mistralai,togetherai">
<div class="range-block-title" data-i18n="Top-p"> <div class="range-block-title" data-i18n="Top-p">
Top P Top P
</div> </div>
@ -749,7 +749,7 @@
<div id="openai_proxy_password_show" title="Peek a password" class="menu_button fa-solid fa-eye-slash fa-fw"></div> <div id="openai_proxy_password_show" title="Peek a password" class="menu_button fa-solid fa-eye-slash fa-fw"></div>
</div> </div>
</div> </div>
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,togetherai"> <div data-newbie-hidden class="range-block" data-source="openai,openrouter,mistralai,togetherai">
<div class="range-block-title justifyLeft" data-i18n="Seed"> <div class="range-block-title justifyLeft" data-i18n="Seed">
Seed Seed
</div> </div>
@ -760,19 +760,6 @@
<input type="number" id="seed_openai" name="seed_openai" class="text_pole" min="-1" max="2147483647" value="-1"> <input type="number" id="seed_openai" name="seed_openai" class="text_pole" min="-1" max="2147483647" value="-1">
</div> </div>
</div> </div>
<div data-newbie-hidden class="range-block" data-source="openai,claude">
<div class="range-block-title justifyLeft">
<label for="legacy_streaming" class="checkbox_label">
<input id="legacy_streaming" type="checkbox" />
<span data-i18n="Legacy Streaming Processing">
Legacy Streaming Processing
</span>
</label>
</div>
<div class="toggle-description justifyLeft" data-i18n="Enable this if the streaming doesn't work with your proxy">
Enable this if the streaming doesn't work with your proxy.
</div>
</div>
</div> </div>
</div> </div>
<div id="advanced-ai-config-block" class="width100p"> <div id="advanced-ai-config-block" class="width100p">
@ -1510,7 +1497,7 @@
</span> </span>
</div> </div>
</div> </div>
<div class="range-block" data-source="openai,openrouter"> <div class="range-block" data-source="openai,openrouter,makersuite">
<label for="openai_image_inlining" class="checkbox_label flexWrap widthFreeExpand"> <label for="openai_image_inlining" class="checkbox_label flexWrap widthFreeExpand">
<input id="openai_image_inlining" type="checkbox" /> <input id="openai_image_inlining" type="checkbox" />
<span data-i18n="Send inline images">Send inline images</span> <span data-i18n="Send inline images">Send inline images</span>
@ -1529,6 +1516,14 @@
<span data-i18n="Use the appropriate tokenizer for Jurassic models, which is more efficient than GPT's.">Use the appropriate tokenizer for Jurassic models, which is more efficient than GPT's.</span> <span data-i18n="Use the appropriate tokenizer for Jurassic models, which is more efficient than GPT's.">Use the appropriate tokenizer for Jurassic models, which is more efficient than GPT's.</span>
</div> </div>
</div> </div>
<div class="range-block" data-source="makersuite">
<label for="use_google_tokenizer" title="Use Google Tokenizer" class="checkbox_label widthFreeExpand">
<input id="use_google_tokenizer" type="checkbox" /><span data-i18n="Use Google Tokenizer">Use Google Tokenizer</span>
</label>
<div class="toggle-description justifyLeft">
<span data-i18n="Use the appropriate tokenizer for Google models via their API. Slower prompt processing, but offers much more accurate token counting.">Use the appropriate tokenizer for Google models via their API. Slower prompt processing, but offers much more accurate token counting.</span>
</div>
</div>
<div data-newbie-hidden class="range-block" data-source="claude"> <div data-newbie-hidden class="range-block" data-source="claude">
<label for="exclude_assistant" title="Exclude Assistant suffix" class="checkbox_label widthFreeExpand"> <label for="exclude_assistant" title="Exclude Assistant suffix" class="checkbox_label widthFreeExpand">
<input id="exclude_assistant" type="checkbox" /><span data-i18n="Exclude Assistant suffix">Exclude Assistant suffix</span> <input id="exclude_assistant" type="checkbox" /><span data-i18n="Exclude Assistant suffix">Exclude Assistant suffix</span>
@ -1598,7 +1593,7 @@
<option value="koboldhorde"><span data-i18n="KoboldAI Horde">KoboldAI Horde</span></option> <option value="koboldhorde"><span data-i18n="KoboldAI Horde">KoboldAI Horde</span></option>
<option value="novel"><span data-i18n="NovelAI">NovelAI</span></option> <option value="novel"><span data-i18n="NovelAI">NovelAI</span></option>
<option value="textgenerationwebui"><span data-i18n="Text Completion">Text Completion (ooba, Mancer, Aphrodite, TabbyAPI, KoboldCpp)</span></option> <option value="textgenerationwebui"><span data-i18n="Text Completion">Text Completion (ooba, Mancer, Aphrodite, TabbyAPI, KoboldCpp)</span></option>
<option value="openai"><span data-i18n="Chat Completion (OpenAI, Claude, Window/OpenRouter, Scale, AI21, TogetherAI)">Chat Completion (OpenAI, Claude, Window, OpenRouter, Scale, AI21, PaLM, TogetherAI)</span></option> <option value="openai"><span data-i18n="Chat Completion (OpenAI, Claude, Window/OpenRouter, Scale, AI21, Google MakerSuite, MistralAI, TogetherAI)">Chat Completion (OpenAI, Claude, Window, OpenRouter, Scale, AI21, Google MakerSuite, MistralAI)</span></option>
</select> </select>
</div> </div>
<div id="kobold_horde" style="position: relative;"> <!-- shows the kobold settings --> <div id="kobold_horde" style="position: relative;"> <!-- shows the kobold settings -->
@ -1846,7 +1841,8 @@
<option value="claude">Claude</option> <option value="claude">Claude</option>
<option value="scale">Scale</option> <option value="scale">Scale</option>
<option value="ai21">AI21</option> <option value="ai21">AI21</option>
<option value="palm">Google PaLM 2</option> <option value="makersuite">Google MakerSuite</option>
<option value="mistralai">MistralAI</option>
<option value="togetherai">TogetherAI</option> <option value="togetherai">TogetherAI</option>
</select> </select>
<form id="openai_form" data-source="openai" action="javascript:void(null);" method="post" enctype="multipart/form-data"> <form id="openai_form" data-source="openai" action="javascript:void(null);" method="post" enctype="multipart/form-data">
@ -2113,17 +2109,44 @@
</select> </select>
</div> </div>
</form> </form>
<form id="palm_form" data-source="palm" action="javascript:void(null);" method="post" enctype="multipart/form-data"> <form id="makersuite_form" data-source="makersuite" action="javascript:void(null);" method="post" enctype="multipart/form-data">
<h4 data-i18n="PaLM API Key">PaLM API Key</h4> <h4 data-i18n="MakerSuite API Key">MakerSuite API Key</h4>
<div class="flex-container"> <div class="flex-container">
<input id="api_key_palm" name="api_key_palm" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off"> <input id="api_key_makersuite" name="api_key_makersuite" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
<div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_palm"></div> <div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_makersuite"></div>
</div> </div>
<div data-for="api_key_palm" class="neutral_warning"> <div data-for="api_key_makersuite" class="neutral_warning">
For privacy reasons, your API key will be hidden after you reload the page. For privacy reasons, your API key will be hidden after you reload the page.
</div> </div>
<!-- Its only purpose is to trigger max context size check --> <div>
<select id="model_palm_select" class="displayNone"></select> <h4 data-i18n="Google Model">Google Model</h4>
<select id="model_google_select">
<option value="gemini-pro">Gemini Pro</option>
<option value="gemini-pro-vision">Gemini Pro Vision</option>
<option value="text-bison-001">Bison Text</option>
<option value="chat-bison-001">Bison Chat</option>
</select>
</div>
</form>
<form id="mistralai_form" data-source="mistralai" action="javascript:void(null);" method="post" enctype="multipart/form-data">
<h4 data-i18n="MistralAI API Key">MistralAI API Key</h4>
<div class="flex-container">
<input id="api_key_mistralai" name="api_key_mistralai" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
<div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_mistralai"></div>
</div>
<div data-for="api_key_mistralai" class="neutral_warning">
For privacy reasons, your API key will be hidden after you reload the page.
</div>
<div>
<h4 data-i18n="MistralAI Model">MistralAI Model</h4>
<select id="model_mistralai_select">
<optgroup label="Latest">
<option value="mistral-tiny">mistral-tiny</option>
<option value="mistral-small">mistral-small</option>
<option value="mistral-medium">mistral-medium</option>
</optgroup>
</select>
</div>
</form> </form>
<form id="togetherai_form" data-source="togetherai" action="javascript:void(null);" method="post" enctype="multipart/form-data"> <form id="togetherai_form" data-source="togetherai" action="javascript:void(null);" method="post" enctype="multipart/form-data">
<h4 data-i18n="TogetherAI API Key">TogetherAI API Key</h4> <h4 data-i18n="TogetherAI API Key">TogetherAI API Key</h4>
@ -2792,9 +2815,12 @@
<div class="flex-container flexnowrap alignitemscenter"> <div class="flex-container flexnowrap alignitemscenter">
<select id="themes" class="margin0"> <select id="themes" class="margin0">
</select> </select>
<div id="ui-preset-save-button" title="Save changes to a new theme file" data-i18n="[title]Save changes to a new theme file" class="menu_button margin0"> <div id="ui-preset-update-button" title="Update a theme file" data-i18n="[title]Update a theme file" class="menu_button margin0">
<i class="fa-solid fa-save"></i> <i class="fa-solid fa-save"></i>
</div> </div>
<div id="ui-preset-save-button" title="Save as a new theme" data-i18n="[title]Save as a new theme" class="menu_button margin0">
<i class="fa-solid fa-paste"></i>
</div>
</div> </div>
</div> </div>
<div name="themeElements" data-newbie-hidden class="flex-container flexFlowColumn flexNoGap"> <div name="themeElements" data-newbie-hidden class="flex-container flexFlowColumn flexNoGap">
@ -2921,12 +2947,29 @@
</div> </div>
</div> </div>
</div> </div>
<div id="streaming-fps" class="range-block">
<div class="range-block-title" data-i18n="Streaming FPS">
Streaming FPS
</div>
<div class="range-block-range-and-counter">
<div class="range-block-range">
<input type="range" id="streaming_fps" name="streaming_fps" min="5" max="100" step="5">
</div>
<div class="range-block-counter">
<input type="number" min="5" max="100" step="1" data-for="streaming_fps" id="streaming_fps_counter">
</div>
</div>
</div>
</div> </div>
</div> </div>
</div> </div>
<div name="UserSettingsSecondColumn" id="UI-Customization" class="flex-container flexFlowColumn wide100p flexNoGap"> <div name="UserSettingsSecondColumn" id="UI-Customization" class="flex-container flexFlowColumn wide100p flexNoGap">
<div name="themeToggles"> <div name="themeToggles">
<h4 data-i18n="Theme Toggles">Theme Toggles</h4> <h4 data-i18n="Theme Toggles">Theme Toggles</h4>
<label for="reduced_motion" class="checkbox_label" title="Disables animations and transitions" data-i18n="[title]Disables animations and transitions">
<input id="reduced_motion" type="checkbox" />
<span data-i18n="Reduced Motion">Reduced Motion</span>
</label>
<label data-newbie-hidden for="fast_ui_mode" class="checkbox_label" title="removes blur from window backgrounds" data-i18n="[title]removes blur from window backgrounds"> <label data-newbie-hidden for="fast_ui_mode" class="checkbox_label" title="removes blur from window backgrounds" data-i18n="[title]removes blur from window backgrounds">
<input id="fast_ui_mode" type="checkbox" /> <input id="fast_ui_mode" type="checkbox" />
<span data-i18n="No Blur Effect">No Blur Effect</span> <span data-i18n="No Blur Effect">No Blur Effect</span>
@ -2971,6 +3014,10 @@
<input id="messageTokensEnabled" type="checkbox" /> <input id="messageTokensEnabled" type="checkbox" />
<span data-i18n="Show Message Token Count">Message Token Count</span> <span data-i18n="Show Message Token Count">Message Token Count</span>
</label> </label>
<label for="compact_input_area" class="checkbox_label">
<input id="compact_input_area" type="checkbox" />
<span data-i18n="Compact Input Area (Mobile)">Compact Input Area&nbsp;<i class="fa-solid fa-mobile-screen-button"></i></span>
</label>
<label data-newbie-hidden for="hotswapEnabled" class="checkbox_label"> <label data-newbie-hidden for="hotswapEnabled" class="checkbox_label">
<input id="hotswapEnabled" type="checkbox" /> <input id="hotswapEnabled" type="checkbox" />
<span data-i18n="Characters Hotswap">Characters Hotswap</span> <span data-i18n="Characters Hotswap">Characters Hotswap</span>
@ -3622,6 +3669,7 @@
<label id="rm_group_automode_label" class="checkbox_label whitespacenowrap"> <label id="rm_group_automode_label" class="checkbox_label whitespacenowrap">
<input id="rm_group_automode" type="checkbox" /> <input id="rm_group_automode" type="checkbox" />
<span data-i18n="Auto Mode">Auto Mode</span> <span data-i18n="Auto Mode">Auto Mode</span>
<input id="rm_group_automode_delay" class="text_pole textarea_compact widthUnset" type="number" min="1" max="999" step="1" value="5" title="Auto Mode delay" />
</label> </label>
<label id="rm_group_hidemutedsprites_label" class="checkbox_label whitespacenowrap"> <label id="rm_group_hidemutedsprites_label" class="checkbox_label whitespacenowrap">
<input id="rm_group_hidemutedsprites" type="checkbox" /> <input id="rm_group_hidemutedsprites" type="checkbox" />
@ -3794,7 +3842,7 @@
<span data-i18n="Personality summary">Personality summary</span> <span data-i18n="Personality summary">Personality summary</span>
<a href="https://docs.sillytavern.app/usage/core-concepts/characterdesign/#personality-summary" class="notes-link" target="_blank"><span class="fa-solid fa-circle-question note-link-span"></span></a> <a href="https://docs.sillytavern.app/usage/core-concepts/characterdesign/#personality-summary" class="notes-link" target="_blank"><span class="fa-solid fa-circle-question note-link-span"></span></a>
</h4> </h4>
<textarea id="personality_textarea" name="personality" data-i18n="[placeholder](A brief description of the personality)" placeholder="(A brief description of the personality)" form="form_create" class="text_pole" autocomplete="off" rows="1" maxlength="50000"></textarea> <textarea id="personality_textarea" name="personality" data-i18n="[placeholder](A brief description of the personality)" placeholder="(A brief description of the personality)" form="form_create" class="text_pole" autocomplete="off" rows="4" maxlength="50000"></textarea>
<div class="extension_token_counter"> <div class="extension_token_counter">
Tokens: <span data-token-counter="personality_textarea" data-token-permanent="true">counting...</span> Tokens: <span data-token-counter="personality_textarea" data-token-permanent="true">counting...</span>
</div> </div>
@ -3806,7 +3854,7 @@
<span class="fa-solid fa-circle-question note-link-span"></span> <span class="fa-solid fa-circle-question note-link-span"></span>
</a> </a>
</h4> </h4>
<textarea id="scenario_pole" name="scenario" data-i18n="[placeholder](Circumstances and context of the interaction)" placeholder="(Circumstances and context of the interaction)" class="text_pole" maxlength="50000" value="" autocomplete="off" form="form_create" rows="1"></textarea> <textarea id="scenario_pole" name="scenario" data-i18n="[placeholder](Circumstances and context of the interaction)" placeholder="(Circumstances and context of the interaction)" class="text_pole" maxlength="50000" value="" autocomplete="off" form="form_create" rows="4"></textarea>
<div class="extension_token_counter"> <div class="extension_token_counter">
Tokens: <span data-token-counter="scenario_pole" data-token-permanent="true">counting...</span> Tokens: <span data-token-counter="scenario_pole" data-token-permanent="true">counting...</span>
</div> </div>

14
public/lib/bowser.min.js vendored Normal file

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

View File

@ -36,6 +36,7 @@ import { chat_completion_sources, oai_settings } from './openai.js';
import { getTokenCount } from './tokenizers.js'; import { getTokenCount } from './tokenizers.js';
import { textgen_types, textgenerationwebui_settings as textgen_settings } from './textgen-settings.js'; import { textgen_types, textgenerationwebui_settings as textgen_settings } from './textgen-settings.js';
import Bowser from '../lib/bowser.min.js';
var RPanelPin = document.getElementById('rm_button_panel_pin'); var RPanelPin = document.getElementById('rm_button_panel_pin');
var LPanelPin = document.getElementById('lm_button_panel_pin'); var LPanelPin = document.getElementById('lm_button_panel_pin');
@ -98,43 +99,22 @@ export function humanizeGenTime(total_gen_time) {
return time_spent; return time_spent;
} }
let parsedUA = null;
try {
parsedUA = Bowser.parse(navigator.userAgent);
} catch {
// In case the user agent is an empty string or Bowser can't parse it for some other reason
}
/** /**
* Checks if the device is a mobile device. * Checks if the device is a mobile device.
* @returns {boolean} - True if the device is a mobile device, false otherwise. * @returns {boolean} - True if the device is a mobile device, false otherwise.
*/ */
export function isMobile() { export function isMobile() {
const mobileTypes = ['smartphone', 'tablet', 'phablet', 'feature phone', 'portable media player']; const mobileTypes = ['mobile', 'tablet'];
const deviceInfo = getDeviceInfo();
return mobileTypes.includes(deviceInfo?.device?.type); return mobileTypes.includes(parsedUA?.platform?.type);
}
/**
* Loads device info from the server. Caches the result in sessionStorage.
* @returns {object} - The device info object.
*/
export function getDeviceInfo() {
let deviceInfo = null;
if (sessionStorage.getItem('deviceInfo')) {
deviceInfo = JSON.parse(sessionStorage.getItem('deviceInfo'));
} else {
$.ajax({
url: '/deviceinfo',
dataType: 'json',
async: false,
cache: true,
success: function (result) {
sessionStorage.setItem('deviceInfo', JSON.stringify(result));
deviceInfo = result;
},
error: function () {
console.log('Couldn\'t load device info. Defaulting to desktop');
deviceInfo = { device: { type: 'desktop' } };
},
});
}
return deviceInfo;
} }
function shouldSendOnEnter() { function shouldSendOnEnter() {
@ -415,7 +395,8 @@ function RA_autoconnect(PrevApi) {
|| (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI) || (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI)
|| (secret_state[SECRET_KEYS.OPENROUTER] && oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER) || (secret_state[SECRET_KEYS.OPENROUTER] && oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER)
|| (secret_state[SECRET_KEYS.AI21] && oai_settings.chat_completion_source == chat_completion_sources.AI21) || (secret_state[SECRET_KEYS.AI21] && oai_settings.chat_completion_source == chat_completion_sources.AI21)
|| (secret_state[SECRET_KEYS.PALM] && oai_settings.chat_completion_source == chat_completion_sources.PALM) || (secret_state[SECRET_KEYS.MAKERSUITE] && oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE)
|| (secret_state[SECRET_KEYS.MISTRALAI] && oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI)
|| (secret_state[SECRET_KEYS.TOGETHERAI] && oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI) || (secret_state[SECRET_KEYS.TOGETHERAI] && oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI)
) { ) {
$('#api_button_openai').trigger('click'); $('#api_button_openai').trigger('click');
@ -432,8 +413,7 @@ function RA_autoconnect(PrevApi) {
} }
function OpenNavPanels() { function OpenNavPanels() {
const deviceInfo = getDeviceInfo(); if (!isMobile()) {
if (deviceInfo && deviceInfo.device.type === 'desktop') {
//auto-open R nav if locked and previously open //auto-open R nav if locked and previously open
if (LoadLocalBool('NavLockOn') == true && LoadLocalBool('NavOpened') == true) { if (LoadLocalBool('NavLockOn') == true && LoadLocalBool('NavOpened') == true) {
//console.log("RA -- clicking right nav to open"); //console.log("RA -- clicking right nav to open");
@ -509,7 +489,7 @@ export function dragElement(elmnt) {
|| Number((String(target.height).replace('px', ''))) < 50 || Number((String(target.height).replace('px', ''))) < 50
|| Number((String(target.width).replace('px', ''))) < 50 || Number((String(target.width).replace('px', ''))) < 50
|| power_user.movingUI === false || power_user.movingUI === false
|| isMobile() === true || isMobile()
) { ) {
console.debug('aborting mutator'); console.debug('aborting mutator');
return; return;
@ -717,7 +697,7 @@ export function dragElement(elmnt) {
} }
export async function initMovingUI() { export async function initMovingUI() {
if (isMobile() === false && power_user.movingUI === true) { if (!isMobile() && power_user.movingUI === true) {
console.debug('START MOVING UI'); console.debug('START MOVING UI');
dragElement($('#sheld')); dragElement($('#sheld'));
dragElement($('#left-nav-panel')); dragElement($('#left-nav-panel'));
@ -903,7 +883,7 @@ export function initRossMods() {
const chatBlock = $('#chat'); const chatBlock = $('#chat');
const originalScrollBottom = chatBlock[0].scrollHeight - (chatBlock.scrollTop() + chatBlock.outerHeight()); const originalScrollBottom = chatBlock[0].scrollHeight - (chatBlock.scrollTop() + chatBlock.outerHeight());
this.style.height = window.getComputedStyle(this).getPropertyValue('min-height'); this.style.height = window.getComputedStyle(this).getPropertyValue('min-height');
this.style.height = this.scrollHeight + 0.1 + 'px'; this.style.height = this.scrollHeight + 0.3 + 'px';
if (!isFirefox) { if (!isFirefox) {
const newScrollTop = Math.round(chatBlock[0].scrollHeight - (chatBlock.outerHeight() + originalScrollBottom)); const newScrollTop = Math.round(chatBlock[0].scrollHeight - (chatBlock.outerHeight() + originalScrollBottom));
@ -1124,12 +1104,14 @@ export function initRossMods() {
.not('#left-nav-panel') .not('#left-nav-panel')
.not('#right-nav-panel') .not('#right-nav-panel')
.not('#floatingPrompt') .not('#floatingPrompt')
.not('#cfgConfig')
.is(':visible')) { .is(':visible')) {
let visibleDrawerContent = $('.drawer-content:visible') let visibleDrawerContent = $('.drawer-content:visible')
.not('#WorldInfo') .not('#WorldInfo')
.not('#left-nav-panel') .not('#left-nav-panel')
.not('#right-nav-panel') .not('#right-nav-panel')
.not('#floatingPrompt'); .not('#floatingPrompt')
.not('#cfgConfig');
$(visibleDrawerContent).parent().find('.drawer-icon').trigger('click'); $(visibleDrawerContent).parent().find('.drawer-icon').trigger('click');
return; return;
} }
@ -1144,6 +1126,11 @@ export function initRossMods() {
return; return;
} }
if ($('#cfgConfig').is(':visible')) {
$('#CFGClose').trigger('click');
return;
}
if ($('#left-nav-panel').is(':visible') && if ($('#left-nav-panel').is(':visible') &&
$(LPanelPin).prop('checked') === false) { $(LPanelPin).prop('checked') === false) {
$('#leftNavDrawerIcon').trigger('click'); $('#leftNavDrawerIcon').trigger('click');

View File

@ -1,4 +1,5 @@
import { import {
animation_duration,
chat_metadata, chat_metadata,
eventSource, eventSource,
event_types, event_types,
@ -312,7 +313,7 @@ export function setFloatingPrompt() {
} }
} }
} }
context.setExtensionPrompt(MODULE_NAME, prompt, chat_metadata[metadata_keys.position], chat_metadata[metadata_keys.depth]); context.setExtensionPrompt(MODULE_NAME, prompt, chat_metadata[metadata_keys.position], chat_metadata[metadata_keys.depth], extension_settings.note.allowWIScan);
$('#extension_floating_counter').text(shouldAddPrompt ? '0' : messagesTillInsertion); $('#extension_floating_counter').text(shouldAddPrompt ? '0' : messagesTillInsertion);
} }
@ -325,7 +326,7 @@ function onANMenuItemClick() {
$('#floatingPrompt').css('opacity', 0.0); $('#floatingPrompt').css('opacity', 0.0);
$('#floatingPrompt').transition({ $('#floatingPrompt').transition({
opacity: 1.0, opacity: 1.0,
duration: 250, duration: animation_duration,
}, async function () { }, async function () {
await delay(50); await delay(50);
$('#floatingPrompt').removeClass('resizing'); $('#floatingPrompt').removeClass('resizing');
@ -343,7 +344,7 @@ function onANMenuItemClick() {
$('#floatingPrompt').addClass('resizing'); $('#floatingPrompt').addClass('resizing');
$('#floatingPrompt').transition({ $('#floatingPrompt').transition({
opacity: 0.0, opacity: 0.0,
duration: 250, duration: animation_duration,
}, },
async function () { async function () {
await delay(50); await delay(50);
@ -351,12 +352,12 @@ function onANMenuItemClick() {
}); });
setTimeout(function () { setTimeout(function () {
$('#floatingPrompt').hide(); $('#floatingPrompt').hide();
}, 250); }, animation_duration);
} }
//duplicate options menu close handler from script.js //duplicate options menu close handler from script.js
//because this listener takes priority //because this listener takes priority
$('#options').stop().fadeOut(250); $('#options').stop().fadeOut(animation_duration);
} else { } else {
toastr.warning('Select a character before trying to use Author\'s Note', '', { timeOut: 2000 }); toastr.warning('Select a character before trying to use Author\'s Note', '', { timeOut: 2000 });
} }
@ -415,10 +416,10 @@ export function initAuthorsNote() {
$('#ANClose').on('click', function () { $('#ANClose').on('click', function () {
$('#floatingPrompt').transition({ $('#floatingPrompt').transition({
opacity: 0, opacity: 0,
duration: 200, duration: animation_duration,
easing: 'ease-in-out', easing: 'ease-in-out',
}); });
setTimeout(function () { $('#floatingPrompt').hide(); }, 200); setTimeout(function () { $('#floatingPrompt').hide(); }, animation_duration);
}); });
$('#option_toggle_AN').on('click', onANMenuItemClick); $('#option_toggle_AN').on('click', onANMenuItemClick);

View File

@ -1,4 +1,4 @@
import { callPopup, chat_metadata, eventSource, event_types, generateQuietPrompt, getCurrentChatId, getRequestHeaders, getThumbnailUrl } from '../script.js'; import { callPopup, chat_metadata, eventSource, event_types, generateQuietPrompt, getCurrentChatId, getRequestHeaders, getThumbnailUrl, saveSettingsDebounced } from '../script.js';
import { saveMetadataDebounced } from './extensions.js'; import { saveMetadataDebounced } from './extensions.js';
import { registerSlashCommand } from './slash-commands.js'; import { registerSlashCommand } from './slash-commands.js';
import { stringFormat } from './utils.js'; import { stringFormat } from './utils.js';
@ -6,6 +6,19 @@ import { stringFormat } from './utils.js';
const BG_METADATA_KEY = 'custom_background'; const BG_METADATA_KEY = 'custom_background';
const LIST_METADATA_KEY = 'chat_backgrounds'; const LIST_METADATA_KEY = 'chat_backgrounds';
export let background_settings = {
name: '__transparent.png',
url: generateUrlParameter('__transparent.png', false),
};
export function loadBackgroundSettings(settings) {
let backgroundSettings = settings.background;
if (!backgroundSettings || !backgroundSettings.name || !backgroundSettings.url) {
backgroundSettings = background_settings;
}
setBackground(backgroundSettings.name, backgroundSettings.url);
}
/** /**
* Sets the background for the current chat and adds it to the list of custom backgrounds. * Sets the background for the current chat and adds it to the list of custom backgrounds.
* @param {{url: string, path:string}} backgroundInfo * @param {{url: string, path:string}} backgroundInfo
@ -141,9 +154,8 @@ function onSelectBackgroundClick() {
saveBackgroundMetadata(relativeBgImage); saveBackgroundMetadata(relativeBgImage);
setCustomBackground(); setCustomBackground();
highlightLockedBackground(); highlightLockedBackground();
} else {
highlightLockedBackground();
} }
highlightLockedBackground();
const customBg = window.getComputedStyle(document.getElementById('bg_custom')).backgroundImage; const customBg = window.getComputedStyle(document.getElementById('bg_custom')).backgroundImage;
@ -157,8 +169,7 @@ function onSelectBackgroundClick() {
// Fetching to browser memory to reduce flicker // Fetching to browser memory to reduce flicker
fetch(backgroundUrl).then(() => { fetch(backgroundUrl).then(() => {
$('#bg1').css('background-image', relativeBgImage); setBackground(bgFile, relativeBgImage);
setBackground(bgFile);
}).catch(() => { }).catch(() => {
console.log('Background could not be set: ' + backgroundUrl); console.log('Background could not be set: ' + backgroundUrl);
}); });
@ -333,7 +344,7 @@ export async function getBackgrounds() {
'': '', '': '',
}), }),
}); });
if (response.ok === true) { if (response.ok) {
const getData = await response.json(); const getData = await response.json();
//background = getData; //background = getData;
//console.log(getData.length); //console.log(getData.length);
@ -346,7 +357,7 @@ export async function getBackgrounds() {
} }
/** /**
* Gets the URL of the background * Gets the CSS URL of the background
* @param {Element} block * @param {Element} block
* @returns {string} URL of the background * @returns {string} URL of the background
*/ */
@ -354,6 +365,10 @@ function getUrlParameter(block) {
return $(block).closest('.bg_example').data('url'); return $(block).closest('.bg_example').data('url');
} }
function generateUrlParameter(bg, isCustom) {
return isCustom ? `url("${encodeURI(bg)}")` : `url("${getBackgroundPath(bg)}")`;
}
/** /**
* Instantiates a background template * Instantiates a background template
* @param {string} bg Path to background * @param {string} bg Path to background
@ -363,7 +378,7 @@ function getUrlParameter(block) {
function getBackgroundFromTemplate(bg, isCustom) { function getBackgroundFromTemplate(bg, isCustom) {
const template = $('#background_template .bg_example').clone(); const template = $('#background_template .bg_example').clone();
const thumbPath = isCustom ? bg : getThumbnailUrl('bg', bg); const thumbPath = isCustom ? bg : getThumbnailUrl('bg', bg);
const url = isCustom ? `url("${encodeURI(bg)}")` : `url("${getBackgroundPath(bg)}")`; const url = generateUrlParameter(bg, isCustom);
const title = isCustom ? bg.split('/').pop() : bg; const title = isCustom ? bg.split('/').pop() : bg;
const friendlyTitle = title.slice(0, title.lastIndexOf('.')); const friendlyTitle = title.slice(0, title.lastIndexOf('.'));
template.attr('title', title); template.attr('title', title);
@ -375,26 +390,11 @@ function getBackgroundFromTemplate(bg, isCustom) {
return template; return template;
} }
async function setBackground(bg) { async function setBackground(bg, url) {
jQuery.ajax({ $('#bg1').css('background-image', url);
type: 'POST', // background_settings.name = bg;
url: '/api/backgrounds/set', // background_settings.url = url;
data: JSON.stringify({ saveSettingsDebounced();
bg: bg,
}),
beforeSend: function () {
},
cache: false,
dataType: 'json',
contentType: 'application/json',
//processData: false,
success: function (html) { },
error: function (jqXHR, exception) {
console.log(exception);
console.log(jqXHR);
},
});
} }
async function delBackground(bg) { async function delBackground(bg) {
@ -435,8 +435,7 @@ function uploadBackground(formData) {
contentType: false, contentType: false,
processData: false, processData: false,
success: async function (bg) { success: async function (bg) {
setBackground(bg); setBackground(bg, generateUrlParameter(bg, false));
$('#bg1').css('background-image', `url("${getBackgroundPath(bg)}"`);
await getBackgrounds(); await getBackgrounds();
highlightNewBackground(bg); highlightNewBackground(bg);
}, },

View File

@ -5,6 +5,7 @@ import {
eventSource, eventSource,
event_types, event_types,
saveSettingsDebounced, saveSettingsDebounced,
animation_duration,
} from '../script.js'; } from '../script.js';
import { extension_settings, saveMetadataDebounced } from './extensions.js'; import { extension_settings, saveMetadataDebounced } from './extensions.js';
import { selected_group } from './group-chats.js'; import { selected_group } from './group-chats.js';
@ -120,7 +121,7 @@ function onCfgMenuItemClick() {
$('#cfgConfig').css('opacity', 0.0); $('#cfgConfig').css('opacity', 0.0);
$('#cfgConfig').transition({ $('#cfgConfig').transition({
opacity: 1.0, opacity: 1.0,
duration: 250, duration: animation_duration,
}, async function () { }, async function () {
await delay(50); await delay(50);
$('#cfgConfig').removeClass('resizing'); $('#cfgConfig').removeClass('resizing');
@ -138,7 +139,7 @@ function onCfgMenuItemClick() {
$('#cfgConfig').addClass('resizing'); $('#cfgConfig').addClass('resizing');
$('#cfgConfig').transition({ $('#cfgConfig').transition({
opacity: 0.0, opacity: 0.0,
duration: 250, duration: animation_duration,
}, },
async function () { async function () {
await delay(50); await delay(50);
@ -146,12 +147,12 @@ function onCfgMenuItemClick() {
}); });
setTimeout(function () { setTimeout(function () {
$('#cfgConfig').hide(); $('#cfgConfig').hide();
}, 250); }, animation_duration);
} }
//duplicate options menu close handler from script.js //duplicate options menu close handler from script.js
//because this listener takes priority //because this listener takes priority
$('#options').stop().fadeOut(250); $('#options').stop().fadeOut(animation_duration);
} else { } else {
toastr.warning('Select a character before trying to configure CFG', '', { timeOut: 2000 }); toastr.warning('Select a character before trying to configure CFG', '', { timeOut: 2000 });
} }
@ -281,10 +282,10 @@ export function initCfg() {
$('#CFGClose').on('click', function () { $('#CFGClose').on('click', function () {
$('#cfgConfig').transition({ $('#cfgConfig').transition({
opacity: 0, opacity: 0,
duration: 200, duration: animation_duration,
easing: 'ease-in-out', easing: 'ease-in-out',
}); });
setTimeout(function () { $('#cfgConfig').hide(); }, 200); setTimeout(function () { $('#cfgConfig').hide(); }, animation_duration);
}); });
$('#chat_cfg_guidance_scale').on('input', function() { $('#chat_cfg_guidance_scale').on('input', function() {

View File

@ -341,6 +341,25 @@ function embedMessageFile(messageId, messageBlock) {
} }
} }
/**
* Appends file content to the message text.
* @param {object} message Message object
* @param {string} messageText Message text
* @returns {Promise<string>} Message text with file content appended.
*/
export async function appendFileContent(message, messageText) {
if (message.extra?.file) {
const fileText = message.extra.file.text || (await getFileAttachment(message.extra.file.url));
if (fileText) {
const fileWrapped = `\`\`\`\n${fileText}\n\`\`\`\n\n`;
message.extra.fileLength = fileWrapped.length;
messageText = fileWrapped + messageText;
}
}
return messageText;
}
jQuery(function () { jQuery(function () {
$(document).on('click', '.mes_hide', async function () { $(document).on('click', '.mes_hide', async function () {
const messageBlock = $(this).closest('.mes'); const messageBlock = $(this).closest('.mes');
@ -380,6 +399,7 @@ jQuery(function () {
$(document).on('click', '.editor_maximize', function () { $(document).on('click', '.editor_maximize', function () {
const broId = $(this).attr('data-for'); const broId = $(this).attr('data-for');
const bro = $(`#${broId}`); const bro = $(`#${broId}`);
const withTab = $(this).attr('data-tab');
if (!bro.length) { if (!bro.length) {
console.error('Could not find editor with id', broId); console.error('Could not find editor with id', broId);
@ -392,11 +412,41 @@ jQuery(function () {
const textarea = document.createElement('textarea'); const textarea = document.createElement('textarea');
textarea.value = String(bro.val()); textarea.value = String(bro.val());
textarea.classList.add('height100p', 'wide100p'); textarea.classList.add('height100p', 'wide100p');
textarea.oninput = function () { textarea.addEventListener('input', function () {
bro.val(textarea.value).trigger('input'); bro.val(textarea.value).trigger('input');
}; });
wrapper.appendChild(textarea); wrapper.appendChild(textarea);
if (withTab) {
textarea.addEventListener('keydown', (evt) => {
if (evt.key == 'Tab' && !evt.shiftKey && !evt.ctrlKey && !evt.altKey) {
evt.preventDefault();
const start = textarea.selectionStart;
const end = textarea.selectionEnd;
if (end - start > 0 && textarea.value.substring(start, end).includes('\n')) {
const lineStart = textarea.value.lastIndexOf('\n', start);
const count = textarea.value.substring(lineStart, end).split('\n').length - 1;
textarea.value = `${textarea.value.substring(0, lineStart)}${textarea.value.substring(lineStart, end).replace(/\n/g, '\n\t')}${textarea.value.substring(end)}`;
textarea.selectionStart = start + 1;
textarea.selectionEnd = end + count;
} else {
textarea.value = `${textarea.value.substring(0, start)}\t${textarea.value.substring(end)}`;
textarea.selectionStart = start + 1;
textarea.selectionEnd = end + 1;
}
} else if (evt.key == 'Tab' && evt.shiftKey && !evt.ctrlKey && !evt.altKey) {
evt.preventDefault();
const start = textarea.selectionStart;
const end = textarea.selectionEnd;
const lineStart = textarea.value.lastIndexOf('\n', start);
const count = textarea.value.substring(lineStart, end).split('\n\t').length - 1;
textarea.value = `${textarea.value.substring(0, lineStart)}${textarea.value.substring(lineStart, end).replace(/\n\t/g, '\n')}${textarea.value.substring(end)}`;
textarea.selectionStart = start - 1;
textarea.selectionEnd = end - count;
}
});
}
callPopup(wrapper, 'text', '', { wide: true, large: true }); callPopup(wrapper, 'text', '', { wide: true, large: true });
}); });

View File

@ -879,7 +879,7 @@ async function runGenerationInterceptors(chat, contextSize) {
exitImmediately = immediately; exitImmediately = immediately;
}; };
for (const manifest of Object.values(manifests)) { for (const manifest of Object.values(manifests).sort((a, b) => a.loading_order - b.loading_order)) {
const interceptorKey = manifest.generate_interceptor; const interceptorKey = manifest.generate_interceptor;
if (typeof window[interceptorKey] === 'function') { if (typeof window[interceptorKey] === 'function') {
try { try {

View File

@ -134,7 +134,7 @@ async function doCaptionRequest(base64Img, fileData) {
case 'horde': case 'horde':
return await captionHorde(base64Img); return await captionHorde(base64Img);
case 'multimodal': case 'multimodal':
return await captionMultimodal(fileData); return await captionMultimodal(extension_settings.caption.multimodal_api === 'google' ? base64Img : fileData);
default: default:
throw new Error('Unknown caption source.'); throw new Error('Unknown caption source.');
} }
@ -273,6 +273,7 @@ jQuery(function () {
(modules.includes('caption') && extension_settings.caption.source === 'extras') || (modules.includes('caption') && extension_settings.caption.source === 'extras') ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openai' && secret_state[SECRET_KEYS.OPENAI]) || (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openai' && secret_state[SECRET_KEYS.OPENAI]) ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openrouter' && secret_state[SECRET_KEYS.OPENROUTER]) || (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openrouter' && secret_state[SECRET_KEYS.OPENROUTER]) ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'google' && secret_state[SECRET_KEYS.MAKERSUITE]) ||
extension_settings.caption.source === 'local' || extension_settings.caption.source === 'local' ||
extension_settings.caption.source === 'horde'; extension_settings.caption.source === 'horde';
@ -328,7 +329,7 @@ jQuery(function () {
<label for="caption_source">Source</label> <label for="caption_source">Source</label>
<select id="caption_source" class="text_pole"> <select id="caption_source" class="text_pole">
<option value="local">Local</option> <option value="local">Local</option>
<option value="multimodal">Multimodal (OpenAI / OpenRouter)</option> <option value="multimodal">Multimodal (OpenAI / OpenRouter / Google)</option>
<option value="extras">Extras</option> <option value="extras">Extras</option>
<option value="horde">Horde</option> <option value="horde">Horde</option>
</select> </select>
@ -338,12 +339,14 @@ jQuery(function () {
<select id="caption_multimodal_api" class="flex1 text_pole"> <select id="caption_multimodal_api" class="flex1 text_pole">
<option value="openai">OpenAI</option> <option value="openai">OpenAI</option>
<option value="openrouter">OpenRouter</option> <option value="openrouter">OpenRouter</option>
<option value="google">Google</option>
</select> </select>
</div> </div>
<div class="flex1 flex-container flexFlowColumn flexNoGap"> <div class="flex1 flex-container flexFlowColumn flexNoGap">
<label for="caption_multimodal_model">Model</label> <label for="caption_multimodal_model">Model</label>
<select id="caption_multimodal_model" class="flex1 text_pole"> <select id="caption_multimodal_model" class="flex1 text_pole">
<option data-type="openai" value="gpt-4-vision-preview">gpt-4-vision-preview</option> <option data-type="openai" value="gpt-4-vision-preview">gpt-4-vision-preview</option>
<option data-type="google" value="gemini-pro-vision">gemini-pro-vision</option>
<option data-type="openrouter" value="openai/gpt-4-vision-preview">openai/gpt-4-vision-preview</option> <option data-type="openrouter" value="openai/gpt-4-vision-preview">openai/gpt-4-vision-preview</option>
<option data-type="openrouter" value="haotian-liu/llava-13b">haotian-liu/llava-13b</option> <option data-type="openrouter" value="haotian-liu/llava-13b">haotian-liu/llava-13b</option>
</select> </select>

View File

@ -1,6 +1,6 @@
import { getStringHash, debounce, waitUntilCondition, extractAllWords } from '../../utils.js'; import { getStringHash, debounce, waitUntilCondition, extractAllWords } from '../../utils.js';
import { getContext, getApiUrl, extension_settings, doExtrasFetch, modules } from '../../extensions.js'; import { getContext, getApiUrl, extension_settings, doExtrasFetch, modules } from '../../extensions.js';
import { eventSource, event_types, extension_prompt_types, generateQuietPrompt, is_send_press, saveSettingsDebounced, substituteParams } from '../../../script.js'; import { animation_duration, eventSource, event_types, extension_prompt_types, generateQuietPrompt, is_send_press, saveSettingsDebounced, substituteParams } from '../../../script.js';
import { is_group_generating, selected_group } from '../../group-chats.js'; import { is_group_generating, selected_group } from '../../group-chats.js';
import { registerSlashCommand } from '../../slash-commands.js'; import { registerSlashCommand } from '../../slash-commands.js';
import { loadMovingUIState } from '../../power-user.js'; import { loadMovingUIState } from '../../power-user.js';
@ -109,16 +109,21 @@ function loadSettings() {
$('#memory_depth').val(extension_settings.memory.depth).trigger('input'); $('#memory_depth').val(extension_settings.memory.depth).trigger('input');
$(`input[name="memory_position"][value="${extension_settings.memory.position}"]`).prop('checked', true).trigger('input'); $(`input[name="memory_position"][value="${extension_settings.memory.position}"]`).prop('checked', true).trigger('input');
$('#memory_prompt_words_force').val(extension_settings.memory.promptForceWords).trigger('input'); $('#memory_prompt_words_force').val(extension_settings.memory.promptForceWords).trigger('input');
switchSourceControls(extension_settings.memory.source);
} }
function onSummarySourceChange(event) { function onSummarySourceChange(event) {
const value = event.target.value; const value = event.target.value;
extension_settings.memory.source = value; extension_settings.memory.source = value;
switchSourceControls(value);
saveSettingsDebounced();
}
function switchSourceControls(value) {
$('#memory_settings [data-source]').each((_, element) => { $('#memory_settings [data-source]').each((_, element) => {
const source = $(element).data('source'); const source = $(element).data('source');
$(element).toggle(source === value); $(element).toggle(source === value);
}); });
saveSettingsDebounced();
} }
function onMemoryShortInput() { function onMemoryShortInput() {
@ -317,6 +322,11 @@ async function onChatEvent() {
} }
async function forceSummarizeChat() { async function forceSummarizeChat() {
if (extension_settings.memory.source === summary_sources.extras) {
toastr.warning('Force summarization is not supported for Extras API');
return;
}
const context = getContext(); const context = getContext();
const skipWIAN = extension_settings.memory.SkipWIAN; const skipWIAN = extension_settings.memory.SkipWIAN;
@ -589,14 +599,14 @@ function doPopout(e) {
loadSettings(); loadSettings();
loadMovingUIState(); loadMovingUIState();
$('#summaryExtensionPopout').fadeIn(250); $('#summaryExtensionPopout').fadeIn(animation_duration);
dragElement(newElement); dragElement(newElement);
//setup listener for close button to restore extensions menu //setup listener for close button to restore extensions menu
$('#summaryExtensionPopoutClose').off('click').on('click', function () { $('#summaryExtensionPopoutClose').off('click').on('click', function () {
$('#summaryExtensionDrawerContents').removeClass('scrollableInnerFull'); $('#summaryExtensionDrawerContents').removeClass('scrollableInnerFull');
const summaryPopoutHTML = $('#summaryExtensionDrawerContents'); const summaryPopoutHTML = $('#summaryExtensionDrawerContents');
$('#summaryExtensionPopout').fadeOut(250, () => { $('#summaryExtensionPopout').fadeOut(animation_duration, () => {
originalElement.empty(); originalElement.empty();
originalElement.html(summaryPopoutHTML); originalElement.html(summaryPopoutHTML);
$('#summaryExtensionPopout').remove(); $('#summaryExtensionPopout').remove();
@ -605,7 +615,7 @@ function doPopout(e) {
}); });
} else { } else {
console.debug('saw existing popout, removing'); console.debug('saw existing popout, removing');
$('#summaryExtensionPopout').fadeOut(250, () => { $('#summaryExtensionPopoutClose').trigger('click'); }); $('#summaryExtensionPopout').fadeOut(animation_duration, () => { $('#summaryExtensionPopoutClose').trigger('click'); });
} }
} }
@ -659,7 +669,7 @@ jQuery(function () {
<textarea id="memory_contents" class="text_pole textarea_compact" rows="6" placeholder="Summary will be generated here..."></textarea> <textarea id="memory_contents" class="text_pole textarea_compact" rows="6" placeholder="Summary will be generated here..."></textarea>
<div class="memory_contents_controls"> <div class="memory_contents_controls">
<div id="memory_force_summarize" class="menu_button menu_button_icon"> <div id="memory_force_summarize" data-source="main" class="menu_button menu_button_icon">
<i class="fa-solid fa-database"></i> <i class="fa-solid fa-database"></i>
<span>Summarize now</span> <span>Summarize now</span>
</div> </div>

View File

@ -1,6 +1,6 @@
import { saveSettingsDebounced, callPopup, getRequestHeaders, substituteParams, eventSource, event_types } from '../../../script.js'; import { saveSettingsDebounced, callPopup, getRequestHeaders, substituteParams, eventSource, event_types, animation_duration } from '../../../script.js';
import { getContext, extension_settings } from '../../extensions.js'; import { getContext, extension_settings } from '../../extensions.js';
import { getSortableDelay, escapeHtml } from '../../utils.js'; import { getSortableDelay, escapeHtml, delay } from '../../utils.js';
import { executeSlashCommands, registerSlashCommand } from '../../slash-commands.js'; import { executeSlashCommands, registerSlashCommand } from '../../slash-commands.js';
import { ContextMenu } from './src/ContextMenu.js'; import { ContextMenu } from './src/ContextMenu.js';
import { MenuItem } from './src/MenuItem.js'; import { MenuItem } from './src/MenuItem.js';
@ -26,7 +26,7 @@ const defaultSettings = {
//method from worldinfo //method from worldinfo
async function updateQuickReplyPresetList() { async function updateQuickReplyPresetList() {
const result = await fetch('/getsettings', { const result = await fetch('/api/settings/get', {
method: 'POST', method: 'POST',
headers: getRequestHeaders(), headers: getRequestHeaders(),
body: JSON.stringify({}), body: JSON.stringify({}),
@ -388,7 +388,7 @@ async function doQuickReplyBarPopout() {
}); });
loadMovingUIState(); loadMovingUIState();
$('#quickReplyBarPopout').fadeIn(250); $('#quickReplyBarPopout').fadeIn(animation_duration);
dragElement(newElement); dragElement(newElement);
$('#quickReplyBarPopoutClose').off('click').on('click', function () { $('#quickReplyBarPopoutClose').off('click').on('click', function () {
@ -396,8 +396,8 @@ async function doQuickReplyBarPopout() {
let quickRepliesClone = $('#quickReplies').html(); let quickRepliesClone = $('#quickReplies').html();
$('#quickReplyBar').append(newQuickRepliesDiv); $('#quickReplyBar').append(newQuickRepliesDiv);
$('#quickReplies').prepend(quickRepliesClone); $('#quickReplies').prepend(quickRepliesClone);
$('#quickReplyBar').append(popoutButtonClone).fadeIn(250); $('#quickReplyBar').append(popoutButtonClone).fadeIn(animation_duration);
$('#quickReplyBarPopout').fadeOut(250, () => { $('#quickReplyBarPopout').remove(); }); $('#quickReplyBarPopout').fadeOut(animation_duration, () => { $('#quickReplyBarPopout').remove(); });
$('.quickReplyButton').on('click', function () { $('.quickReplyButton').on('click', function () {
let index = $(this).data('index'); let index = $(this).data('index');
sendQuickReply(index); sendQuickReply(index);
@ -639,7 +639,7 @@ function generateQuickReplyElements() {
<span class="drag-handle ui-sortable-handle">☰</span> <span class="drag-handle ui-sortable-handle">☰</span>
<input class="text_pole wide30p" id="quickReply${i}Label" placeholder="(Button label)"> <input class="text_pole wide30p" id="quickReply${i}Label" placeholder="(Button label)">
<span class="menu_button menu_button_icon" id="quickReply${i}CtxButton" title="Additional options: context menu, auto-execution">⋮</span> <span class="menu_button menu_button_icon" id="quickReply${i}CtxButton" title="Additional options: context menu, auto-execution">⋮</span>
<span class="menu_button menu_button_icon editor_maximize fa-solid fa-maximize" data-for="quickReply${i}Mes" id="quickReply${i}ExpandButton" title="Expand the editor"></span> <span class="menu_button menu_button_icon editor_maximize fa-solid fa-maximize" data-tab="true" data-for="quickReply${i}Mes" id="quickReply${i}ExpandButton" title="Expand the editor"></span>
<textarea id="quickReply${i}Mes" placeholder="(Custom message or /command)" class="text_pole widthUnset flex1" rows="2"></textarea> <textarea id="quickReply${i}Mes" placeholder="(Custom message or /command)" class="text_pole widthUnset flex1" rows="2"></textarea>
</div> </div>
`; `;
@ -717,6 +717,218 @@ function saveQROrder() {
}); });
} }
/**
 * Creates a new Quick Reply in the given (or currently selected) preset.
 * Slash command callback for /qr-create.
 * @param {object} args - Named arguments: label, set, title, load, user, bot, startup, hidden.
 * @param {string} mes - The QR message or /command, with pipes/braces escaped by the parser.
 * @returns {Promise<string>} Empty string (slash commands must return a string).
 */
async function qrCreateCallback(args, mes) {
    const qr = {
        label: args.label ?? '',
        // Unescape characters the slash command parser requires to be escaped
        mes: (mes ?? '')
            .replace(/\\\|/g, '|')
            .replace(/\\\{/g, '{')
            .replace(/\\\}/g, '}')
        ,
        title: args.title ?? '',
        // JSON.parse expects a string; default to the literal 'false' instead of
        // relying on implicit boolean-to-string coercion
        autoExecute_chatLoad: JSON.parse(args.load ?? 'false'),
        autoExecute_userMessage: JSON.parse(args.user ?? 'false'),
        autoExecute_botMessage: JSON.parse(args.bot ?? 'false'),
        autoExecute_appStartup: JSON.parse(args.startup ?? 'false'),
        hidden: JSON.parse(args.hidden ?? 'false'),
    };
    const setName = args.set ?? selected_preset;
    const preset = presets.find(x => x.name == setName);

    if (!preset) {
        toastr.warning('Confirm you are using proper case sensitivity!', `QR preset '${setName}' not found`);
        return '';
    }

    preset.quickReplySlots.push(qr);
    preset.numberOfSlots++;
    await fetch('/savequickreply', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify(preset),
    });
    saveSettingsDebounced();
    // Give the server a moment to persist before refreshing the UI
    await delay(400);
    applyQuickReplyPreset(selected_preset);
    return '';
}
/**
 * Updates an existing Quick Reply identified by its label.
 * Slash command callback for /qr-update.
 * @param {object} args - Named arguments: label, newlabel, set, title, load, user, bot, startup, hidden.
 * @param {string} mes - New message text; falls back to the existing message when omitted.
 * @returns {Promise<string>} Empty string (slash commands must return a string).
 */
async function qrUpdateCallback(args, mes) {
    const setName = args.set ?? selected_preset;
    const preset = presets.find(x => x.name == setName);

    if (!preset) {
        toastr.warning('Confirm you are using proper case sensitivity!', `QR preset '${setName}' not found`);
        return '';
    }

    const idx = preset.quickReplySlots.findIndex(x => x.label == args.label);

    // Guard: a missing label would otherwise crash on an undefined slot
    if (idx === -1) {
        toastr.warning('Confirm you are using proper case sensitivity!', `QR with label '${args.label}' not found`);
        return '';
    }

    const oqr = preset.quickReplySlots[idx];
    const qr = {
        label: args.newlabel ?? oqr.label ?? '',
        // Unescape globally; string-pattern replace would only unescape the first occurrence
        mes: (mes ?? oqr.mes)
            .replace(/\\\|/g, '|')
            .replace(/\\\{/g, '{')
            .replace(/\\\}/g, '}')
        ,
        title: args.title ?? oqr.title ?? '',
        autoExecute_chatLoad: JSON.parse(args.load ?? oqr.autoExecute_chatLoad ?? false),
        autoExecute_userMessage: JSON.parse(args.user ?? oqr.autoExecute_userMessage ?? false),
        autoExecute_botMessage: JSON.parse(args.bot ?? oqr.autoExecute_botMessage ?? false),
        autoExecute_appStartup: JSON.parse(args.startup ?? oqr.autoExecute_appStartup ?? false),
        hidden: JSON.parse(args.hidden ?? oqr.hidden ?? false),
    };
    preset.quickReplySlots[idx] = qr;
    await fetch('/savequickreply', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify(preset),
    });
    saveSettingsDebounced();
    // Give the server a moment to persist before refreshing the UI
    await delay(400);
    applyQuickReplyPreset(selected_preset);
    return '';
}
/**
 * Deletes a Quick Reply identified by its label.
 * Slash command callback for /qr-delete.
 * @param {object} args - Named arguments: set.
 * @param {string} label - Label of the Quick Reply to delete.
 * @returns {Promise<string>} Empty string (slash commands must return a string).
 */
async function qrDeleteCallback(args, label) {
    const setName = args.set ?? selected_preset;
    const preset = presets.find(x => x.name == setName);

    if (!preset) {
        toastr.warning('Confirm you are using proper case sensitivity!', `QR preset '${setName}' not found`);
        return '';
    }

    const idx = preset.quickReplySlots.findIndex(x => x.label == label);

    // Guard: splice(-1, 1) would silently delete the LAST slot if the label is missing
    if (idx === -1) {
        toastr.warning('Confirm you are using proper case sensitivity!', `QR with label '${label}' not found`);
        return '';
    }

    preset.quickReplySlots.splice(idx, 1);
    preset.numberOfSlots--;
    await fetch('/savequickreply', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify(preset),
    });
    saveSettingsDebounced();
    // Give the server a moment to persist before refreshing the UI
    await delay(400);
    applyQuickReplyPreset(selected_preset);
    return '';
}
/**
 * Adds (or updates) a context menu preset entry on a Quick Reply.
 * Slash command callback for /qr-contextadd.
 * @param {object} args - Named arguments: set, label, chain.
 * @param {string} presetName - Name of the preset to add to the context menu.
 * @returns {Promise<string>} Empty string (slash commands must return a string).
 */
async function qrContextAddCallback(args, presetName) {
    const setName = args.set ?? selected_preset;
    const preset = presets.find(x => x.name == setName);

    if (!preset) {
        toastr.warning('Confirm you are using proper case sensitivity!', `QR preset '${setName}' not found`);
        return '';
    }

    const idx = preset.quickReplySlots.findIndex(x => x.label == args.label);

    // Guard: a missing label would otherwise crash on an undefined slot
    if (idx === -1) {
        toastr.warning('Confirm you are using proper case sensitivity!', `QR with label '${args.label}' not found`);
        return '';
    }

    const oqr = preset.quickReplySlots[idx];
    if (!oqr.contextMenu) {
        oqr.contextMenu = [];
    }
    let item = oqr.contextMenu.find(it => it.preset == presetName);
    if (item) {
        // Only overwrite the chain flag when explicitly provided
        item.chain = JSON.parse(args.chain ?? 'null') ?? item.chain ?? false;
    } else {
        oqr.contextMenu.push({ preset: presetName, chain: JSON.parse(args.chain ?? 'false') });
    }
    await fetch('/savequickreply', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify(preset),
    });
    saveSettingsDebounced();
    // Give the server a moment to persist before refreshing the UI
    await delay(400);
    applyQuickReplyPreset(selected_preset);
    return '';
}
/**
 * Removes a context menu preset entry from a Quick Reply.
 * Slash command callback for /qr-contextdel.
 * @param {object} args - Named arguments: set, label.
 * @param {string} presetName - Name of the preset to remove from the context menu.
 * @returns {Promise<string>} Empty string (slash commands must return a string).
 */
async function qrContextDeleteCallback(args, presetName) {
    const setName = args.set ?? selected_preset;
    const preset = presets.find(x => x.name == setName);

    if (!preset) {
        toastr.warning('Confirm you are using proper case sensitivity!', `QR preset '${setName}' not found`);
        return '';
    }

    const idx = preset.quickReplySlots.findIndex(x => x.label == args.label);

    // Guard: a missing label would otherwise crash on an undefined slot
    if (idx === -1) {
        toastr.warning('Confirm you are using proper case sensitivity!', `QR with label '${args.label}' not found`);
        return '';
    }

    const oqr = preset.quickReplySlots[idx];
    // Nothing to delete; return a string for slash command consistency
    if (!oqr.contextMenu) return '';
    const ctxIdx = oqr.contextMenu.findIndex(it => it.preset == presetName);
    if (ctxIdx > -1) {
        oqr.contextMenu.splice(ctxIdx, 1);
    }
    await fetch('/savequickreply', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify(preset),
    });
    saveSettingsDebounced();
    // Give the server a moment to persist before refreshing the UI
    await delay(400);
    applyQuickReplyPreset(selected_preset);
    return '';
}
/**
 * Removes ALL context menu preset entries from a Quick Reply.
 * Slash command callback for /qr-contextclear.
 * @param {object} args - Named arguments: set.
 * @param {string} label - Label of the Quick Reply to clear.
 * @returns {Promise<string>} Empty string (slash commands must return a string).
 */
async function qrContextClearCallback(args, label) {
    const setName = args.set ?? selected_preset;
    const preset = presets.find(x => x.name == setName);

    if (!preset) {
        toastr.warning('Confirm you are using proper case sensitivity!', `QR preset '${setName}' not found`);
        return '';
    }

    const idx = preset.quickReplySlots.findIndex(x => x.label == label);

    // Guard: a missing label would otherwise crash assigning to an undefined slot
    if (idx === -1) {
        toastr.warning('Confirm you are using proper case sensitivity!', `QR with label '${label}' not found`);
        return '';
    }

    const oqr = preset.quickReplySlots[idx];
    oqr.contextMenu = [];
    await fetch('/savequickreply', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify(preset),
    });
    saveSettingsDebounced();
    // Give the server a moment to persist before refreshing the UI
    await delay(400);
    applyQuickReplyPreset(selected_preset);
    return '';
}
/**
 * Creates (or overwrites) a Quick Reply preset and refreshes the preset list.
 * Slash command callback for /qr-presetadd.
 * @param {object} args - Named arguments: enabled, nosend, before, slots, inject.
 * @param {string} name - Name of the preset to create.
 */
async function qrPresetAddCallback(args, name) {
    // Tri-state flags: explicit true/false from args, otherwise the documented default
    const enabledFlag = JSON.parse(args.enabled ?? null) ?? true;
    const noSendFlag = JSON.parse(args.nosend ?? null) ?? false;
    const beforeFlag = JSON.parse(args.before ?? null) ?? false;

    const quickReplyPreset = {
        name,
        quickReplyEnabled: enabledFlag,
        quickActionEnabled: noSendFlag,
        placeBeforeInputEnabled: beforeFlag,
        quickReplySlots: [],
        numberOfSlots: Number(args.slots ?? '0'),
        AutoInputInject: JSON.parse(args.inject ?? 'false'),
    };

    await fetch('/savequickreply', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify(quickReplyPreset),
    });
    await updateQuickReplyPresetList();
}
/**
 * Updates an existing Quick Reply preset in place and persists it.
 * Slash command callback for /qr-presetupdate.
 * @param {object} args - Named arguments: enabled, nosend, before, slots, inject.
 * @param {string} name - Name of the preset to update.
 */
async function qrPresetUpdateCallback(args, name) {
    const preset = presets.find(it => it.name == name);

    // Guard: an unknown preset name would otherwise crash with no user feedback
    if (!preset) {
        toastr.warning('Confirm you are using proper case sensitivity!', `QR preset '${name}' not found`);
        return;
    }

    const quickReplyPreset = {
        name: preset.name,
        quickReplyEnabled: JSON.parse(args.enabled ?? null) ?? preset.quickReplyEnabled,
        quickActionEnabled: JSON.parse(args.nosend ?? null) ?? preset.quickActionEnabled,
        placeBeforeInputEnabled: JSON.parse(args.before ?? null) ?? preset.placeBeforeInputEnabled,
        quickReplySlots: preset.quickReplySlots,
        numberOfSlots: Number(args.slots ?? preset.numberOfSlots),
        AutoInputInject: JSON.parse(args.inject ?? 'null') ?? preset.AutoInputInject,
    };
    // Mutate the in-memory preset so the UI reflects the change immediately
    Object.assign(preset, quickReplyPreset);
    await fetch('/savequickreply', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify(quickReplyPreset),
    });
}
let onMessageSentExecuting = false; let onMessageSentExecuting = false;
let onMessageReceivedExecuting = false; let onMessageReceivedExecuting = false;
let onChatChangedExecuting = false; let onChatChangedExecuting = false;
@ -901,4 +1113,33 @@ jQuery(async () => {
jQuery(() => { jQuery(() => {
registerSlashCommand('qr', doQR, [], '<span class="monospace">(number)</span> activates the specified Quick Reply', true, true); registerSlashCommand('qr', doQR, [], '<span class="monospace">(number)</span> activates the specified Quick Reply', true, true);
registerSlashCommand('qrset', doQRPresetSwitch, [], '<span class="monospace">(name)</span> swaps to the specified Quick Reply Preset', true, true); registerSlashCommand('qrset', doQRPresetSwitch, [], '<span class="monospace">(name)</span> swaps to the specified Quick Reply Preset', true, true);
// Help text shared by the /qr-create and /qr-update command descriptions
const qrArgs = `
label - string - text on the button, e.g., label=MyButton
set - string - name of the QR set, e.g., set=PresetName1
hidden - bool - whether the button should be hidden, e.g., hidden=true
startup - bool - auto execute on app startup, e.g., startup=true
user - bool - auto execute on user message, e.g., user=true
bot - bool - auto execute on AI message, e.g., bot=true
load - bool - auto execute on chat load, e.g., load=true
title - string - title / tooltip to be shown on button, e.g., title="My Fancy Button"
`.trim();
const qrUpdateArgs = `
newlabel - string - new text for the button, e.g. newlabel=MyRenamedButton
${qrArgs}
`.trim();
registerSlashCommand('qr-create', qrCreateCallback, [], `<span class="monospace" style="white-space:pre-line;">(arguments [message])\n  arguments:\n  ${qrArgs}</span> creates a new Quick Reply, example: <tt>/qr-create set=MyPreset label=MyButton /echo 123</tt>`, true, true);
registerSlashCommand('qr-update', qrUpdateCallback, [], `<span class="monospace" style="white-space:pre-line;">(arguments [message])\n  arguments:\n  ${qrUpdateArgs}</span> updates Quick Reply, example: <tt>/qr-update set=MyPreset label=MyButton newlabel=MyRenamedButton /echo 123</tt>`, true, true);
registerSlashCommand('qr-delete', qrDeleteCallback, [], '<span class="monospace">(set=string [label])</span> deletes Quick Reply', true, true);
registerSlashCommand('qr-contextadd', qrContextAddCallback, [], '<span class="monospace">(set=string label=string chain=bool [preset name])</span> add context menu preset to a QR, example: <tt>/qr-contextadd set=MyPreset label=MyButton chain=true MyOtherPreset</tt>', true, true);
registerSlashCommand('qr-contextdel', qrContextDeleteCallback, [], '<span class="monospace">(set=string label=string [preset name])</span> remove context menu preset from a QR, example: <tt>/qr-contextdel set=MyPreset label=MyButton MyOtherPreset</tt>', true, true);
registerSlashCommand('qr-contextclear', qrContextClearCallback, [], '<span class="monospace">(set=string [label])</span> remove all context menu presets from a QR, example: <tt>/qr-contextclear set=MyPreset MyButton</tt>', true, true);
// Help text shared by the /qr-presetadd and /qr-presetupdate command descriptions
const presetArgs = `
enabled - bool - enable or disable the preset
nosend - bool - disable send / insert in user input (invalid for slash commands)
before - bool - place QR before user input
slots - int - number of slots
inject - bool - inject user input automatically (if disabled use {{input}})
`.trim();
registerSlashCommand('qr-presetadd', qrPresetAddCallback, [], `<span class="monospace" style="white-space:pre-line;">(arguments [label])\n  arguments:\n  ${presetArgs}</span> create a new preset (overrides existing ones), example: <tt>/qr-presetadd slots=3 MyNewPreset</tt>`, true, true);
registerSlashCommand('qr-presetupdate', qrPresetUpdateCallback, [], `<span class="monospace" style="white-space:pre-line;">(arguments [label])\n  arguments:\n  ${presetArgs}</span> update an existing preset, example: <tt>/qr-presetupdate enabled=false MyPreset</tt>`, true, true);
}); });

View File

@ -18,22 +18,35 @@ export async function getMultimodalCaption(base64Img, prompt) {
throw new Error('OpenRouter API key is not set.'); throw new Error('OpenRouter API key is not set.');
} }
// OpenRouter has a payload limit of ~2MB if (extension_settings.caption.multimodal_api === 'google' && !secret_state[SECRET_KEYS.MAKERSUITE]) {
const base64Bytes = base64Img.length * 0.75; throw new Error('MakerSuite API key is not set.');
const compressionLimit = 2 * 1024 * 1024;
if (extension_settings.caption.multimodal_api === 'openrouter' && base64Bytes > compressionLimit) {
const maxSide = 1024;
base64Img = await createThumbnail(base64Img, maxSide, maxSide, 'image/jpeg');
} }
const apiResult = await fetch('/api/openai/caption-image', { // OpenRouter has a payload limit of ~2MB. Google is 4MB, but we love democracy.
const isGoogle = extension_settings.caption.multimodal_api === 'google';
const base64Bytes = base64Img.length * 0.75;
const compressionLimit = 2 * 1024 * 1024;
if (['google', 'openrouter'].includes(extension_settings.caption.multimodal_api) && base64Bytes > compressionLimit) {
const maxSide = 1024;
base64Img = await createThumbnail(base64Img, maxSide, maxSide, 'image/jpeg');
if (isGoogle) {
base64Img = base64Img.split(',')[1];
}
}
const apiResult = await fetch(`/api/${isGoogle ? 'google' : 'openai'}/caption-image`, {
method: 'POST', method: 'POST',
headers: getRequestHeaders(), headers: getRequestHeaders(),
body: JSON.stringify({ body: JSON.stringify({
image: base64Img, image: base64Img,
prompt: prompt, prompt: prompt,
api: extension_settings.caption.multimodal_api || 'openai', ...(isGoogle
model: extension_settings.caption.multimodal_model || 'gpt-4-vision-preview', ? {}
: {
api: extension_settings.caption.multimodal_api || 'openai',
model: extension_settings.caption.multimodal_model || 'gpt-4-vision-preview',
}),
}), }),
}); });

View File

@ -1711,7 +1711,7 @@ async function getPrompt(generationType, message, trigger, quietPrompt) {
prompt = message || getRawLastMessage(); prompt = message || getRawLastMessage();
break; break;
case generationMode.FREE: case generationMode.FREE:
prompt = trigger.trim(); prompt = generateFreeModePrompt(trigger.trim());
break; break;
case generationMode.FACE_MULTIMODAL: case generationMode.FACE_MULTIMODAL:
case generationMode.CHARACTER_MULTIMODAL: case generationMode.CHARACTER_MULTIMODAL:
@ -1730,6 +1730,36 @@ async function getPrompt(generationType, message, trigger, quietPrompt) {
return prompt; return prompt;
} }
/**
 * Generates a free prompt with a character-specific prompt prefix support.
 * @param {string} trigger - The prompt to use for the image generation.
 * @returns {string}
 */
function generateFreeModePrompt(trigger) {
    // Resolves the filename key of the "current" character: the selected one if
    // any, otherwise the last non-user, non-system message's avatar. Throws when
    // no usable message exists.
    const resolveCharacterKey = () => {
        if (typeof this_chid !== 'undefined') {
            return getCharaFilename(this_chid);
        }
        const context = getContext();
        for (let i = context.chat.length - 1; i >= 0; i--) {
            const message = context.chat[i];
            if (message.is_user || message.is_system) {
                continue;
            }
            if (typeof message.original_avatar === 'string') {
                // Strip the file extension to get the character key
                return message.original_avatar.replace(/\.[^/.]+$/, '');
            }
        }
        throw new Error('No usable messages found.');
    };

    // Substitute a leading "char" token or a {{charPrefix}} macro with the
    // character-specific prompt prefix (empty when none is configured).
    return trigger.replace(/(?:^char(\s|,)|\{\{charPrefix\}\})/gi, (_, suffix) => {
        const characterKey = resolveCharacterKey();
        const prefix = (extension_settings.sd.character_prompts[characterKey] || '').trim();
        return prefix ? prefix + (suffix || '') : '';
    });
}
/** /**
* Generates a prompt using multimodal captioning. * Generates a prompt using multimodal captioning.
* @param {number} generationType - The type of image generation to perform. * @param {number} generationType - The type of image generation to perform.
@ -1756,22 +1786,28 @@ async function generateMultimodalPrompt(generationType, quietPrompt) {
} }
} }
const response = await fetch(avatarUrl); try {
const response = await fetch(avatarUrl);
if (!response.ok) { if (!response.ok) {
throw new Error('Could not fetch avatar image.'); throw new Error('Could not fetch avatar image.');
} }
const avatarBlob = await response.blob(); const avatarBlob = await response.blob();
const avatarBase64 = await getBase64Async(avatarBlob); const avatarBase64 = await getBase64Async(avatarBlob);
const caption = await getMultimodalCaption(avatarBase64, quietPrompt); const caption = await getMultimodalCaption(avatarBase64, quietPrompt);
if (!caption) { if (!caption) {
throw new Error('No caption returned from the API.');
}
return caption;
} catch (error) {
console.error(error);
toastr.error('Multimodal captioning failed. Please try again.', 'Image Generation');
throw new Error('Multimodal captioning failed.'); throw new Error('Multimodal captioning failed.');
} }
return caption;
} }
/** /**
@ -1781,7 +1817,14 @@ async function generateMultimodalPrompt(generationType, quietPrompt) {
*/ */
async function generatePrompt(quietPrompt) { async function generatePrompt(quietPrompt) {
const reply = await generateQuietPrompt(quietPrompt, false, false); const reply = await generateQuietPrompt(quietPrompt, false, false);
return processReply(reply); const processedReply = processReply(reply);
if (!processedReply) {
toastr.error('Prompt generation produced no text. Make sure you\'re using a valid instruct template and try again', 'Image Generation');
throw new Error('Prompt generation failed.');
}
return processedReply;
} }
async function sendGenerationRequest(generationType, prompt, characterName = null, callback) { async function sendGenerationRequest(generationType, prompt, characterName = null, callback) {

View File

@ -1,6 +1,6 @@
import { callPopup, cancelTtsPlay, eventSource, event_types, name2, saveSettingsDebounced } from '../../../script.js'; import { callPopup, cancelTtsPlay, eventSource, event_types, name2, saveSettingsDebounced } from '../../../script.js';
import { ModuleWorkerWrapper, doExtrasFetch, extension_settings, getApiUrl, getContext, modules } from '../../extensions.js'; import { ModuleWorkerWrapper, doExtrasFetch, extension_settings, getApiUrl, getContext, modules } from '../../extensions.js';
import { escapeRegex, getStringHash } from '../../utils.js'; import { delay, escapeRegex, getStringHash } from '../../utils.js';
import { EdgeTtsProvider } from './edge.js'; import { EdgeTtsProvider } from './edge.js';
import { ElevenLabsTtsProvider } from './elevenlabs.js'; import { ElevenLabsTtsProvider } from './elevenlabs.js';
import { SileroTtsProvider } from './silerotts.js'; import { SileroTtsProvider } from './silerotts.js';
@ -482,6 +482,12 @@ async function processTtsQueue() {
console.debug('New message found, running TTS'); console.debug('New message found, running TTS');
currentTtsJob = ttsJobQueue.shift(); currentTtsJob = ttsJobQueue.shift();
let text = extension_settings.tts.narrate_translated_only ? (currentTtsJob?.extra?.display_text || currentTtsJob.mes) : currentTtsJob.mes; let text = extension_settings.tts.narrate_translated_only ? (currentTtsJob?.extra?.display_text || currentTtsJob.mes) : currentTtsJob.mes;
if (extension_settings.tts.skip_codeblocks) {
text = text.replace(/^\s{4}.*$/gm, '').trim();
text = text.replace(/```.*?```/gs, '').trim();
}
text = extension_settings.tts.narrate_dialogues_only text = extension_settings.tts.narrate_dialogues_only
? text.replace(/\*[^*]*?(\*|$)/g, '').trim() // remove asterisks content ? text.replace(/\*[^*]*?(\*|$)/g, '').trim() // remove asterisks content
: text.replaceAll('*', '').trim(); // remove just the asterisks : text.replaceAll('*', '').trim(); // remove just the asterisks
@ -639,6 +645,11 @@ function onNarrateTranslatedOnlyClick() {
saveSettingsDebounced(); saveSettingsDebounced();
} }
// Persists the "Skip codeblocks" TTS checkbox state into extension settings.
function onSkipCodeblocksClick() {
    const isChecked = $('#tts_skip_codeblocks').prop('checked');
    extension_settings.tts.skip_codeblocks = Boolean(isChecked);
    saveSettingsDebounced();
}
//##############// //##############//
// TTS Provider // // TTS Provider //
//##############// //##############//
@ -687,7 +698,8 @@ export function saveTtsProviderSettings() {
async function onChatChanged() { async function onChatChanged() {
await resetTtsPlayback(); await resetTtsPlayback();
await initVoiceMap(); const voiceMapInit = initVoiceMap();
await Promise.race([voiceMapInit, delay(1000)]);
ttsLastMessage = null; ttsLastMessage = null;
} }
@ -952,6 +964,10 @@ $(document).ready(function () {
<input type="checkbox" id="tts_narrate_translated_only"> <input type="checkbox" id="tts_narrate_translated_only">
<small>Narrate only the translated text</small> <small>Narrate only the translated text</small>
</label> </label>
<label class="checkbox_label" for="tts_skip_codeblocks">
<input type="checkbox" id="tts_skip_codeblocks">
<small>Skip codeblocks</small>
</label>
</div> </div>
<div id="tts_voicemap_block"> <div id="tts_voicemap_block">
</div> </div>
@ -972,6 +988,7 @@ $(document).ready(function () {
$('#tts_narrate_dialogues').on('click', onNarrateDialoguesClick); $('#tts_narrate_dialogues').on('click', onNarrateDialoguesClick);
$('#tts_narrate_quoted').on('click', onNarrateQuotedClick); $('#tts_narrate_quoted').on('click', onNarrateQuotedClick);
$('#tts_narrate_translated_only').on('click', onNarrateTranslatedOnlyClick); $('#tts_narrate_translated_only').on('click', onNarrateTranslatedOnlyClick);
$('#tts_skip_codeblocks').on('click', onSkipCodeblocksClick);
$('#tts_auto_generation').on('click', onAutoGenerationClick); $('#tts_auto_generation').on('click', onAutoGenerationClick);
$('#tts_narrate_user').on('click', onNarrateUserClick); $('#tts_narrate_user').on('click', onNarrateUserClick);
$('#tts_voices').on('click', onTtsVoicesClick); $('#tts_voices').on('click', onTtsVoicesClick);

View File

@ -44,7 +44,7 @@ class OpenAITtsProvider {
</div> </div>
<div> <div>
<label for="openai-tts-speed">Speed: <span id="openai-tts-speed-output"></span></label> <label for="openai-tts-speed">Speed: <span id="openai-tts-speed-output"></span></label>
<input type="range" id="openai-tts-speed" value="1" min="0.25" max="4" step="0.25"> <input type="range" id="openai-tts-speed" value="1" min="0.25" max="4" step="0.05">
</div>`; </div>`;
return html; return html;
} }

View File

@ -11,6 +11,7 @@ export const EXTENSION_PROMPT_TAG = '3_vectors';
const settings = { const settings = {
// For both // For both
source: 'transformers', source: 'transformers',
include_wi: false,
// For chats // For chats
enabled_chats: false, enabled_chats: false,
@ -254,7 +255,7 @@ async function vectorizeFile(fileText, fileName, collectionId) {
async function rearrangeChat(chat) { async function rearrangeChat(chat) {
try { try {
// Clear the extension prompt // Clear the extension prompt
setExtensionPrompt(EXTENSION_PROMPT_TAG, '', extension_prompt_types.IN_PROMPT, 0); setExtensionPrompt(EXTENSION_PROMPT_TAG, '', extension_prompt_types.IN_PROMPT, 0, settings.include_wi);
if (settings.enabled_files) { if (settings.enabled_files) {
await processFiles(chat); await processFiles(chat);
@ -319,7 +320,7 @@ async function rearrangeChat(chat) {
// Format queried messages into a single string // Format queried messages into a single string
const insertedText = getPromptText(queriedMessages); const insertedText = getPromptText(queriedMessages);
setExtensionPrompt(EXTENSION_PROMPT_TAG, insertedText, settings.position, settings.depth); setExtensionPrompt(EXTENSION_PROMPT_TAG, insertedText, settings.position, settings.depth, settings.include_wi);
} catch (error) { } catch (error) {
console.error('Vectors: Failed to rearrange chat', error); console.error('Vectors: Failed to rearrange chat', error);
} }
@ -392,9 +393,10 @@ async function getSavedHashes(collectionId) {
* @returns {Promise<void>} * @returns {Promise<void>}
*/ */
async function insertVectorItems(collectionId, items) { async function insertVectorItems(collectionId, items) {
if ((settings.source === 'openai' && !secret_state[SECRET_KEYS.OPENAI]) || if (settings.source === 'openai' && !secret_state[SECRET_KEYS.OPENAI] ||
(settings.source === 'palm' && !secret_state[SECRET_KEYS.PALM]) || settings.source === 'palm' && !secret_state[SECRET_KEYS.MAKERSUITE] ||
(settings.source === 'togetherai' && !secret_state[SECRET_KEYS.TOGETHERAI])) { settings.source === 'mistral' && !secret_state[SECRET_KEYS.MISTRALAI] ||
settings.source === 'togetherai' && !secret_state[SECRET_KEYS.TOGETHERAI]) {
throw new Error('Vectors: API key missing', { cause: 'api_key_missing' }); throw new Error('Vectors: API key missing', { cause: 'api_key_missing' });
} }
@ -575,6 +577,12 @@ jQuery(async () => {
saveSettingsDebounced(); saveSettingsDebounced();
}); });
$('#vectors_include_wi').prop('checked', settings.include_wi).on('input', () => {
settings.include_wi = !!$('#vectors_include_wi').prop('checked');
Object.assign(extension_settings.vectors, settings);
saveSettingsDebounced();
});
toggleSettings(); toggleSettings();
eventSource.on(event_types.MESSAGE_DELETED, onChatEvent); eventSource.on(event_types.MESSAGE_DELETED, onChatEvent);
eventSource.on(event_types.MESSAGE_EDITED, onChatEvent); eventSource.on(event_types.MESSAGE_EDITED, onChatEvent);

View File

@ -13,6 +13,7 @@
<option value="transformers">Local (Transformers)</option> <option value="transformers">Local (Transformers)</option>
<option value="openai">OpenAI</option> <option value="openai">OpenAI</option>
<option value="palm">Google MakerSuite (PaLM)</option> <option value="palm">Google MakerSuite (PaLM)</option>
<option value="mistral">MistralAI</option>
<option value="togetherai">Together AI</option> <option value="togetherai">Together AI</option>
</select> </select>
</div> </div>
@ -24,6 +25,11 @@
<input type="number" id="vectors_query" class="text_pole widthUnset" min="1" max="99" /> <input type="number" id="vectors_query" class="text_pole widthUnset" min="1" max="99" />
</div> </div>
<label class="checkbox_label" for="vectors_include_wi" title="Query results can activate World Info entries.">
<input id="vectors_include_wi" type="checkbox" class="checkbox">
Include in World Info Scanning
</label>
<hr> <hr>
<h4> <h4>

View File

@ -8,7 +8,6 @@ import {
extractAllWords, extractAllWords,
saveBase64AsFile, saveBase64AsFile,
PAGINATION_TEMPLATE, PAGINATION_TEMPLATE,
waitUntilCondition,
getBase64Async, getBase64Async,
} from './utils.js'; } from './utils.js';
import { RA_CountCharTokens, humanizedDateTime, dragElement, favsToHotswap, getMessageTimeStamp } from './RossAscends-mods.js'; import { RA_CountCharTokens, humanizedDateTime, dragElement, favsToHotswap, getMessageTimeStamp } from './RossAscends-mods.js';
@ -46,7 +45,6 @@ import {
updateChatMetadata, updateChatMetadata,
isStreamingEnabled, isStreamingEnabled,
getThumbnailUrl, getThumbnailUrl,
streamingProcessor,
getRequestHeaders, getRequestHeaders,
setMenuType, setMenuType,
menu_type, menu_type,
@ -69,6 +67,7 @@ import {
baseChatReplace, baseChatReplace,
depth_prompt_depth_default, depth_prompt_depth_default,
loadItemizedPrompts, loadItemizedPrompts,
animation_duration,
} from '../script.js'; } from '../script.js';
import { appendTagToList, createTagMapFromList, getTagsList, applyTagsOnCharacterSelect, tag_map } from './tags.js'; import { appendTagToList, createTagMapFromList, getTagsList, applyTagsOnCharacterSelect, tag_map } from './tags.js';
import { FILTER_TYPES, FilterHelper } from './filters.js'; import { FILTER_TYPES, FilterHelper } from './filters.js';
@ -111,10 +110,18 @@ export const group_generation_mode = {
APPEND: 1, APPEND: 1,
}; };
const DEFAULT_AUTO_MODE_DELAY = 5;
export const groupCandidatesFilter = new FilterHelper(debounce(printGroupCandidates, 100)); export const groupCandidatesFilter = new FilterHelper(debounce(printGroupCandidates, 100));
setInterval(groupChatAutoModeWorker, 5000); let autoModeWorker = null;
const saveGroupDebounced = debounce(async (group, reload) => await _save(group, reload), 500); const saveGroupDebounced = debounce(async (group, reload) => await _save(group, reload), 500);
// (Re)starts the group auto-mode interval using the selected group's configured
// delay (in seconds), falling back to DEFAULT_AUTO_MODE_DELAY when unset.
function setAutoModeWorker() {
    clearInterval(autoModeWorker);
    const currentGroup = groups.find((group) => group.id === selected_group);
    const delaySeconds = currentGroup?.auto_mode_delay ?? DEFAULT_AUTO_MODE_DELAY;
    autoModeWorker = setInterval(groupChatAutoModeWorker, delaySeconds * 1000);
}
async function _save(group, reload = true) { async function _save(group, reload = true) {
await fetch('/api/groups/edit', { await fetch('/api/groups/edit', {
method: 'POST', method: 'POST',
@ -611,14 +618,20 @@ function getGroupChatNames(groupId) {
} }
async function generateGroupWrapper(by_auto_mode, type = null, params = {}) { async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
function throwIfAborted() {
if (params.signal instanceof AbortSignal && params.signal.aborted) {
throw new Error('AbortSignal was fired. Group generation stopped');
}
}
if (online_status === 'no_connection') { if (online_status === 'no_connection') {
is_group_generating = false; is_group_generating = false;
setSendButtonState(false); setSendButtonState(false);
return; return Promise.resolve();
} }
if (is_group_generating) { if (is_group_generating) {
return false; return Promise.resolve();
} }
// Auto-navigate back to group menu // Auto-navigate back to group menu
@ -629,13 +642,15 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
const group = groups.find((x) => x.id === selected_group); const group = groups.find((x) => x.id === selected_group);
let typingIndicator = $('#chat .typing_indicator'); let typingIndicator = $('#chat .typing_indicator');
let textResult = '';
if (!group || !Array.isArray(group.members) || !group.members.length) { if (!group || !Array.isArray(group.members) || !group.members.length) {
sendSystemMessage(system_message_types.EMPTY, '', { isSmallSys: true }); sendSystemMessage(system_message_types.EMPTY, '', { isSmallSys: true });
return; return Promise.resolve();
} }
try { try {
throwIfAborted();
hideSwipeButtons(); hideSwipeButtons();
is_group_generating = true; is_group_generating = true;
setCharacterName(''); setCharacterName('');
@ -653,50 +668,18 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
// id of this specific batch for regeneration purposes // id of this specific batch for regeneration purposes
group_generation_id = Date.now(); group_generation_id = Date.now();
const lastMessage = chat[chat.length - 1]; const lastMessage = chat[chat.length - 1];
let messagesBefore = chat.length;
let lastMessageText = lastMessage?.mes || '';
let activationText = ''; let activationText = '';
let isUserInput = false; let isUserInput = false;
let isGenerationDone = false;
let isGenerationAborted = false;
if (userInput?.length && !by_auto_mode) { if (userInput?.length && !by_auto_mode) {
isUserInput = true; isUserInput = true;
activationText = userInput; activationText = userInput;
messagesBefore++;
} else { } else {
if (lastMessage && !lastMessage.is_system) { if (lastMessage && !lastMessage.is_system) {
activationText = lastMessage.mes; activationText = lastMessage.mes;
} }
} }
const resolveOriginal = params.resolve;
const rejectOriginal = params.reject;
if (params.signal instanceof AbortSignal) {
if (params.signal.aborted) {
throw new Error('Already aborted signal passed. Group generation stopped');
}
params.signal.onabort = () => {
isGenerationAborted = true;
};
}
if (typeof params.resolve === 'function') {
params.resolve = function () {
isGenerationDone = true;
resolveOriginal.apply(this, arguments);
};
}
if (typeof params.reject === 'function') {
params.reject = function () {
isGenerationDone = true;
rejectOriginal.apply(this, arguments);
};
}
const activationStrategy = Number(group.activation_strategy ?? group_activation_strategy.NATURAL); const activationStrategy = Number(group.activation_strategy ?? group_activation_strategy.NATURAL);
const enabledMembers = group.members.filter(x => !group.disabled_members.includes(x)); const enabledMembers = group.members.filter(x => !group.disabled_members.includes(x));
let activatedMembers = []; let activatedMembers = [];
@ -741,14 +724,12 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
// now the real generation begins: cycle through every activated character // now the real generation begins: cycle through every activated character
for (const chId of activatedMembers) { for (const chId of activatedMembers) {
throwIfAborted();
deactivateSendButtons(); deactivateSendButtons();
isGenerationDone = false;
const generateType = type == 'swipe' || type == 'impersonate' || type == 'quiet' || type == 'continue' ? type : 'group_chat'; const generateType = type == 'swipe' || type == 'impersonate' || type == 'quiet' || type == 'continue' ? type : 'group_chat';
setCharacterId(chId); setCharacterId(chId);
setCharacterName(characters[chId].name); setCharacterName(characters[chId].name);
await Generate(generateType, { automatic_trigger: by_auto_mode, ...(params || {}) });
if (type !== 'swipe' && type !== 'impersonate' && !isStreamingEnabled()) { if (type !== 'swipe' && type !== 'impersonate' && !isStreamingEnabled()) {
// update indicator and scroll down // update indicator and scroll down
typingIndicator typingIndicator
@ -757,75 +738,9 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
typingIndicator.show(); typingIndicator.show();
} }
// TODO: This is awful. Refactor this // Wait for generation to finish
while (true) { const generateFinished = await Generate(generateType, { automatic_trigger: by_auto_mode, ...(params || {}) });
deactivateSendButtons(); textResult = await generateFinished;
if (isGenerationAborted) {
throw new Error('Group generation aborted');
}
// if not swipe - check if message generated already
if (generateType === 'group_chat' && chat.length == messagesBefore) {
await delay(100);
}
// if swipe - see if message changed
else if (type === 'swipe') {
if (isStreamingEnabled()) {
if (streamingProcessor && !streamingProcessor.isFinished) {
await delay(100);
}
else {
break;
}
}
else {
if (lastMessageText === chat[chat.length - 1].mes) {
await delay(100);
}
else {
break;
}
}
}
else if (type === 'impersonate') {
if (isStreamingEnabled()) {
if (streamingProcessor && !streamingProcessor.isFinished) {
await delay(100);
}
else {
break;
}
}
else {
if (!$('#send_textarea').val() || $('#send_textarea').val() == userInput) {
await delay(100);
}
else {
break;
}
}
}
else if (type === 'quiet') {
if (isGenerationDone) {
break;
} else {
await delay(100);
}
}
else if (isStreamingEnabled()) {
if (streamingProcessor && !streamingProcessor.isFinished) {
await delay(100);
} else {
await waitUntilCondition(() => streamingProcessor == null, 1000, 10);
messagesBefore++;
break;
}
}
else {
messagesBefore++;
break;
}
}
} }
} finally { } finally {
typingIndicator.hide(); typingIndicator.hide();
@ -838,6 +753,8 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
activateSendButtons(); activateSendButtons();
showSwipeButtons(); showSwipeButtons();
} }
return Promise.resolve(textResult);
} }
function getLastMessageGenerationId() { function getLastMessageGenerationId() {
@ -860,12 +777,35 @@ function activateImpersonate(members) {
return memberIds; return memberIds;
} }
/**
* Activates a group member based on the last message.
* @param {string[]} members Array of group member avatar ids
* @returns {number[]} Array of character ids
*/
function activateSwipe(members) { function activateSwipe(members) {
let activatedNames = []; let activatedNames = [];
const lastMessage = chat[chat.length - 1];
if (lastMessage.is_user || lastMessage.is_system || lastMessage.extra?.type === system_message_types.NARRATOR) {
for (const message of chat.slice().reverse()) {
if (message.is_user || message.is_system || message.extra?.type === system_message_types.NARRATOR) {
continue;
}
if (message.original_avatar) {
activatedNames.push(message.original_avatar);
break;
}
}
if (activatedNames.length === 0) {
activatedNames.push(shuffle(members.slice())[0]);
}
}
// pre-update group chat swipe // pre-update group chat swipe
if (!chat[chat.length - 1].original_avatar) { if (!lastMessage.original_avatar) {
const matches = characters.filter(x => x.name == chat[chat.length - 1].name); const matches = characters.filter(x => x.name == lastMessage.name);
for (const match of matches) { for (const match of matches) {
if (members.includes(match.avatar)) { if (members.includes(match.avatar)) {
@ -875,7 +815,7 @@ function activateSwipe(members) {
} }
} }
else { else {
activatedNames.push(chat[chat.length - 1].original_avatar); activatedNames.push(lastMessage.original_avatar);
} }
const memberIds = activatedNames const memberIds = activatedNames
@ -1103,6 +1043,15 @@ async function onGroupGenerationModeInput(e) {
} }
} }
async function onGroupAutoModeDelayInput(e) {
if (openGroupId) {
let _thisGroup = groups.find((x) => x.id == openGroupId);
_thisGroup.auto_mode_delay = Number(e.target.value);
await editGroup(openGroupId, false, false);
setAutoModeWorker();
}
}
async function onGroupNameInput() { async function onGroupNameInput() {
if (openGroupId) { if (openGroupId) {
let _thisGroup = groups.find((x) => x.id == openGroupId); let _thisGroup = groups.find((x) => x.id == openGroupId);
@ -1299,6 +1248,7 @@ function select_group_chats(groupId, skipAnimation) {
$('#rm_group_submit').prop('disabled', !groupHasMembers); $('#rm_group_submit').prop('disabled', !groupHasMembers);
$('#rm_group_allow_self_responses').prop('checked', group && group.allow_self_responses); $('#rm_group_allow_self_responses').prop('checked', group && group.allow_self_responses);
$('#rm_group_hidemutedsprites').prop('checked', group && group.hideMutedSprites); $('#rm_group_hidemutedsprites').prop('checked', group && group.hideMutedSprites);
$('#rm_group_automode_delay').val(group?.auto_mode_delay ?? DEFAULT_AUTO_MODE_DELAY);
// bottom buttons // bottom buttons
if (openGroupId) { if (openGroupId) {
@ -1317,6 +1267,7 @@ function select_group_chats(groupId, skipAnimation) {
} }
updateFavButtonState(group?.fav ?? false); updateFavButtonState(group?.fav ?? false);
setAutoModeWorker();
// top bar // top bar
if (group) { if (group) {
@ -1509,6 +1460,7 @@ async function createGroup() {
let allowSelfResponses = !!$('#rm_group_allow_self_responses').prop('checked'); let allowSelfResponses = !!$('#rm_group_allow_self_responses').prop('checked');
let activationStrategy = Number($('#rm_group_activation_strategy').find(':selected').val()) ?? group_activation_strategy.NATURAL; let activationStrategy = Number($('#rm_group_activation_strategy').find(':selected').val()) ?? group_activation_strategy.NATURAL;
let generationMode = Number($('#rm_group_generation_mode').find(':selected').val()) ?? group_generation_mode.SWAP; let generationMode = Number($('#rm_group_generation_mode').find(':selected').val()) ?? group_generation_mode.SWAP;
let autoModeDelay = Number($('#rm_group_automode_delay').val()) ?? DEFAULT_AUTO_MODE_DELAY;
const members = newGroupMembers; const members = newGroupMembers;
const memberNames = characters.filter(x => members.includes(x.avatar)).map(x => x.name).join(', '); const memberNames = characters.filter(x => members.includes(x.avatar)).map(x => x.name).join(', ');
@ -1537,6 +1489,7 @@ async function createGroup() {
fav: fav_grp_checked, fav: fav_grp_checked,
chat_id: chatName, chat_id: chatName,
chats: chats, chats: chats,
auto_mode_delay: autoModeDelay,
}), }),
}); });
@ -1768,17 +1721,17 @@ function doCurMemberListPopout() {
$('body').append(newElement); $('body').append(newElement);
loadMovingUIState(); loadMovingUIState();
$('#groupMemberListPopout').fadeIn(250); $('#groupMemberListPopout').fadeIn(animation_duration);
dragElement(newElement); dragElement(newElement);
$('#groupMemberListPopoutClose').off('click').on('click', function () { $('#groupMemberListPopoutClose').off('click').on('click', function () {
$('#groupMemberListPopout').fadeOut(250, () => { $('#groupMemberListPopout').remove(); }); $('#groupMemberListPopout').fadeOut(animation_duration, () => { $('#groupMemberListPopout').remove(); });
}); });
// Re-add pagination not working in popout // Re-add pagination not working in popout
printGroupMembers(); printGroupMembers();
} else { } else {
console.debug('saw existing popout, removing'); console.debug('saw existing popout, removing');
$('#groupMemberListPopout').fadeOut(250, () => { $('#groupMemberListPopout').remove(); }); $('#groupMemberListPopout').fadeOut(animation_duration, () => { $('#groupMemberListPopout').remove(); });
} }
} }
@ -1809,6 +1762,7 @@ jQuery(() => {
$('#rm_group_allow_self_responses').on('input', onGroupSelfResponsesClick); $('#rm_group_allow_self_responses').on('input', onGroupSelfResponsesClick);
$('#rm_group_activation_strategy').on('change', onGroupActivationStrategyInput); $('#rm_group_activation_strategy').on('change', onGroupActivationStrategyInput);
$('#rm_group_generation_mode').on('change', onGroupGenerationModeInput); $('#rm_group_generation_mode').on('change', onGroupGenerationModeInput);
$('#rm_group_automode_delay').on('input', onGroupAutoModeDelayInput);
$('#group_avatar_button').on('input', uploadGroupAvatar); $('#group_avatar_button').on('input', uploadGroupAvatar);
$('#rm_group_restore_avatar').on('click', restoreGroupAvatar); $('#rm_group_restore_avatar').on('click', restoreGroupAvatar);
$(document).on('click', '.group_member .right_menu_button', onGroupActionClick); $(document).on('click', '.group_member .right_menu_button', onGroupActionClick);

View File

@ -9,7 +9,7 @@ import {
} from '../script.js'; } from '../script.js';
import { SECRET_KEYS, writeSecret } from './secrets.js'; import { SECRET_KEYS, writeSecret } from './secrets.js';
import { delay } from './utils.js'; import { delay } from './utils.js';
import { getDeviceInfo } from './RossAscends-mods.js'; import { isMobile } from './RossAscends-mods.js';
import { autoSelectInstructPreset } from './instruct-mode.js'; import { autoSelectInstructPreset } from './instruct-mode.js';
export { export {
@ -41,7 +41,7 @@ const getRequestArgs = () => ({
}, },
}); });
async function getWorkers() { async function getWorkers(workerType) {
const response = await fetch('https://horde.koboldai.net/api/v2/workers?type=text', getRequestArgs()); const response = await fetch('https://horde.koboldai.net/api/v2/workers?type=text', getRequestArgs());
const data = await response.json(); const data = await response.json();
return data; return data;
@ -303,8 +303,7 @@ jQuery(function () {
$('#horde_kudos').on('click', showKudos); $('#horde_kudos').on('click', showKudos);
// Not needed on mobile // Not needed on mobile
const deviceInfo = getDeviceInfo(); if (!isMobile()) {
if (deviceInfo && deviceInfo.device.type === 'desktop') {
$('#horde_model').select2({ $('#horde_model').select2({
width: '100%', width: '100%',
placeholder: 'Select Horde models', placeholder: 'Select Horde models',

View File

@ -10,6 +10,7 @@ import {
import { import {
power_user, power_user,
} from './power-user.js'; } from './power-user.js';
import EventSourceStream from './sse-stream.js';
import { getSortableDelay } from './utils.js'; import { getSortableDelay } from './utils.js';
export const kai_settings = { export const kai_settings = {
@ -128,13 +129,6 @@ export function getKoboldGenerationData(finalPrompt, settings, maxLength, maxCon
top_p: kai_settings.top_p, top_p: kai_settings.top_p,
min_p: (kai_flags.can_use_min_p || isHorde) ? kai_settings.min_p : undefined, min_p: (kai_flags.can_use_min_p || isHorde) ? kai_settings.min_p : undefined,
typical: kai_settings.typical, typical: kai_settings.typical,
s1: sampler_order[0],
s2: sampler_order[1],
s3: sampler_order[2],
s4: sampler_order[3],
s5: sampler_order[4],
s6: sampler_order[5],
s7: sampler_order[6],
use_world_info: false, use_world_info: false,
singleline: false, singleline: false,
stop_sequence: (kai_flags.can_use_stop_sequence || isHorde) ? getStoppingStrings(isImpersonate, isContinue) : undefined, stop_sequence: (kai_flags.can_use_stop_sequence || isHorde) ? getStoppingStrings(isImpersonate, isContinue) : undefined,
@ -153,44 +147,50 @@ export function getKoboldGenerationData(finalPrompt, settings, maxLength, maxCon
return generate_data; return generate_data;
} }
function tryParseStreamingError(response, decoded) {
try {
const data = JSON.parse(decoded);
if (!data) {
return;
}
if (data.error) {
toastr.error(data.error.message || response.statusText, 'KoboldAI API');
throw new Error(data);
}
}
catch {
// No JSON. Do nothing.
}
}
export async function generateKoboldWithStreaming(generate_data, signal) { export async function generateKoboldWithStreaming(generate_data, signal) {
const response = await fetch('/generate', { const response = await fetch('/api/backends/kobold/generate', {
headers: getRequestHeaders(), headers: getRequestHeaders(),
body: JSON.stringify(generate_data), body: JSON.stringify(generate_data),
method: 'POST', method: 'POST',
signal: signal, signal: signal,
}); });
if (!response.ok) {
tryParseStreamingError(response, await response.text());
throw new Error(`Got response status ${response.status}`);
}
const eventStream = new EventSourceStream();
response.body.pipeThrough(eventStream);
const reader = eventStream.readable.getReader();
return async function* streamData() { return async function* streamData() {
const decoder = new TextDecoder(); let text = '';
const reader = response.body.getReader();
let getMessage = '';
let messageBuffer = '';
while (true) { while (true) {
const { done, value } = await reader.read(); const { done, value } = await reader.read();
let response = decoder.decode(value); if (done) return;
let eventList = [];
// ReadableStream's buffer is not guaranteed to contain full SSE messages as they arrive in chunks const data = JSON.parse(value.data);
// We need to buffer chunks until we have one or more full messages (separated by double newlines) if (data?.token) {
messageBuffer += response; text += data.token;
eventList = messageBuffer.split('\n\n');
// Last element will be an empty string or a leftover partial message
messageBuffer = eventList.pop();
for (let event of eventList) {
for (let subEvent of event.split('\n')) {
if (subEvent.startsWith('data')) {
let data = JSON.parse(subEvent.substring(5));
getMessage += (data?.token || '');
yield { text: getMessage, swipes: [] };
}
}
}
if (done) {
return;
} }
yield { text, swipes: [] };
} }
}; };
} }
@ -310,87 +310,24 @@ const sliders = [
}, },
]; ];
export function setKoboldFlags(version, koboldVersion) { export function setKoboldFlags(koboldUnitedVersion, koboldCppVersion) {
kai_flags.can_use_stop_sequence = canUseKoboldStopSequence(version); kai_flags.can_use_stop_sequence = versionCompare(koboldUnitedVersion, MIN_STOP_SEQUENCE_VERSION);
kai_flags.can_use_streaming = canUseKoboldStreaming(koboldVersion); kai_flags.can_use_streaming = versionCompare(koboldCppVersion, MIN_STREAMING_KCPPVERSION);
kai_flags.can_use_tokenization = canUseKoboldTokenization(koboldVersion); kai_flags.can_use_tokenization = versionCompare(koboldCppVersion, MIN_TOKENIZATION_KCPPVERSION);
kai_flags.can_use_default_badwordsids = canUseDefaultBadwordIds(version); kai_flags.can_use_default_badwordsids = versionCompare(koboldUnitedVersion, MIN_UNBAN_VERSION);
kai_flags.can_use_mirostat = canUseMirostat(koboldVersion); kai_flags.can_use_mirostat = versionCompare(koboldCppVersion, MIN_MIROSTAT_KCPPVERSION);
kai_flags.can_use_grammar = canUseGrammar(koboldVersion); kai_flags.can_use_grammar = versionCompare(koboldCppVersion, MIN_GRAMMAR_KCPPVERSION);
kai_flags.can_use_min_p = canUseMinP(koboldVersion); kai_flags.can_use_min_p = versionCompare(koboldCppVersion, MIN_MIN_P_KCPPVERSION);
} }
/** /**
* Determines if the Kobold stop sequence can be used with the given version. * Compares two version numbers, returning true if srcVersion >= minVersion
* @param {string} version KoboldAI version to check. * @param {string} srcVersion The current version.
* @returns {boolean} True if the Kobold stop sequence can be used, false otherwise. * @param {string} minVersion The target version number to test against
* @returns {boolean} True if srcVersion >= minVersion, false if not
*/ */
function canUseKoboldStopSequence(version) { function versionCompare(srcVersion, minVersion) {
return (version || '0.0.0').localeCompare(MIN_STOP_SEQUENCE_VERSION, undefined, { numeric: true, sensitivity: 'base' }) > -1; return (srcVersion || '0.0.0').localeCompare(minVersion, undefined, { numeric: true, sensitivity: 'base' }) > -1;
}
/**
* Determines if the Kobold default badword ids can be used with the given version.
* @param {string} version KoboldAI version to check.
* @returns {boolean} True if the Kobold default badword ids can be used, false otherwise.
*/
function canUseDefaultBadwordIds(version) {
return (version || '0.0.0').localeCompare(MIN_UNBAN_VERSION, undefined, { numeric: true, sensitivity: 'base' }) > -1;
}
/**
* Determines if the Kobold streaming API can be used with the given version.
* @param {{ result: string; version: string; }} koboldVersion KoboldAI version object.
* @returns {boolean} True if the Kobold streaming API can be used, false otherwise.
*/
function canUseKoboldStreaming(koboldVersion) {
if (koboldVersion && koboldVersion.result == 'KoboldCpp') {
return (koboldVersion.version || '0.0').localeCompare(MIN_STREAMING_KCPPVERSION, undefined, { numeric: true, sensitivity: 'base' }) > -1;
} else return false;
}
/**
* Determines if the Kobold tokenization API can be used with the given version.
* @param {{ result: string; version: string; }} koboldVersion KoboldAI version object.
* @returns {boolean} True if the Kobold tokenization API can be used, false otherwise.
*/
function canUseKoboldTokenization(koboldVersion) {
if (koboldVersion && koboldVersion.result == 'KoboldCpp') {
return (koboldVersion.version || '0.0').localeCompare(MIN_TOKENIZATION_KCPPVERSION, undefined, { numeric: true, sensitivity: 'base' }) > -1;
} else return false;
}
/**
* Determines if the Kobold mirostat can be used with the given version.
* @param {{result: string; version: string;}} koboldVersion KoboldAI version object.
* @returns {boolean} True if the Kobold mirostat API can be used, false otherwise.
*/
function canUseMirostat(koboldVersion) {
if (koboldVersion && koboldVersion.result == 'KoboldCpp') {
return (koboldVersion.version || '0.0').localeCompare(MIN_MIROSTAT_KCPPVERSION, undefined, { numeric: true, sensitivity: 'base' }) > -1;
} else return false;
}
/**
* Determines if the Kobold grammar can be used with the given version.
* @param {{result: string; version:string;}} koboldVersion KoboldAI version object.
* @returns {boolean} True if the Kobold grammar can be used, false otherwise.
*/
function canUseGrammar(koboldVersion) {
if (koboldVersion && koboldVersion.result == 'KoboldCpp') {
return (koboldVersion.version || '0.0').localeCompare(MIN_GRAMMAR_KCPPVERSION, undefined, { numeric: true, sensitivity: 'base' }) > -1;
} else return false;
}
/**
* Determines if the Kobold min_p can be used with the given version.
* @param {{result:string, version:string;}} koboldVersion KoboldAI version object.
* @returns {boolean} True if the Kobold min_p can be used, false otherwise.
*/
function canUseMinP(koboldVersion) {
if (koboldVersion && koboldVersion.result == 'KoboldCpp') {
return (koboldVersion.version || '0.0').localeCompare(MIN_MIN_P_KCPPVERSION, undefined, { numeric: true, sensitivity: 'base' }) > -1;
} else return false;
} }
/** /**

View File

@ -1,5 +1,5 @@
import { setGenerationParamsFromPreset } from '../script.js'; import { setGenerationParamsFromPreset } from '../script.js';
import { getDeviceInfo } from './RossAscends-mods.js'; import { isMobile } from './RossAscends-mods.js';
import { textgenerationwebui_settings as textgen_settings } from './textgen-settings.js'; import { textgenerationwebui_settings as textgen_settings } from './textgen-settings.js';
let models = []; let models = [];
@ -52,8 +52,7 @@ function getMancerModelTemplate(option) {
jQuery(function () { jQuery(function () {
$('#mancer_model').on('change', onMancerModelSelect); $('#mancer_model').on('change', onMancerModelSelect);
const deviceInfo = getDeviceInfo(); if (!isMobile()) {
if (deviceInfo && deviceInfo.device.type === 'desktop') {
$('#mancer_model').select2({ $('#mancer_model').select2({
placeholder: 'Select a model', placeholder: 'Select a model',
searchInputPlaceholder: 'Search models...', searchInputPlaceholder: 'Search models...',

View File

@ -10,6 +10,7 @@ import {
import { getCfgPrompt } from './cfg-scale.js'; import { getCfgPrompt } from './cfg-scale.js';
import { MAX_CONTEXT_DEFAULT, MAX_RESPONSE_DEFAULT } from './power-user.js'; import { MAX_CONTEXT_DEFAULT, MAX_RESPONSE_DEFAULT } from './power-user.js';
import { getTextTokens, tokenizers } from './tokenizers.js'; import { getTextTokens, tokenizers } from './tokenizers.js';
import EventSourceStream from './sse-stream.js';
import { import {
getSortableDelay, getSortableDelay,
getStringHash, getStringHash,
@ -663,7 +664,7 @@ export function adjustNovelInstructionPrompt(prompt) {
return stripedPrompt; return stripedPrompt;
} }
function tryParseStreamingError(decoded) { function tryParseStreamingError(response, decoded) {
try { try {
const data = JSON.parse(decoded); const data = JSON.parse(decoded);
@ -671,8 +672,8 @@ function tryParseStreamingError(decoded) {
return; return;
} }
if (data.message && data.statusCode >= 400) { if (data.message || data.error) {
toastr.error(data.message, 'Error'); toastr.error(data.message || data.error?.message || response.statusText, 'NovelAI API');
throw new Error(data); throw new Error(data);
} }
} }
@ -690,39 +691,27 @@ export async function generateNovelWithStreaming(generate_data, signal) {
method: 'POST', method: 'POST',
signal: signal, signal: signal,
}); });
if (!response.ok) {
tryParseStreamingError(response, await response.text());
throw new Error(`Got response status ${response.status}`);
}
const eventStream = new EventSourceStream();
response.body.pipeThrough(eventStream);
const reader = eventStream.readable.getReader();
return async function* streamData() { return async function* streamData() {
const decoder = new TextDecoder(); let text = '';
const reader = response.body.getReader();
let getMessage = '';
let messageBuffer = '';
while (true) { while (true) {
const { done, value } = await reader.read(); const { done, value } = await reader.read();
let decoded = decoder.decode(value); if (done) return;
let eventList = [];
tryParseStreamingError(decoded); const data = JSON.parse(value.data);
// ReadableStream's buffer is not guaranteed to contain full SSE messages as they arrive in chunks if (data.token) {
// We need to buffer chunks until we have one or more full messages (separated by double newlines) text += data.token;
messageBuffer += decoded;
eventList = messageBuffer.split('\n\n');
// Last element will be an empty string or a leftover partial message
messageBuffer = eventList.pop();
for (let event of eventList) {
for (let subEvent of event.split('\n')) {
if (subEvent.startsWith('data')) {
let data = JSON.parse(subEvent.substring(5));
getMessage += (data?.token || '');
yield { text: getMessage, swipes: [] };
}
}
} }
if (done) { yield { text, swipes: [] };
return;
}
} }
}; };
} }

View File

@ -37,13 +37,14 @@ import {
chatCompletionDefaultPrompts, chatCompletionDefaultPrompts,
INJECTION_POSITION, INJECTION_POSITION,
Prompt, Prompt,
promptManagerDefaultPromptOrders,
PromptManager, PromptManager,
promptManagerDefaultPromptOrders,
} from './PromptManager.js'; } from './PromptManager.js';
import { getCustomStoppingStrings, persona_description_positions, power_user } from './power-user.js'; import { getCustomStoppingStrings, persona_description_positions, power_user } from './power-user.js';
import { SECRET_KEYS, secret_state, writeSecret } from './secrets.js'; import { SECRET_KEYS, secret_state, writeSecret } from './secrets.js';
import EventSourceStream from './sse-stream.js';
import { import {
delay, delay,
download, download,
@ -113,7 +114,6 @@ const max_128k = 128 * 1000;
const max_200k = 200 * 1000; const max_200k = 200 * 1000;
const scale_max = 8191; const scale_max = 8191;
const claude_max = 9000; // We have a proper tokenizer, so theoretically could be larger (up to 9k) const claude_max = 9000; // We have a proper tokenizer, so theoretically could be larger (up to 9k)
const palm2_max = 7400; // The real context window is 8192, spare some for padding due to using turbo tokenizer
const claude_100k_max = 99000; const claude_100k_max = 99000;
let ai21_max = 9200; //can easily fit 9k gpt tokens because j2's tokenizer is efficient af let ai21_max = 9200; //can easily fit 9k gpt tokens because j2's tokenizer is efficient af
const unlocked_max = 100 * 1024; const unlocked_max = 100 * 1024;
@ -163,7 +163,8 @@ export const chat_completion_sources = {
SCALE: 'scale', SCALE: 'scale',
OPENROUTER: 'openrouter', OPENROUTER: 'openrouter',
AI21: 'ai21', AI21: 'ai21',
PALM: 'palm', MAKERSUITE: 'makersuite',
MISTRALAI: 'mistralai',
TOGETHERAI: 'togetherai', TOGETHERAI: 'togetherai',
}; };
@ -207,8 +208,10 @@ const default_settings = {
personality_format: default_personality_format, personality_format: default_personality_format,
openai_model: 'gpt-3.5-turbo', openai_model: 'gpt-3.5-turbo',
claude_model: 'claude-instant-v1', claude_model: 'claude-instant-v1',
google_model: 'gemini-pro',
ai21_model: 'j2-ultra', ai21_model: 'j2-ultra',
togetherai_model: 'togethercomputer/GPT-NeoXT-Chat-Base-20B', // unsure here togetherai_model: 'togethercomputer/GPT-NeoXT-Chat-Base-20B', // unsure here
mistralai_model: 'mistral-medium',
windowai_model: '', windowai_model: '',
openrouter_model: openrouter_website_model, openrouter_model: openrouter_website_model,
openrouter_use_fallback: false, openrouter_use_fallback: false,
@ -217,7 +220,6 @@ const default_settings = {
openrouter_sort_models: 'alphabetically', openrouter_sort_models: 'alphabetically',
jailbreak_system: false, jailbreak_system: false,
reverse_proxy: '', reverse_proxy: '',
legacy_streaming: false,
chat_completion_source: chat_completion_sources.OPENAI, chat_completion_source: chat_completion_sources.OPENAI,
max_context_unlocked: false, max_context_unlocked: false,
api_url_scale: '', api_url_scale: '',
@ -225,6 +227,7 @@ const default_settings = {
proxy_password: '', proxy_password: '',
assistant_prefill: '', assistant_prefill: '',
use_ai21_tokenizer: false, use_ai21_tokenizer: false,
use_google_tokenizer: false,
exclude_assistant: false, exclude_assistant: false,
use_alt_scale: false, use_alt_scale: false,
squash_system_messages: false, squash_system_messages: false,
@ -262,8 +265,10 @@ const oai_settings = {
personality_format: default_personality_format, personality_format: default_personality_format,
openai_model: 'gpt-3.5-turbo', openai_model: 'gpt-3.5-turbo',
claude_model: 'claude-instant-v1', claude_model: 'claude-instant-v1',
google_model: 'gemini-pro',
ai21_model: 'j2-ultra', ai21_model: 'j2-ultra',
togetherai_model: 'togethercomputer/GPT-NeoXT-Chat-Base-20B', // unsure here togetherai_model: 'togethercomputer/GPT-NeoXT-Chat-Base-20B', // unsure here
mistralai_model: 'mistral-medium',
windowai_model: '', windowai_model: '',
openrouter_model: openrouter_website_model, openrouter_model: openrouter_website_model,
openrouter_use_fallback: false, openrouter_use_fallback: false,
@ -272,7 +277,6 @@ const oai_settings = {
openrouter_sort_models: 'alphabetically', openrouter_sort_models: 'alphabetically',
jailbreak_system: false, jailbreak_system: false,
reverse_proxy: '', reverse_proxy: '',
legacy_streaming: false,
chat_completion_source: chat_completion_sources.OPENAI, chat_completion_source: chat_completion_sources.OPENAI,
max_context_unlocked: false, max_context_unlocked: false,
api_url_scale: '', api_url_scale: '',
@ -280,6 +284,7 @@ const oai_settings = {
proxy_password: '', proxy_password: '',
assistant_prefill: '', assistant_prefill: '',
use_ai21_tokenizer: false, use_ai21_tokenizer: false,
use_google_tokenizer: false,
exclude_assistant: false, exclude_assistant: false,
use_alt_scale: false, use_alt_scale: false,
squash_system_messages: false, squash_system_messages: false,
@ -917,6 +922,7 @@ function preparePromptsForChatCompletion({ Scenario, charPersonality, name2, wor
const scenarioText = Scenario && oai_settings.scenario_format ? substituteParams(oai_settings.scenario_format) : ''; const scenarioText = Scenario && oai_settings.scenario_format ? substituteParams(oai_settings.scenario_format) : '';
const charPersonalityText = charPersonality && oai_settings.personality_format ? substituteParams(oai_settings.personality_format) : ''; const charPersonalityText = charPersonality && oai_settings.personality_format ? substituteParams(oai_settings.personality_format) : '';
const groupNudge = substituteParams(oai_settings.group_nudge_prompt); const groupNudge = substituteParams(oai_settings.group_nudge_prompt);
const impersonationPrompt = oai_settings.impersonation_prompt ? substituteParams(oai_settings.impersonation_prompt) : '';
// Create entries for system prompts // Create entries for system prompts
const systemPrompts = [ const systemPrompts = [
@ -928,7 +934,7 @@ function preparePromptsForChatCompletion({ Scenario, charPersonality, name2, wor
{ role: 'system', content: scenarioText, identifier: 'scenario' }, { role: 'system', content: scenarioText, identifier: 'scenario' },
{ role: 'system', content: personaDescription, identifier: 'personaDescription' }, { role: 'system', content: personaDescription, identifier: 'personaDescription' },
// Unordered prompts without marker // Unordered prompts without marker
{ role: 'system', content: oai_settings.impersonation_prompt, identifier: 'impersonate' }, { role: 'system', content: impersonationPrompt, identifier: 'impersonate' },
{ role: 'system', content: quietPrompt, identifier: 'quietPrompt' }, { role: 'system', content: quietPrompt, identifier: 'quietPrompt' },
{ role: 'system', content: bias, identifier: 'bias' }, { role: 'system', content: bias, identifier: 'bias' },
{ role: 'system', content: groupNudge, identifier: 'groupNudge' }, { role: 'system', content: groupNudge, identifier: 'groupNudge' },
@ -1127,7 +1133,7 @@ function tryParseStreamingError(response, decoded) {
checkQuotaError(data); checkQuotaError(data);
if (data.error) { if (data.error) {
toastr.error(data.error.message || response.statusText, 'API returned an error'); toastr.error(data.error.message || response.statusText, 'Chat Completion API');
throw new Error(data); throw new Error(data);
} }
} }
@ -1255,14 +1261,16 @@ function getChatCompletionModel() {
return oai_settings.windowai_model; return oai_settings.windowai_model;
case chat_completion_sources.SCALE: case chat_completion_sources.SCALE:
return ''; return '';
case chat_completion_sources.PALM: case chat_completion_sources.MAKERSUITE:
return ''; return oai_settings.google_model;
case chat_completion_sources.OPENROUTER: case chat_completion_sources.OPENROUTER:
return oai_settings.openrouter_model !== openrouter_website_model ? oai_settings.openrouter_model : null; return oai_settings.openrouter_model !== openrouter_website_model ? oai_settings.openrouter_model : null;
case chat_completion_sources.AI21: case chat_completion_sources.AI21:
return oai_settings.ai21_model; return oai_settings.ai21_model;
case chat_completion_sources.TOGETHERAI: case chat_completion_sources.TOGETHERAI:
return oai_settings.togetherai_model; return oai_settings.togetherai_model;
case chat_completion_sources.MISTRALAI:
return oai_settings.mistralai_model;
default: default:
throw new Error(`Unknown chat completion source: ${oai_settings.chat_completion_source}`); throw new Error(`Unknown chat completion source: ${oai_settings.chat_completion_source}`);
} }
@ -1387,7 +1395,7 @@ function openRouterGroupByVendor(array) {
} }
async function sendAltScaleRequest(messages, logit_bias, signal, type) { async function sendAltScaleRequest(messages, logit_bias, signal, type) {
const generate_url = '/generate_altscale'; const generate_url = '/api/backends/scale-alt/generate';
let firstSysMsgs = []; let firstSysMsgs = [];
for (let msg of messages) { for (let msg of messages) {
@ -1448,21 +1456,22 @@ async function sendOpenAIRequest(type, messages, signal) {
const isOpenRouter = oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER; const isOpenRouter = oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER;
const isScale = oai_settings.chat_completion_source == chat_completion_sources.SCALE; const isScale = oai_settings.chat_completion_source == chat_completion_sources.SCALE;
const isAI21 = oai_settings.chat_completion_source == chat_completion_sources.AI21; const isAI21 = oai_settings.chat_completion_source == chat_completion_sources.AI21;
const isPalm = oai_settings.chat_completion_source == chat_completion_sources.PALM; const isGoogle = oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE;
const isOAI = oai_settings.chat_completion_source == chat_completion_sources.OPENAI; const isOAI = oai_settings.chat_completion_source == chat_completion_sources.OPENAI;
const isTogetherAI = oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI; const isTogetherAI = oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI;
const isMistral = oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI;
const isTextCompletion = (isOAI && textCompletionModels.includes(oai_settings.openai_model)) || (isOpenRouter && oai_settings.openrouter_force_instruct && power_user.instruct.enabled); const isTextCompletion = (isOAI && textCompletionModels.includes(oai_settings.openai_model)) || (isOpenRouter && oai_settings.openrouter_force_instruct && power_user.instruct.enabled);
const isQuiet = type === 'quiet'; const isQuiet = type === 'quiet';
const isImpersonate = type === 'impersonate'; const isImpersonate = type === 'impersonate';
const isContinue = type === 'continue'; const isContinue = type === 'continue';
const stream = oai_settings.stream_openai && !isQuiet && !isScale && !isAI21 && !isPalm; const stream = oai_settings.stream_openai && !isQuiet && !isScale && !isAI21 && !(isGoogle && oai_settings.google_model.includes('bison'));
if (isTextCompletion && isOpenRouter) { if (isTextCompletion && isOpenRouter) {
messages = convertChatCompletionToInstruct(messages, type); messages = convertChatCompletionToInstruct(messages, type);
replaceItemizedPromptText(messageId, messages); replaceItemizedPromptText(messageId, messages);
} }
if (isAI21 || isPalm) { if (isAI21) {
const joinedMsgs = messages.reduce((acc, obj) => { const joinedMsgs = messages.reduce((acc, obj) => {
const prefix = prefixMap[obj.role]; const prefix = prefixMap[obj.role];
return acc + (prefix ? (selected_group ? '\n' : prefix + ' ') : '') + obj.content + '\n'; return acc + (prefix ? (selected_group ? '\n' : prefix + ' ') : '') + obj.content + '\n';
@ -1545,7 +1554,7 @@ async function sendOpenAIRequest(type, messages, signal) {
generate_data['api_url_scale'] = oai_settings.api_url_scale; generate_data['api_url_scale'] = oai_settings.api_url_scale;
} }
if (isPalm) { if (isGoogle) {
const nameStopString = isImpersonate ? `\n${name2}:` : `\n${name1}:`; const nameStopString = isImpersonate ? `\n${name2}:` : `\n${name1}:`;
const stopStringsLimit = 3; // 5 - 2 (nameStopString and new_chat_prompt) const stopStringsLimit = 3; // 5 - 2 (nameStopString and new_chat_prompt)
generate_data['top_k'] = Number(oai_settings.top_k_openai); generate_data['top_k'] = Number(oai_settings.top_k_openai);
@ -1558,11 +1567,15 @@ async function sendOpenAIRequest(type, messages, signal) {
generate_data['stop_tokens'] = [name1 + ':', oai_settings.new_chat_prompt, oai_settings.new_group_chat_prompt]; generate_data['stop_tokens'] = [name1 + ':', oai_settings.new_chat_prompt, oai_settings.new_group_chat_prompt];
} }
if ((isOAI || isOpenRouter || isTogetherAI) && oai_settings.seed >= 0) { if (isMistral) {
generate_data['safe_mode'] = false; // already defaults to false, but just incase they change that in the future.
}
if ((isOAI || isOpenRouter || isMistral || isTogetherAI) && oai_settings.seed >= 0) {
generate_data['seed'] = oai_settings.seed; generate_data['seed'] = oai_settings.seed;
} }
const generate_url = '/generate_openai'; const generate_url = '/api/backends/chat-completions/generate';
const response = await fetch(generate_url, { const response = await fetch(generate_url, {
method: 'POST', method: 'POST',
body: JSON.stringify(generate_data), body: JSON.stringify(generate_data),
@ -1570,58 +1583,31 @@ async function sendOpenAIRequest(type, messages, signal) {
signal: signal, signal: signal,
}); });
if (!response.ok) {
tryParseStreamingError(response, await response.text());
throw new Error(`Got response status ${response.status}`);
}
if (stream) { if (stream) {
let reader;
let isSSEStream = oai_settings.chat_completion_source !== chat_completion_sources.MAKERSUITE;
if (isSSEStream) {
const eventStream = new EventSourceStream();
response.body.pipeThrough(eventStream);
reader = eventStream.readable.getReader();
} else {
reader = response.body.getReader();
}
return async function* streamData() { return async function* streamData() {
const decoder = new TextDecoder(); let text = '';
const reader = response.body.getReader(); let utf8Decoder = new TextDecoder();
let getMessage = '';
let messageBuffer = '';
while (true) { while (true) {
const { done, value } = await reader.read(); const { done, value } = await reader.read();
let decoded = decoder.decode(value); if (done) return;
const rawData = isSSEStream ? value.data : utf8Decoder.decode(value, { stream: true });
// Claude's streaming SSE messages are separated by \r if (isSSEStream && rawData === '[DONE]') return;
if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) { tryParseStreamingError(response, rawData);
decoded = decoded.replace(/\r/g, ''); text += getStreamingReply(JSON.parse(rawData));
} yield { text, swipes: [] };
tryParseStreamingError(response, decoded);
let eventList = [];
// ReadableStream's buffer is not guaranteed to contain full SSE messages as they arrive in chunks
// We need to buffer chunks until we have one or more full messages (separated by double newlines)
if (!oai_settings.legacy_streaming) {
messageBuffer += decoded;
eventList = messageBuffer.split('\n\n');
// Last element will be an empty string or a leftover partial message
messageBuffer = eventList.pop();
} else {
eventList = decoded.split('\n');
}
for (let event of eventList) {
if (event.startsWith('event: completion')) {
event = event.split('\n')[1];
}
if (typeof event !== 'string' || !event.length)
continue;
if (!event.startsWith('data'))
continue;
if (event == 'data: [DONE]') {
return;
}
let data = JSON.parse(event.substring(6));
// the first and last messages are undefined, protect against that
getMessage = getStreamingReply(getMessage, data);
yield { text: getMessage, swipes: [] };
}
if (done) {
return;
}
} }
}; };
} }
@ -1639,13 +1625,14 @@ async function sendOpenAIRequest(type, messages, signal) {
} }
} }
function getStreamingReply(getMessage, data) { function getStreamingReply(data) {
if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) { if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) {
getMessage += data?.completion || ''; return data?.completion || '';
} else if (oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE) {
return data?.candidates[0].content.parts[0].text || '';
} else { } else {
getMessage += data.choices[0]?.delta?.content || data.choices[0]?.message?.content || data.choices[0]?.text || ''; return data.choices[0]?.delta?.content || data.choices[0]?.message?.content || data.choices[0]?.text || '';
} }
return getMessage;
} }
function handleWindowError(err) { function handleWindowError(err) {
@ -1683,7 +1670,7 @@ async function calculateLogitBias() {
let result = {}; let result = {};
try { try {
const reply = await fetch(`/openai_bias?model=${getTokenizerModel()}`, { const reply = await fetch(`/api/backends/chat-completions/bias?model=${getTokenizerModel()}`, {
method: 'POST', method: 'POST',
headers: getRequestHeaders(), headers: getRequestHeaders(),
body, body,
@ -1824,13 +1811,15 @@ class Message {
async addImage(image) { async addImage(image) {
const textContent = this.content; const textContent = this.content;
const isDataUrl = isDataURL(image); const isDataUrl = isDataURL(image);
if (!isDataUrl) { if (!isDataUrl) {
try { try {
const response = await fetch(image, { method: 'GET', cache: 'force-cache' }); const response = await fetch(image, { method: 'GET', cache: 'force-cache' });
if (!response.ok) throw new Error('Failed to fetch image'); if (!response.ok) throw new Error('Failed to fetch image');
const blob = await response.blob(); const blob = await response.blob();
image = await getBase64Async(blob); image = await getBase64Async(blob);
if (oai_settings.chat_completion_source === chat_completion_sources.MAKERSUITE) {
image = image.split(',')[1];
}
} catch (error) { } catch (error) {
console.error('Image adding skipped', error); console.error('Image adding skipped', error);
return; return;
@ -2313,7 +2302,6 @@ function loadOpenAISettings(data, settings) {
oai_settings.openai_max_tokens = settings.openai_max_tokens ?? default_settings.openai_max_tokens; oai_settings.openai_max_tokens = settings.openai_max_tokens ?? default_settings.openai_max_tokens;
oai_settings.bias_preset_selected = settings.bias_preset_selected ?? default_settings.bias_preset_selected; oai_settings.bias_preset_selected = settings.bias_preset_selected ?? default_settings.bias_preset_selected;
oai_settings.bias_presets = settings.bias_presets ?? default_settings.bias_presets; oai_settings.bias_presets = settings.bias_presets ?? default_settings.bias_presets;
oai_settings.legacy_streaming = settings.legacy_streaming ?? default_settings.legacy_streaming;
oai_settings.max_context_unlocked = settings.max_context_unlocked ?? default_settings.max_context_unlocked; oai_settings.max_context_unlocked = settings.max_context_unlocked ?? default_settings.max_context_unlocked;
oai_settings.send_if_empty = settings.send_if_empty ?? default_settings.send_if_empty; oai_settings.send_if_empty = settings.send_if_empty ?? default_settings.send_if_empty;
oai_settings.wi_format = settings.wi_format ?? default_settings.wi_format; oai_settings.wi_format = settings.wi_format ?? default_settings.wi_format;
@ -2328,6 +2316,8 @@ function loadOpenAISettings(data, settings) {
oai_settings.openrouter_use_fallback = settings.openrouter_use_fallback ?? default_settings.openrouter_use_fallback; oai_settings.openrouter_use_fallback = settings.openrouter_use_fallback ?? default_settings.openrouter_use_fallback;
oai_settings.openrouter_force_instruct = settings.openrouter_force_instruct ?? default_settings.openrouter_force_instruct; oai_settings.openrouter_force_instruct = settings.openrouter_force_instruct ?? default_settings.openrouter_force_instruct;
oai_settings.ai21_model = settings.ai21_model ?? default_settings.ai21_model; oai_settings.ai21_model = settings.ai21_model ?? default_settings.ai21_model;
oai_settings.mistralai_model = settings.mistralai_model ?? default_settings.mistralai_model;
oai_settings.google_model = settings.google_model ?? default_settings.google_model;
oai_settings.chat_completion_source = settings.chat_completion_source ?? default_settings.chat_completion_source; oai_settings.chat_completion_source = settings.chat_completion_source ?? default_settings.chat_completion_source;
oai_settings.api_url_scale = settings.api_url_scale ?? default_settings.api_url_scale; oai_settings.api_url_scale = settings.api_url_scale ?? default_settings.api_url_scale;
oai_settings.show_external_models = settings.show_external_models ?? default_settings.show_external_models; oai_settings.show_external_models = settings.show_external_models ?? default_settings.show_external_models;
@ -2350,6 +2340,7 @@ function loadOpenAISettings(data, settings) {
if (settings.names_in_completion !== undefined) oai_settings.names_in_completion = !!settings.names_in_completion; if (settings.names_in_completion !== undefined) oai_settings.names_in_completion = !!settings.names_in_completion;
if (settings.openai_model !== undefined) oai_settings.openai_model = settings.openai_model; if (settings.openai_model !== undefined) oai_settings.openai_model = settings.openai_model;
if (settings.use_ai21_tokenizer !== undefined) { oai_settings.use_ai21_tokenizer = !!settings.use_ai21_tokenizer; oai_settings.use_ai21_tokenizer ? ai21_max = 8191 : ai21_max = 9200; } if (settings.use_ai21_tokenizer !== undefined) { oai_settings.use_ai21_tokenizer = !!settings.use_ai21_tokenizer; oai_settings.use_ai21_tokenizer ? ai21_max = 8191 : ai21_max = 9200; }
if (settings.use_google_tokenizer !== undefined) oai_settings.use_google_tokenizer = !!settings.use_google_tokenizer;
if (settings.exclude_assistant !== undefined) oai_settings.exclude_assistant = !!settings.exclude_assistant; if (settings.exclude_assistant !== undefined) oai_settings.exclude_assistant = !!settings.exclude_assistant;
if (settings.use_alt_scale !== undefined) { oai_settings.use_alt_scale = !!settings.use_alt_scale; updateScaleForm(); } if (settings.use_alt_scale !== undefined) { oai_settings.use_alt_scale = !!settings.use_alt_scale; updateScaleForm(); }
$('#stream_toggle').prop('checked', oai_settings.stream_openai); $('#stream_toggle').prop('checked', oai_settings.stream_openai);
@ -2365,10 +2356,14 @@ function loadOpenAISettings(data, settings) {
$(`#model_claude_select option[value="${oai_settings.claude_model}"`).attr('selected', true); $(`#model_claude_select option[value="${oai_settings.claude_model}"`).attr('selected', true);
$('#model_windowai_select').val(oai_settings.windowai_model); $('#model_windowai_select').val(oai_settings.windowai_model);
$(`#model_windowai_select option[value="${oai_settings.windowai_model}"`).attr('selected', true); $(`#model_windowai_select option[value="${oai_settings.windowai_model}"`).attr('selected', true);
$('#model_google_select').val(oai_settings.google_model);
$(`#model_google_select option[value="${oai_settings.google_model}"`).attr('selected', true);
$('#model_ai21_select').val(oai_settings.ai21_model); $('#model_ai21_select').val(oai_settings.ai21_model);
$(`#model_ai21_select option[value="${oai_settings.ai21_model}"`).attr('selected', true); $(`#model_ai21_select option[value="${oai_settings.ai21_model}"`).attr('selected', true);
$('#model_togetherai_select').val(oai_settings.togetherai_model); $('#model_togetherai_select').val(oai_settings.togetherai_model);
$(`#model_togetherai_select option[value="${oai_settings.togetherai_model}"`).attr('selected', true); $(`#model_togetherai_select option[value="${oai_settings.togetherai_model}"`).attr('selected', true);
$('#model_mistralai_select').val(oai_settings.mistralai_model);
$(`#model_mistralai_select option[value="${oai_settings.mistralai_model}"`).attr('selected', true);
$('#openai_max_context').val(oai_settings.openai_max_context); $('#openai_max_context').val(oai_settings.openai_max_context);
$('#openai_max_context_counter').val(`${oai_settings.openai_max_context}`); $('#openai_max_context_counter').val(`${oai_settings.openai_max_context}`);
$('#model_openrouter_select').val(oai_settings.openrouter_model); $('#model_openrouter_select').val(oai_settings.openrouter_model);
@ -2379,10 +2374,10 @@ function loadOpenAISettings(data, settings) {
$('#wrap_in_quotes').prop('checked', oai_settings.wrap_in_quotes); $('#wrap_in_quotes').prop('checked', oai_settings.wrap_in_quotes);
$('#names_in_completion').prop('checked', oai_settings.names_in_completion); $('#names_in_completion').prop('checked', oai_settings.names_in_completion);
$('#jailbreak_system').prop('checked', oai_settings.jailbreak_system); $('#jailbreak_system').prop('checked', oai_settings.jailbreak_system);
$('#legacy_streaming').prop('checked', oai_settings.legacy_streaming);
$('#openai_show_external_models').prop('checked', oai_settings.show_external_models); $('#openai_show_external_models').prop('checked', oai_settings.show_external_models);
$('#openai_external_category').toggle(oai_settings.show_external_models); $('#openai_external_category').toggle(oai_settings.show_external_models);
$('#use_ai21_tokenizer').prop('checked', oai_settings.use_ai21_tokenizer); $('#use_ai21_tokenizer').prop('checked', oai_settings.use_ai21_tokenizer);
$('#use_google_tokenizer').prop('checked', oai_settings.use_google_tokenizer);
$('#exclude_assistant').prop('checked', oai_settings.exclude_assistant); $('#exclude_assistant').prop('checked', oai_settings.exclude_assistant);
$('#scale-alt').prop('checked', oai_settings.use_alt_scale); $('#scale-alt').prop('checked', oai_settings.use_alt_scale);
$('#openrouter_use_fallback').prop('checked', oai_settings.openrouter_use_fallback); $('#openrouter_use_fallback').prop('checked', oai_settings.openrouter_use_fallback);
@ -2438,6 +2433,11 @@ function loadOpenAISettings(data, settings) {
} }
$('#openai_logit_bias_preset').trigger('change'); $('#openai_logit_bias_preset').trigger('change');
// Upgrade Palm to Makersuite
if (oai_settings.chat_completion_source === 'palm') {
oai_settings.chat_completion_source = chat_completion_sources.MAKERSUITE;
}
$('#chat_completion_source').val(oai_settings.chat_completion_source).trigger('change'); $('#chat_completion_source').val(oai_settings.chat_completion_source).trigger('change');
$('#oai_max_context_unlocked').prop('checked', oai_settings.max_context_unlocked); $('#oai_max_context_unlocked').prop('checked', oai_settings.max_context_unlocked);
} }
@ -2458,7 +2458,7 @@ async function getStatusOpen() {
return resultCheckStatus(); return resultCheckStatus();
} }
const noValidateSources = [chat_completion_sources.SCALE, chat_completion_sources.CLAUDE, chat_completion_sources.AI21, chat_completion_sources.PALM]; const noValidateSources = [chat_completion_sources.SCALE, chat_completion_sources.CLAUDE, chat_completion_sources.AI21, chat_completion_sources.MAKERSUITE];
if (noValidateSources.includes(oai_settings.chat_completion_source)) { if (noValidateSources.includes(oai_settings.chat_completion_source)) {
let status = 'Unable to verify key; press "Test Message" to validate.'; let status = 'Unable to verify key; press "Test Message" to validate.';
setOnlineStatus(status); setOnlineStatus(status);
@ -2471,7 +2471,7 @@ async function getStatusOpen() {
chat_completion_source: oai_settings.chat_completion_source, chat_completion_source: oai_settings.chat_completion_source,
}; };
if (oai_settings.reverse_proxy && oai_settings.chat_completion_source !== chat_completion_sources.OPENROUTER) { if (oai_settings.reverse_proxy && (oai_settings.chat_completion_source === chat_completion_sources.OPENAI || oai_settings.chat_completion_source === chat_completion_sources.CLAUDE)) {
validateReverseProxy(); validateReverseProxy();
} }
@ -2481,7 +2481,7 @@ async function getStatusOpen() {
} }
try { try {
const response = await fetch('/getstatus_openai', { const response = await fetch('/api/backends/chat-completions/status', {
method: 'POST', method: 'POST',
headers: getRequestHeaders(), headers: getRequestHeaders(),
body: JSON.stringify(data), body: JSON.stringify(data),
@ -2521,28 +2521,6 @@ function showWindowExtensionError() {
}); });
} }
function trySelectPresetByName(name) {
let preset_found = null;
for (const key in openai_setting_names) {
if (name.trim() == key.trim()) {
preset_found = key;
break;
}
}
// Don't change if the current preset is the same
if (preset_found && preset_found === oai_settings.preset_settings_openai) {
return;
}
if (preset_found) {
oai_settings.preset_settings_openai = preset_found;
const value = openai_setting_names[preset_found];
$(`#settings_preset_openai option[value="${value}"]`).attr('selected', true);
$('#settings_preset_openai').val(value).trigger('change');
}
}
/** /**
* Persist a settings preset with the given name * Persist a settings preset with the given name
* *
@ -2564,6 +2542,8 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
openrouter_sort_models: settings.openrouter_sort_models, openrouter_sort_models: settings.openrouter_sort_models,
ai21_model: settings.ai21_model, ai21_model: settings.ai21_model,
togetherai_model: settings.togetherai_model, togetherai_model: settings.togetherai_model,
mistralai_model: settings.mistralai_model,
google_model: settings.google_model,
temperature: settings.temp_openai, temperature: settings.temp_openai,
frequency_penalty: settings.freq_pen_openai, frequency_penalty: settings.freq_pen_openai,
presence_penalty: settings.pres_pen_openai, presence_penalty: settings.pres_pen_openai,
@ -2585,7 +2565,6 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
bias_preset_selected: settings.bias_preset_selected, bias_preset_selected: settings.bias_preset_selected,
reverse_proxy: settings.reverse_proxy, reverse_proxy: settings.reverse_proxy,
proxy_password: settings.proxy_password, proxy_password: settings.proxy_password,
legacy_streaming: settings.legacy_streaming,
max_context_unlocked: settings.max_context_unlocked, max_context_unlocked: settings.max_context_unlocked,
wi_format: settings.wi_format, wi_format: settings.wi_format,
scenario_format: settings.scenario_format, scenario_format: settings.scenario_format,
@ -2598,6 +2577,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
show_external_models: settings.show_external_models, show_external_models: settings.show_external_models,
assistant_prefill: settings.assistant_prefill, assistant_prefill: settings.assistant_prefill,
use_ai21_tokenizer: settings.use_ai21_tokenizer, use_ai21_tokenizer: settings.use_ai21_tokenizer,
use_google_tokenizer: settings.use_google_tokenizer,
exclude_assistant: settings.exclude_assistant, exclude_assistant: settings.exclude_assistant,
use_alt_scale: settings.use_alt_scale, use_alt_scale: settings.use_alt_scale,
squash_system_messages: settings.squash_system_messages, squash_system_messages: settings.squash_system_messages,
@ -2935,6 +2915,8 @@ function onSettingsPresetChange() {
openrouter_sort_models: ['#openrouter_sort_models', 'openrouter_sort_models', false], openrouter_sort_models: ['#openrouter_sort_models', 'openrouter_sort_models', false],
ai21_model: ['#model_ai21_select', 'ai21_model', false], ai21_model: ['#model_ai21_select', 'ai21_model', false],
togetherai_model: ['#model_togetherai_select', 'togetherai_model', false], togetherai_model: ['#model_togetherai_select', 'togetherai_model', false],
mistralai_model: ['#model_mistralai_select', 'mistralai_model', false],
google_model: ['#model_google_select', 'google_model', false],
openai_max_context: ['#openai_max_context', 'openai_max_context', false], openai_max_context: ['#openai_max_context', 'openai_max_context', false],
openai_max_tokens: ['#openai_max_tokens', 'openai_max_tokens', false], openai_max_tokens: ['#openai_max_tokens', 'openai_max_tokens', false],
wrap_in_quotes: ['#wrap_in_quotes', 'wrap_in_quotes', true], wrap_in_quotes: ['#wrap_in_quotes', 'wrap_in_quotes', true],
@ -2947,7 +2929,6 @@ function onSettingsPresetChange() {
continue_nudge_prompt: ['#continue_nudge_prompt_textarea', 'continue_nudge_prompt', false], continue_nudge_prompt: ['#continue_nudge_prompt_textarea', 'continue_nudge_prompt', false],
bias_preset_selected: ['#openai_logit_bias_preset', 'bias_preset_selected', false], bias_preset_selected: ['#openai_logit_bias_preset', 'bias_preset_selected', false],
reverse_proxy: ['#openai_reverse_proxy', 'reverse_proxy', false], reverse_proxy: ['#openai_reverse_proxy', 'reverse_proxy', false],
legacy_streaming: ['#legacy_streaming', 'legacy_streaming', true],
wi_format: ['#wi_format_textarea', 'wi_format', false], wi_format: ['#wi_format_textarea', 'wi_format', false],
scenario_format: ['#scenario_format_textarea', 'scenario_format', false], scenario_format: ['#scenario_format_textarea', 'scenario_format', false],
personality_format: ['#personality_format_textarea', 'personality_format', false], personality_format: ['#personality_format_textarea', 'personality_format', false],
@ -2960,6 +2941,7 @@ function onSettingsPresetChange() {
proxy_password: ['#openai_proxy_password', 'proxy_password', false], proxy_password: ['#openai_proxy_password', 'proxy_password', false],
assistant_prefill: ['#claude_assistant_prefill', 'assistant_prefill', false], assistant_prefill: ['#claude_assistant_prefill', 'assistant_prefill', false],
use_ai21_tokenizer: ['#use_ai21_tokenizer', 'use_ai21_tokenizer', true], use_ai21_tokenizer: ['#use_ai21_tokenizer', 'use_ai21_tokenizer', true],
use_google_tokenizer: ['#use_google_tokenizer', 'use_google_tokenizer', true],
exclude_assistant: ['#exclude_assistant', 'exclude_assistant', true], exclude_assistant: ['#exclude_assistant', 'exclude_assistant', true],
use_alt_scale: ['#use_alt_scale', 'use_alt_scale', true], use_alt_scale: ['#use_alt_scale', 'use_alt_scale', true],
squash_system_messages: ['#squash_system_messages', 'squash_system_messages', true], squash_system_messages: ['#squash_system_messages', 'squash_system_messages', true],
@ -3068,7 +3050,7 @@ function getMaxContextWindowAI(value) {
return max_8k; return max_8k;
} }
else if (value.includes('palm-2')) { else if (value.includes('palm-2')) {
return palm2_max; return max_8k;
} }
else if (value.includes('GPT-NeoXT')) { else if (value.includes('GPT-NeoXT')) {
return max_2k; return max_2k;
@ -3112,11 +3094,21 @@ async function onModelChange() {
console.log('AI21 model changed to', value); console.log('AI21 model changed to', value);
oai_settings.ai21_model = value; oai_settings.ai21_model = value;
} }
if ($(this).is('#model_togetherai_select')) { if ($(this).is('#model_togetherai_select')) {
console.log('TogetherAI model changed to', value); console.log('TogetherAI model changed to', value);
oai_settings.togetherai_model = value; oai_settings.togetherai_model = value;
} }
if ($(this).is('#model_google_select')) {
console.log('Google model changed to', value);
oai_settings.google_model = value;
}
if ($(this).is('#model_mistralai_select')) {
console.log('MistralAI model changed to', value);
oai_settings.mistralai_model = value;
}
if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) { if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) {
if (oai_settings.max_context_unlocked) { if (oai_settings.max_context_unlocked) {
@ -3128,13 +3120,18 @@ async function onModelChange() {
$('#openai_max_context').val(oai_settings.openai_max_context).trigger('input'); $('#openai_max_context').val(oai_settings.openai_max_context).trigger('input');
} }
if (oai_settings.chat_completion_source == chat_completion_sources.PALM) { if (oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE) {
if (oai_settings.max_context_unlocked) { if (oai_settings.max_context_unlocked) {
$('#openai_max_context').attr('max', unlocked_max); $('#openai_max_context').attr('max', unlocked_max);
} else if (value === 'gemini-pro') {
$('#openai_max_context').attr('max', max_32k);
} else if (value === 'gemini-pro-vision') {
$('#openai_max_context').attr('max', max_16k);
} else { } else {
$('#openai_max_context').attr('max', palm2_max); $('#openai_max_context').attr('max', max_8k);
} }
oai_settings.temp_openai = Math.min(claude_max_temp, oai_settings.temp_openai);
$('#temp_openai').attr('max', claude_max_temp).val(oai_settings.temp_openai).trigger('input');
oai_settings.openai_max_context = Math.min(Number($('#openai_max_context').attr('max')), oai_settings.openai_max_context); oai_settings.openai_max_context = Math.min(Number($('#openai_max_context').attr('max')), oai_settings.openai_max_context);
$('#openai_max_context').val(oai_settings.openai_max_context).trigger('input'); $('#openai_max_context').val(oai_settings.openai_max_context).trigger('input');
} }
@ -3226,6 +3223,16 @@ async function onModelChange() {
$('#temp_openai').attr('max', oai_max_temp).val(oai_settings.temp_openai).trigger('input'); $('#temp_openai').attr('max', oai_max_temp).val(oai_settings.temp_openai).trigger('input');
} }
if (oai_settings.chat_completion_source === chat_completion_sources.MISTRALAI) {
$('#openai_max_context').attr('max', max_32k);
oai_settings.openai_max_context = Math.min(oai_settings.openai_max_context, Number($('#openai_max_context').attr('max')));
$('#openai_max_context').val(oai_settings.openai_max_context).trigger('input');
//mistral also caps temp at 1.0
oai_settings.temp_openai = Math.min(claude_max_temp, oai_settings.temp_openai);
$('#temp_openai').attr('max', claude_max_temp).val(oai_settings.temp_openai).trigger('input');
}
if (oai_settings.chat_completion_source == chat_completion_sources.AI21) { if (oai_settings.chat_completion_source == chat_completion_sources.AI21) {
if (oai_settings.max_context_unlocked) { if (oai_settings.max_context_unlocked) {
$('#openai_max_context').attr('max', unlocked_max); $('#openai_max_context').attr('max', unlocked_max);
@ -3335,15 +3342,15 @@ async function onConnectButtonClick(e) {
} }
} }
if (oai_settings.chat_completion_source == chat_completion_sources.PALM) { if (oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE) {
const api_key_palm = String($('#api_key_palm').val()).trim(); const api_key_makersuite = String($('#api_key_makersuite').val()).trim();
if (api_key_palm.length) { if (api_key_makersuite.length) {
await writeSecret(SECRET_KEYS.PALM, api_key_palm); await writeSecret(SECRET_KEYS.MAKERSUITE, api_key_makersuite);
} }
if (!secret_state[SECRET_KEYS.PALM]) { if (!secret_state[SECRET_KEYS.MAKERSUITE]) {
console.log('No secret key saved for PALM'); console.log('No secret key saved for MakerSuite');
return; return;
} }
} }
@ -3396,6 +3403,18 @@ async function onConnectButtonClick(e) {
if (!secret_state[SECRET_KEYS.TOGETHERAI]) { if (!secret_state[SECRET_KEYS.TOGETHERAI]) {
console.log('No secret key saved for TogetherAI'); console.log('No secret key saved for TogetherAI');
}
}
if (oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI) {
const api_key_mistralai = String($('#api_key_mistralai').val()).trim();
if (api_key_mistralai.length) {
await writeSecret(SECRET_KEYS.MISTRALAI, api_key_mistralai);
}
if (!secret_state[SECRET_KEYS.MISTRALAI]) {
console.log('No secret key saved for MistralAI');
return; return;
} }
} }
@ -3423,8 +3442,8 @@ function toggleChatCompletionForms() {
else if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) { else if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) {
$('#model_scale_select').trigger('change'); $('#model_scale_select').trigger('change');
} }
else if (oai_settings.chat_completion_source == chat_completion_sources.PALM) { else if (oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE) {
$('#model_palm_select').trigger('change'); $('#model_google_select').trigger('change');
} }
else if (oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER) { else if (oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER) {
$('#model_openrouter_select').trigger('change'); $('#model_openrouter_select').trigger('change');
@ -3435,6 +3454,9 @@ function toggleChatCompletionForms() {
else if (oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI) { else if (oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI) {
$('#model_togetherai_select').trigger('change'); $('#model_togetherai_select').trigger('change');
} }
else if (oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI) {
$('#model_mistralai_select').trigger('change');
}
$('[data-source]').each(function () { $('[data-source]').each(function () {
const validSources = $(this).data('source').split(','); const validSources = $(this).data('source').split(',');
$(this).toggle(validSources.includes(oai_settings.chat_completion_source)); $(this).toggle(validSources.includes(oai_settings.chat_completion_source));
@ -3495,6 +3517,7 @@ export function isImageInliningSupported() {
} }
const gpt4v = 'gpt-4-vision'; const gpt4v = 'gpt-4-vision';
const geminiProV = 'gemini-pro-vision';
const llava13b = 'llava-13b'; const llava13b = 'llava-13b';
if (!oai_settings.image_inlining) { if (!oai_settings.image_inlining) {
@ -3504,6 +3527,8 @@ export function isImageInliningSupported() {
switch (oai_settings.chat_completion_source) { switch (oai_settings.chat_completion_source) {
case chat_completion_sources.OPENAI: case chat_completion_sources.OPENAI:
return oai_settings.openai_model.includes(gpt4v); return oai_settings.openai_model.includes(gpt4v);
case chat_completion_sources.MAKERSUITE:
return oai_settings.google_model.includes(geminiProV);
case chat_completion_sources.OPENROUTER: case chat_completion_sources.OPENROUTER:
return oai_settings.openrouter_model.includes(gpt4v) || oai_settings.openrouter_model.includes(llava13b); return oai_settings.openrouter_model.includes(gpt4v) || oai_settings.openrouter_model.includes(llava13b);
default: default:
@ -3588,6 +3613,11 @@ $(document).ready(async function () {
saveSettingsDebounced(); saveSettingsDebounced();
}); });
$('#use_google_tokenizer').on('change', function () {
oai_settings.use_google_tokenizer = !!$('#use_google_tokenizer').prop('checked');
saveSettingsDebounced();
});
$('#exclude_assistant').on('change', function () { $('#exclude_assistant').on('change', function () {
oai_settings.exclude_assistant = !!$('#exclude_assistant').prop('checked'); oai_settings.exclude_assistant = !!$('#exclude_assistant').prop('checked');
$('#claude_assistant_prefill_block').toggle(!oai_settings.exclude_assistant); $('#claude_assistant_prefill_block').toggle(!oai_settings.exclude_assistant);
@ -3649,29 +3679,6 @@ $(document).ready(async function () {
saveSettingsDebounced(); saveSettingsDebounced();
}); });
// auto-select a preset based on character/group name
$(document).on('click', '.character_select', function () {
const chid = $(this).attr('chid');
const name = characters[chid]?.name;
if (!name) {
return;
}
trySelectPresetByName(name);
});
$(document).on('click', '.group_select', function () {
const grid = $(this).data('id');
const name = groups.find(x => x.id === grid)?.name;
if (!name) {
return;
}
trySelectPresetByName(name);
});
$('#update_oai_preset').on('click', async function () { $('#update_oai_preset').on('click', async function () {
const name = oai_settings.preset_settings_openai; const name = oai_settings.preset_settings_openai;
await saveOpenAIPreset(name, oai_settings); await saveOpenAIPreset(name, oai_settings);
@ -3732,11 +3739,6 @@ $(document).ready(async function () {
saveSettingsDebounced(); saveSettingsDebounced();
}); });
$('#legacy_streaming').on('input', function () {
oai_settings.legacy_streaming = !!$(this).prop('checked');
saveSettingsDebounced();
});
$('#openai_bypass_status_check').on('input', function () { $('#openai_bypass_status_check').on('input', function () {
oai_settings.bypass_status_check = !!$(this).prop('checked'); oai_settings.bypass_status_check = !!$(this).prop('checked');
getStatusOpen(); getStatusOpen();
@ -3827,12 +3829,13 @@ $(document).ready(async function () {
$('#model_claude_select').on('change', onModelChange); $('#model_claude_select').on('change', onModelChange);
$('#model_windowai_select').on('change', onModelChange); $('#model_windowai_select').on('change', onModelChange);
$('#model_scale_select').on('change', onModelChange); $('#model_scale_select').on('change', onModelChange);
$('#model_palm_select').on('change', onModelChange); $('#model_google_select').on('change', onModelChange);
$('#model_openrouter_select').on('change', onModelChange); $('#model_openrouter_select').on('change', onModelChange);
$('#openrouter_group_models').on('change', onOpenrouterModelSortChange); $('#openrouter_group_models').on('change', onOpenrouterModelSortChange);
$('#openrouter_sort_models').on('change', onOpenrouterModelSortChange); $('#openrouter_sort_models').on('change', onOpenrouterModelSortChange);
$('#model_ai21_select').on('change', onModelChange); $('#model_ai21_select').on('change', onModelChange);
$('#model_togetherai_select').on('change', onModelChange); $('#model_togetherai_select').on('change', onModelChange);
$('#model_mistralai_select').on('change', onModelChange);
$('#settings_preset_openai').on('change', onSettingsPresetChange); $('#settings_preset_openai').on('change', onSettingsPresetChange);
$('#new_oai_preset').on('click', onNewPresetClick); $('#new_oai_preset').on('click', onNewPresetClick);
$('#delete_oai_preset').on('click', onDeletePresetClick); $('#delete_oai_preset').on('click', onDeletePresetClick);

View File

@ -19,6 +19,8 @@ import {
showMoreMessages, showMoreMessages,
saveSettings, saveSettings,
saveChatConditional, saveChatConditional,
setAnimationDuration,
ANIMATION_DURATION_DEFAULT,
} from '../script.js'; } from '../script.js';
import { isMobile, initMovingUI, favsToHotswap } from './RossAscends-mods.js'; import { isMobile, initMovingUI, favsToHotswap } from './RossAscends-mods.js';
import { import {
@ -35,7 +37,7 @@ import { registerSlashCommand } from './slash-commands.js';
import { tags } from './tags.js'; import { tags } from './tags.js';
import { tokenizers } from './tokenizers.js'; import { tokenizers } from './tokenizers.js';
import { countOccurrences, debounce, delay, isOdd, resetScrollHeight, sortMoments, stringToRange, timestampToMoment } from './utils.js'; import { countOccurrences, debounce, delay, isOdd, resetScrollHeight, shuffle, sortMoments, stringToRange, timestampToMoment } from './utils.js';
export { export {
loadPowerUserSettings, loadPowerUserSettings,
@ -55,7 +57,7 @@ const MAX_CONTEXT_UNLOCKED = 200 * 1024;
const MAX_RESPONSE_UNLOCKED = 16 * 1024; const MAX_RESPONSE_UNLOCKED = 16 * 1024;
const unlockedMaxContextStep = 512; const unlockedMaxContextStep = 512;
const maxContextMin = 512; const maxContextMin = 512;
const maxContextStep = 256; const maxContextStep = 64;
const defaultStoryString = '{{#if system}}{{system}}\n{{/if}}{{#if description}}{{description}}\n{{/if}}{{#if personality}}{{char}}\'s personality: {{personality}}\n{{/if}}{{#if scenario}}Scenario: {{scenario}}\n{{/if}}{{#if persona}}{{persona}}\n{{/if}}'; const defaultStoryString = '{{#if system}}{{system}}\n{{/if}}{{#if description}}{{description}}\n{{/if}}{{#if personality}}{{char}}\'s personality: {{personality}}\n{{/if}}{{#if scenario}}Scenario: {{scenario}}\n{{/if}}{{#if persona}}{{persona}}\n{{/if}}';
const defaultExampleSeparator = '***'; const defaultExampleSeparator = '***';
@ -112,6 +114,7 @@ let power_user = {
}, },
markdown_escape_strings: '', markdown_escape_strings: '',
chat_truncation: 100, chat_truncation: 100,
streaming_fps: 30,
ui_mode: ui_mode.POWER, ui_mode: ui_mode.POWER,
fast_ui_mode: true, fast_ui_mode: true,
@ -228,6 +231,8 @@ let power_user = {
bogus_folders: false, bogus_folders: false,
aux_field: 'character_version', aux_field: 'character_version',
restore_user_input: true, restore_user_input: true,
reduced_motion: false,
compact_input_area: true,
}; };
let themes = []; let themes = [];
@ -268,6 +273,8 @@ const storage_keys = {
expand_message_actions: 'ExpandMessageActions', expand_message_actions: 'ExpandMessageActions',
enableZenSliders: 'enableZenSliders', enableZenSliders: 'enableZenSliders',
enableLabMode: 'enableLabMode', enableLabMode: 'enableLabMode',
reduced_motion: 'reduced_motion',
compact_input_area: 'compact_input_area',
}; };
const contextControls = [ const contextControls = [
@ -436,6 +443,22 @@ function switchMessageActions() {
$('.extraMesButtons, .extraMesButtonsHint').removeAttr('style'); $('.extraMesButtons, .extraMesButtonsHint').removeAttr('style');
} }
/**
 * Applies the persisted "reduced motion" preference to the UI:
 * disables jQuery effects, overrides the global animation duration,
 * and syncs the settings checkbox to the stored value.
 */
function switchReducedMotion() {
    // localStorage stores the flag as a string; a missing key means "off".
    const stored = localStorage.getItem(storage_keys.reduced_motion);
    const reduced = stored !== null && stored == 'true';
    power_user.reduced_motion = reduced;

    // Turn off all jQuery animations when reduced motion is requested.
    jQuery.fx.off = reduced;

    // Zero the animation duration under reduced motion, otherwise restore the default.
    setAnimationDuration(reduced ? 0 : ANIMATION_DURATION_DEFAULT);

    $('#reduced_motion').prop('checked', reduced);
}
/**
 * Applies the persisted "compact input area" preference:
 * toggles the compact class on the send form and syncs the checkbox.
 */
function switchCompactInputArea() {
    // A missing key defaults to compact mode being enabled.
    const stored = localStorage.getItem(storage_keys.compact_input_area);
    const compact = stored === null || stored == 'true';
    power_user.compact_input_area = compact;

    $('#send_form').toggleClass('compact', compact);
    $('#compact_input_area').prop('checked', compact);
}
var originalSliderValues = []; var originalSliderValues = [];
async function switchLabMode() { async function switchLabMode() {
@ -533,7 +556,7 @@ async function CreateZenSliders(elmnt) {
var sliderMax = Number(originalSlider.attr('max')); var sliderMax = Number(originalSlider.attr('max'));
var sliderValue = originalSlider.val(); var sliderValue = originalSlider.val();
var sliderRange = sliderMax - sliderMin; var sliderRange = sliderMax - sliderMin;
var numSteps = 10; var numSteps = 20;
var decimals = 2; var decimals = 2;
var offVal, allVal; var offVal, allVal;
var stepScale; var stepScale;
@ -1227,6 +1250,22 @@ async function applyTheme(name) {
await printCharacters(true); await printCharacters(true);
}, },
}, },
{
key: 'reduced_motion',
action: async () => {
localStorage.setItem(storage_keys.reduced_motion, String(power_user.reduced_motion));
$('#reduced_motion').prop('checked', power_user.reduced_motion);
switchReducedMotion();
},
},
{
key: 'compact_input_area',
action: async () => {
localStorage.setItem(storage_keys.compact_input_area, String(power_user.compact_input_area));
$('#compact_input_area').prop('checked', power_user.compact_input_area);
switchCompactInputArea();
},
},
]; ];
for (const { key, selector, type, action } of themeProperties) { for (const { key, selector, type, action } of themeProperties) {
@ -1440,6 +1479,9 @@ function loadPowerUserSettings(settings, data) {
$('#chat_truncation').val(power_user.chat_truncation); $('#chat_truncation').val(power_user.chat_truncation);
$('#chat_truncation_counter').val(power_user.chat_truncation); $('#chat_truncation_counter').val(power_user.chat_truncation);
$('#streaming_fps').val(power_user.streaming_fps);
$('#streaming_fps_counter').val(power_user.streaming_fps);
$('#font_scale').val(power_user.font_scale); $('#font_scale').val(power_user.font_scale);
$('#font_scale_counter').val(power_user.font_scale); $('#font_scale_counter').val(power_user.font_scale);
@ -1459,6 +1501,7 @@ function loadPowerUserSettings(settings, data) {
$('#shadow-color-picker').attr('color', power_user.shadow_color); $('#shadow-color-picker').attr('color', power_user.shadow_color);
$('#border-color-picker').attr('color', power_user.border_color); $('#border-color-picker').attr('color', power_user.border_color);
$('#ui_mode_select').val(power_user.ui_mode).find(`option[value="${power_user.ui_mode}"]`).attr('selected', true); $('#ui_mode_select').val(power_user.ui_mode).find(`option[value="${power_user.ui_mode}"]`).attr('selected', true);
$('#reduced_motion').prop('checked', power_user.reduced_motion);
for (const theme of themes) { for (const theme of themes) {
const option = document.createElement('option'); const option = document.createElement('option');
@ -1478,6 +1521,8 @@ function loadPowerUserSettings(settings, data) {
$(`#character_sort_order option[data-order="${power_user.sort_order}"][data-field="${power_user.sort_field}"]`).prop('selected', true); $(`#character_sort_order option[data-order="${power_user.sort_order}"][data-field="${power_user.sort_field}"]`).prop('selected', true);
switchReducedMotion();
switchCompactInputArea();
reloadMarkdownProcessor(power_user.render_formulas); reloadMarkdownProcessor(power_user.render_formulas);
loadInstructMode(data); loadInstructMode(data);
loadContextSettings(); loadContextSettings();
@ -1504,7 +1549,7 @@ async function loadCharListState() {
} }
function loadMovingUIState() { function loadMovingUIState() {
if (isMobile() === false if (!isMobile()
&& power_user.movingUIState && power_user.movingUIState
&& power_user.movingUI === true) { && power_user.movingUI === true) {
console.debug('loading movingUI state'); console.debug('loading movingUI state');
@ -1818,10 +1863,6 @@ export function renderStoryString(params) {
const sortFunc = (a, b) => power_user.sort_order == 'asc' ? compareFunc(a, b) : compareFunc(b, a); const sortFunc = (a, b) => power_user.sort_order == 'asc' ? compareFunc(a, b) : compareFunc(b, a);
const compareFunc = (first, second) => { const compareFunc = (first, second) => {
if (power_user.sort_order == 'random') {
return Math.random() > 0.5 ? 1 : -1;
}
const a = first[power_user.sort_field]; const a = first[power_user.sort_field];
const b = second[power_user.sort_field]; const b = second[power_user.sort_field];
@ -1853,6 +1894,11 @@ function sortEntitiesList(entities) {
return; return;
} }
if (power_user.sort_order === 'random') {
shuffle(entities);
return;
}
entities.sort((a, b) => { entities.sort((a, b) => {
if (a.type === 'tag' && b.type !== 'tag') { if (a.type === 'tag' && b.type !== 'tag') {
return -1; return -1;
@ -1866,11 +1912,26 @@ function sortEntitiesList(entities) {
}); });
} }
async function saveTheme() { /**
const name = await callPopup('Enter a theme preset name:', 'input'); * Updates the current UI theme file.
*/
async function updateTheme() {
await saveTheme(power_user.theme);
toastr.success('Theme saved.');
}
if (!name) { /**
return; * Saves the current theme to the server.
* @param {string|undefined} name Theme name. If undefined, a popup will be shown to enter a name.
* @returns {Promise<void>} A promise that resolves when the theme is saved.
*/
async function saveTheme(name = undefined) {
if (typeof name !== 'string') {
name = await callPopup('Enter a theme preset name:', 'input', power_user.theme);
if (!name) {
return;
}
} }
const theme = { const theme = {
@ -1905,6 +1966,8 @@ async function saveTheme() {
hotswap_enabled: power_user.hotswap_enabled, hotswap_enabled: power_user.hotswap_enabled,
custom_css: power_user.custom_css, custom_css: power_user.custom_css,
bogus_folders: power_user.bogus_folders, bogus_folders: power_user.bogus_folders,
reduced_motion: power_user.reduced_motion,
compact_input_area: power_user.compact_input_area,
}; };
const response = await fetch('/savetheme', { const response = await fetch('/savetheme', {
@ -2678,6 +2741,12 @@ $(document).ready(() => {
saveSettingsDebounced(); saveSettingsDebounced();
}); });
$('#streaming_fps').on('input', function () {
power_user.streaming_fps = Number($('#streaming_fps').val());
$('#streaming_fps_counter').val(power_user.streaming_fps);
saveSettingsDebounced();
});
$('input[name="font_scale"]').on('input', async function (e) { $('input[name="font_scale"]').on('input', async function (e) {
power_user.font_scale = Number(e.target.value); power_user.font_scale = Number(e.target.value);
$('#font_scale_counter').val(power_user.font_scale); $('#font_scale_counter').val(power_user.font_scale);
@ -2771,7 +2840,8 @@ $(document).ready(() => {
saveSettingsDebounced(); saveSettingsDebounced();
}); });
$('#ui-preset-save-button').on('click', saveTheme); $('#ui-preset-save-button').on('click', () => saveTheme());
$('#ui-preset-update-button').on('click', () => updateTheme());
$('#movingui-preset-save-button').on('click', saveMovingUI); $('#movingui-preset-save-button').on('click', saveMovingUI);
$('#never_resize_avatars').on('input', function () { $('#never_resize_avatars').on('input', function () {
@ -3111,6 +3181,20 @@ $(document).ready(() => {
saveSettingsDebounced(); saveSettingsDebounced();
}); });
$('#reduced_motion').on('input', function () {
power_user.reduced_motion = !!$(this).prop('checked');
localStorage.setItem(storage_keys.reduced_motion, String(power_user.reduced_motion));
switchReducedMotion();
saveSettingsDebounced();
});
$('#compact_input_area').on('input', function () {
power_user.compact_input_area = !!$(this).prop('checked');
localStorage.setItem(storage_keys.compact_input_area, String(power_user.compact_input_area));
switchCompactInputArea();
saveSettingsDebounced();
});
$(document).on('click', '#debug_table [data-debug-function]', function () { $(document).on('click', '#debug_table [data-debug-function]', function () {
const functionId = $(this).data('debug-function'); const functionId = $(this).data('debug-function');
const functionRecord = debug_functions.find(f => f.functionId === functionId); const functionRecord = debug_functions.find(f => f.functionId === functionId);

View File

@ -12,6 +12,7 @@ import {
nai_settings, nai_settings,
novelai_setting_names, novelai_setting_names,
novelai_settings, novelai_settings,
online_status,
saveSettingsDebounced, saveSettingsDebounced,
this_chid, this_chid,
} from '../script.js'; } from '../script.js';
@ -19,6 +20,7 @@ import { groups, selected_group } from './group-chats.js';
import { instruct_presets } from './instruct-mode.js'; import { instruct_presets } from './instruct-mode.js';
import { kai_settings } from './kai-settings.js'; import { kai_settings } from './kai-settings.js';
import { context_presets, getContextSettings, power_user } from './power-user.js'; import { context_presets, getContextSettings, power_user } from './power-user.js';
import { registerSlashCommand } from './slash-commands.js';
import { import {
textgenerationwebui_preset_names, textgenerationwebui_preset_names,
textgenerationwebui_presets, textgenerationwebui_presets,
@ -28,6 +30,9 @@ import { download, parseJsonFile, waitUntilCondition } from './utils.js';
const presetManagers = {}; const presetManagers = {};
/**
* Automatically select a preset for current API based on character or group name.
*/
function autoSelectPreset() { function autoSelectPreset() {
const presetManager = getPresetManager(); const presetManager = getPresetManager();
@ -57,7 +62,12 @@ function autoSelectPreset() {
} }
} }
function getPresetManager(apiId) { /**
* Gets a preset manager by API id.
* @param {string} apiId API id
* @returns {PresetManager} Preset manager
*/
function getPresetManager(apiId = '') {
if (!apiId) { if (!apiId) {
apiId = main_api == 'koboldhorde' ? 'kobold' : main_api; apiId = main_api == 'koboldhorde' ? 'kobold' : main_api;
} }
@ -69,6 +79,9 @@ function getPresetManager(apiId) {
return presetManagers[apiId]; return presetManagers[apiId];
} }
/**
* Registers preset managers for all select elements with data-preset-manager-for attribute.
*/
function registerPresetManagers() { function registerPresetManagers() {
$('select[data-preset-manager-for]').each((_, e) => { $('select[data-preset-manager-for]').each((_, e) => {
const forData = $(e).data('preset-manager-for'); const forData = $(e).data('preset-manager-for');
@ -85,21 +98,46 @@ class PresetManager {
this.apiId = apiId; this.apiId = apiId;
} }
/**
* Gets all preset names.
* @returns {string[]} List of preset names
*/
getAllPresets() {
return $(this.select).find('option').map((_, el) => el.text).toArray();
}
/**
* Finds a preset by name.
* @param {string} name Preset name
* @returns {any} Preset value
*/
findPreset(name) { findPreset(name) {
return $(this.select).find(`option:contains(${name})`).val(); return $(this.select).find(`option:contains(${name})`).val();
} }
/**
* Gets the selected preset value.
* @returns {any} Selected preset value
*/
getSelectedPreset() { getSelectedPreset() {
return $(this.select).find('option:selected').val(); return $(this.select).find('option:selected').val();
} }
/**
* Gets the selected preset name.
* @returns {string} Selected preset name
*/
getSelectedPresetName() { getSelectedPresetName() {
return $(this.select).find('option:selected').text(); return $(this.select).find('option:selected').text();
} }
selectPreset(preset) { /**
$(this.select).find(`option[value=${preset}]`).prop('selected', true); * Selects a preset by option value.
$(this.select).val(preset).trigger('change'); * @param {string} value Preset option value
*/
selectPreset(value) {
$(this.select).find(`option[value=${value}]`).prop('selected', true);
$(this.select).val(value).trigger('change');
} }
async updatePreset() { async updatePreset() {
@ -334,11 +372,91 @@ class PresetManager {
} }
} }
jQuery(async () => { /**
await waitUntilCondition(() => eventSource !== undefined); * Selects a preset by name for current API.
* @param {any} _ Named arguments
* @param {string} name Unnamed arguments
* @returns {Promise<string>} Selected or current preset name
*/
/**
 * Selects a preset by name for current API.
 * @param {any} _ Named arguments
 * @param {string} name Unnamed arguments
 * @returns {Promise<string>} Selected or current preset name
 */
async function presetCommandCallback(_, name) {
    const shouldReconnect = online_status !== 'no_connection';
    const presetManager = getPresetManager();

    // Guard BEFORE dereferencing: the current API may have no preset manager.
    if (!presetManager) {
        console.debug(`Preset Manager not found for API: ${main_api}`);
        return '';
    }

    const allPresets = presetManager.getAllPresets();
    const currentPreset = presetManager.getSelectedPresetName();

    if (!name) {
        console.log('No name provided for /preset command, using current preset');
        return currentPreset;
    }

    if (!Array.isArray(allPresets) || allPresets.length === 0) {
        console.log(`No presets found for API: ${main_api}`);
        return currentPreset;
    }

    // Find exact match (case-insensitive, whitespace-trimmed)
    const exactMatch = allPresets.find(p => p.toLowerCase().trim() === name.toLowerCase().trim());

    if (exactMatch) {
        console.log('Found exact preset match', exactMatch);

        if (currentPreset !== exactMatch) {
            const presetValue = presetManager.findPreset(exactMatch);

            if (presetValue) {
                presetManager.selectPreset(presetValue);
                // Only wait for a reconnect if we were connected before switching.
                shouldReconnect && await waitForConnection();
            }
        }

        return exactMatch;
    } else {
        // Find fuzzy match
        const fuse = new Fuse(allPresets);
        const fuzzyMatch = fuse.search(name);

        if (!fuzzyMatch.length) {
            console.warn(`WARN: No preset found with name ${name}`);
            return currentPreset;
        }

        const fuzzyPresetName = fuzzyMatch[0].item;
        const fuzzyPresetValue = presetManager.findPreset(fuzzyPresetName);

        if (fuzzyPresetValue) {
            console.log('Found fuzzy preset match', fuzzyPresetName);

            if (currentPreset !== fuzzyPresetName) {
                presetManager.selectPreset(fuzzyPresetValue);
                shouldReconnect && await waitForConnection();
            }
        }

        return fuzzyPresetName;
    }
}
/**
 * Waits for API connection to be established.
 * Resolves once `online_status` leaves the 'no_connection' state, polling
 * every 100 ms; gives up silently after a 5-second timeout.
 * @returns {Promise<void>} Resolves whether or not the connection was made.
 */
async function waitForConnection() {
    try {
        await waitUntilCondition(() => online_status !== 'no_connection', 5000, 100);
    } catch {
        // Best-effort: a timeout is logged but never propagated to the caller.
        console.log('Timeout waiting for API to connect');
    }
}
export async function initPresetManager() {
eventSource.on(event_types.CHAT_CHANGED, autoSelectPreset); eventSource.on(event_types.CHAT_CHANGED, autoSelectPreset);
registerPresetManagers(); registerPresetManagers();
registerSlashCommand('preset', presetCommandCallback, [], '<span class="monospace">(name)</span> sets a preset by name for the current API', true, true);
$(document).on('click', '[data-preset-manager-update]', async function () { $(document).on('click', '[data-preset-manager-update]', async function () {
const apiId = $(this).data('preset-manager-update'); const apiId = $(this).data('preset-manager-update');
const presetManager = getPresetManager(apiId); const presetManager = getPresetManager(apiId);
@ -440,7 +558,7 @@ jQuery(async () => {
saveSettingsDebounced(); saveSettingsDebounced();
}); });
$(document).on('click', '[data-preset-manager-restore]', async function() { $(document).on('click', '[data-preset-manager-restore]', async function () {
const apiId = $(this).data('preset-manager-restore'); const apiId = $(this).data('preset-manager-restore');
const presetManager = getPresetManager(apiId); const presetManager = getPresetManager(apiId);
@ -490,4 +608,4 @@ jQuery(async () => {
toastr.success('Preset restored'); toastr.success('Preset restored');
} }
}); });
}); }

View File

@ -12,8 +12,9 @@ export const SECRET_KEYS = {
SCALE: 'api_key_scale', SCALE: 'api_key_scale',
AI21: 'api_key_ai21', AI21: 'api_key_ai21',
SCALE_COOKIE: 'scale_cookie', SCALE_COOKIE: 'scale_cookie',
PALM: 'api_key_palm', MAKERSUITE: 'api_key_makersuite',
SERPAPI: 'api_key_serpapi', SERPAPI: 'api_key_serpapi',
MISTRALAI: 'api_key_mistralai',
TOGETHERAI: 'api_key_togetherai', TOGETHERAI: 'api_key_togetherai',
}; };
@ -27,9 +28,10 @@ const INPUT_MAP = {
[SECRET_KEYS.SCALE]: '#api_key_scale', [SECRET_KEYS.SCALE]: '#api_key_scale',
[SECRET_KEYS.AI21]: '#api_key_ai21', [SECRET_KEYS.AI21]: '#api_key_ai21',
[SECRET_KEYS.SCALE_COOKIE]: '#scale_cookie', [SECRET_KEYS.SCALE_COOKIE]: '#scale_cookie',
[SECRET_KEYS.PALM]: '#api_key_palm', [SECRET_KEYS.MAKERSUITE]: '#api_key_makersuite',
[SECRET_KEYS.APHRODITE]: '#api_key_aphrodite', [SECRET_KEYS.APHRODITE]: '#api_key_aphrodite',
[SECRET_KEYS.TABBY]: '#api_key_tabby', [SECRET_KEYS.TABBY]: '#api_key_tabby',
[SECRET_KEYS.MISTRALAI]: '#api_key_mistralai',
[SECRET_KEYS.TOGETHERAI]: '#api_key_togetherai', [SECRET_KEYS.TOGETHERAI]: '#api_key_togetherai',
}; };

View File

@ -186,6 +186,7 @@ parser.addCommand('trimend', trimEndCallback, [], '<span class="monospace">(text
parser.addCommand('inject', injectCallback, [], '<span class="monospace">id=injectId (position=before/after/chat depth=number [text])</span> injects a text into the LLM prompt for the current chat. Requires a unique injection ID. Positions: "before" main prompt, "after" main prompt, in-"chat" (default: after). Depth: injection depth for the prompt (default: 4).', true, true); parser.addCommand('inject', injectCallback, [], '<span class="monospace">id=injectId (position=before/after/chat depth=number [text])</span> injects a text into the LLM prompt for the current chat. Requires a unique injection ID. Positions: "before" main prompt, "after" main prompt, in-"chat" (default: after). Depth: injection depth for the prompt (default: 4).', true, true);
parser.addCommand('listinjects', listInjectsCallback, [], ' lists all script injections for the current chat.', true, true); parser.addCommand('listinjects', listInjectsCallback, [], ' lists all script injections for the current chat.', true, true);
parser.addCommand('flushinjects', flushInjectsCallback, [], ' removes all script injections for the current chat.', true, true); parser.addCommand('flushinjects', flushInjectsCallback, [], ' removes all script injections for the current chat.', true, true);
parser.addCommand('tokens', (_, text) => getTokenCount(text), [], '<span class="monospace">(text)</span> counts the number of tokens in the text.', true, true);
registerVariableCommands(); registerVariableCommands();
const NARRATOR_NAME_KEY = 'narrator_name'; const NARRATOR_NAME_KEY = 'narrator_name';

View File

@ -0,0 +1,77 @@
/**
 * A stream which handles Server-Sent Events from a binary ReadableStream like you get from the fetch API.
 *
 * Usage: pipe a response body (bytes) into `writable`, then read
 * `MessageEvent` objects — one per complete SSE event — from `readable`.
 */
class EventSourceStream {
    constructor() {
        // Decode the incoming bytes to text before SSE parsing.
        const decoder = new TextDecoderStream('utf-8');

        // Text that has arrived but does not yet terminate a complete event.
        let streamBuffer = '';
        // Last seen event ID; carried across subsequent events per the SSE spec.
        let lastEventId = '';

        // Parses every complete event currently in streamBuffer and enqueues
        // the resulting MessageEvents on the TransformStream controller.
        function processChunk(controller) {
            // Events are separated by two newlines
            const events = streamBuffer.split(/\r\n\r\n|\r\r|\n\n/g);
            if (events.length === 0) return;

            // The leftover text to remain in the buffer is whatever doesn't have two newlines after it. If the buffer ended
            // with two newlines, this will be an empty string.
            streamBuffer = events.pop();

            for (const eventChunk of events) {
                let eventType = '';
                // Split up by single newlines.
                const lines = eventChunk.split(/\n|\r|\r\n/g);
                let eventData = '';
                for (const line of lines) {
                    // Field name, optionally followed by ": value". Comment lines
                    // (starting with a colon) never yield a recognized field name.
                    const lineMatch = /([^:]+)(?:: ?(.*))?/.exec(line);
                    if (lineMatch) {
                        const field = lineMatch[1];
                        const value = lineMatch[2] || '';

                        switch (field) {
                            case 'event':
                                eventType = value;
                                break;
                            case 'data':
                                // Multiple data lines are accumulated, newline-joined.
                                eventData += value;
                                eventData += '\n';
                                break;
                            case 'id':
                                // The ID field cannot contain null, per the spec
                                if (!value.includes('\0')) lastEventId = value;
                                break;
                            // We do nothing for the `delay` type, and other types are explicitly ignored
                        }
                    }
                }

                // https://html.spec.whatwg.org/multipage/server-sent-events.html#dispatchMessage
                // Skip the event if the data buffer is the empty string.
                if (eventData === '') continue;

                if (eventData[eventData.length - 1] === '\n') {
                    eventData = eventData.slice(0, -1);
                }

                // Trim the *last* trailing newline only.
                const event = new MessageEvent(eventType || 'message', { data: eventData, lastEventId });
                controller.enqueue(event);
            }
        }

        const sseStream = new TransformStream({
            transform(chunk, controller) {
                streamBuffer += chunk;
                processChunk(controller);
            },
        });

        // Chain the pipeline: raw bytes -> text decoder -> SSE parser.
        decoder.readable.pipeThrough(sseStream);

        // Expose the two ends: write bytes in, read MessageEvents out.
        this.readable = sseStream.readable;
        this.writable = decoder.writable;
    }
}
export default EventSourceStream;

View File

@ -17,6 +17,7 @@
<li><tt>&lcub;&lcub;char&rcub;&rcub;</tt> the Character's name</li> <li><tt>&lcub;&lcub;char&rcub;&rcub;</tt> the Character's name</li>
<li><tt>&lcub;&lcub;lastMessage&rcub;&rcub;</tt> - the text of the latest chat message.</li> <li><tt>&lcub;&lcub;lastMessage&rcub;&rcub;</tt> - the text of the latest chat message.</li>
<li><tt>&lcub;&lcub;lastMessageId&rcub;&rcub;</tt> index # of the latest chat message. Useful for slash command batching.</li> <li><tt>&lcub;&lcub;lastMessageId&rcub;&rcub;</tt> index # of the latest chat message. Useful for slash command batching.</li>
<li><tt>&lcub;&lcub;firstIncludedMessageId&rcub;&rcub;</tt> - the ID of the first message included in the context. Requires generation to be ran at least once in the current session.</li>
<li><tt>&lcub;&lcub;currentSwipeId&rcub;&rcub;</tt> the 1-based ID of the current swipe in the last chat message. Empty string if the last message is user or prompt-hidden.</li> <li><tt>&lcub;&lcub;currentSwipeId&rcub;&rcub;</tt> the 1-based ID of the current swipe in the last chat message. Empty string if the last message is user or prompt-hidden.</li>
<li><tt>&lcub;&lcub;lastSwipeId&rcub;&rcub;</tt> the number of swipes in the last chat message. Empty string if the last message is user or prompt-hidden.</li> <li><tt>&lcub;&lcub;lastSwipeId&rcub;&rcub;</tt> the number of swipes in the last chat message. Empty string if the last message is user or prompt-hidden.</li>
<li><tt>&lcub;&lcub;// (note)&rcub;&rcub;</tt> you can leave a note here, and the macro will be replaced with blank content. Not visible for the AI.</li> <li><tt>&lcub;&lcub;// (note)&rcub;&rcub;</tt> you can leave a note here, and the macro will be replaced with blank content. Not visible for the AI.</li>

View File

@ -14,6 +14,7 @@ import {
power_user, power_user,
registerDebugFunction, registerDebugFunction,
} from './power-user.js'; } from './power-user.js';
import EventSourceStream from './sse-stream.js';
import { SENTENCEPIECE_TOKENIZERS, getTextTokens, tokenizers } from './tokenizers.js'; import { SENTENCEPIECE_TOKENIZERS, getTextTokens, tokenizers } from './tokenizers.js';
import { getSortableDelay, onlyUnique } from './utils.js'; import { getSortableDelay, onlyUnique } from './utils.js';
@ -467,7 +468,7 @@ function setSettingByName(setting, value, trigger) {
async function generateTextGenWithStreaming(generate_data, signal) { async function generateTextGenWithStreaming(generate_data, signal) {
generate_data.stream = true; generate_data.stream = true;
const response = await fetch('/api/textgenerationwebui/generate', { const response = await fetch('/api/backends/text-completions/generate', {
headers: { headers: {
...getRequestHeaders(), ...getRequestHeaders(),
}, },
@ -476,68 +477,50 @@ async function generateTextGenWithStreaming(generate_data, signal) {
signal: signal, signal: signal,
}); });
if (!response.ok) {
tryParseStreamingError(response, await response.text());
throw new Error(`Got response status ${response.status}`);
}
const eventStream = new EventSourceStream();
response.body.pipeThrough(eventStream);
const reader = eventStream.readable.getReader();
return async function* streamData() { return async function* streamData() {
const decoder = new TextDecoder(); let text = '';
const reader = response.body.getReader();
let getMessage = '';
let messageBuffer = '';
const swipes = []; const swipes = [];
while (true) { while (true) {
const { done, value } = await reader.read(); const { done, value } = await reader.read();
// We don't want carriage returns in our messages if (done) return;
let response = decoder.decode(value).replace(/\r/g, ''); if (value.data === '[DONE]') return;
tryParseStreamingError(response); tryParseStreamingError(response, value.data);
let eventList = []; let data = JSON.parse(value.data);
messageBuffer += response; if (data?.choices[0]?.index > 0) {
eventList = messageBuffer.split('\n\n'); const swipeIndex = data.choices[0].index - 1;
// Last element will be an empty string or a leftover partial message swipes[swipeIndex] = (swipes[swipeIndex] || '') + data.choices[0].text;
messageBuffer = eventList.pop(); } else {
text += data?.choices[0]?.text || '';
for (let event of eventList) {
if (event.startsWith('event: completion')) {
event = event.split('\n')[1];
}
if (typeof event !== 'string' || !event.length)
continue;
if (!event.startsWith('data'))
continue;
if (event == 'data: [DONE]') {
return;
}
let data = JSON.parse(event.substring(6));
if (data?.choices[0]?.index > 0) {
const swipeIndex = data.choices[0].index - 1;
swipes[swipeIndex] = (swipes[swipeIndex] || '') + data.choices[0].text;
} else {
getMessage += data?.choices[0]?.text || '';
}
yield { text: getMessage, swipes: swipes };
} }
if (done) { yield { text, swipes };
return;
}
} }
}; };
} }
/** /**
* Parses errors in streaming responses and displays them in toastr. * Parses errors in streaming responses and displays them in toastr.
* @param {string} response - Response from the server. * @param {Response} response - Response from the server.
* @param {string} decoded - Decoded response body.
* @returns {void} Nothing. * @returns {void} Nothing.
*/ */
function tryParseStreamingError(response) { function tryParseStreamingError(response, decoded) {
let data = {}; let data = {};
try { try {
data = JSON.parse(response); data = JSON.parse(decoded);
} catch { } catch {
// No JSON. Do nothing. // No JSON. Do nothing.
} }
@ -545,7 +528,7 @@ function tryParseStreamingError(response) {
const message = data?.error?.message || data?.message; const message = data?.error?.message || data?.message;
if (message) { if (message) {
toastr.error(message, 'API Error'); toastr.error(message, 'Text Completion API');
throw new Error(message); throw new Error(message);
} }
} }

View File

@ -1,4 +1,4 @@
import { characters, getAPIServerUrl, main_api, nai_settings, online_status, this_chid } from '../script.js'; import { characters, main_api, api_server, api_server_textgenerationwebui, nai_settings, online_status, this_chid } from '../script.js';
import { power_user, registerDebugFunction } from './power-user.js'; import { power_user, registerDebugFunction } from './power-user.js';
import { chat_completion_sources, model_list, oai_settings } from './openai.js'; import { chat_completion_sources, model_list, oai_settings } from './openai.js';
import { groups, selected_group } from './group-chats.js'; import { groups, selected_group } from './group-chats.js';
@ -18,9 +18,11 @@ export const tokenizers = {
LLAMA: 3, LLAMA: 3,
NERD: 4, NERD: 4,
NERD2: 5, NERD2: 5,
API: 6, API_CURRENT: 6,
MISTRAL: 7, MISTRAL: 7,
YI: 8, YI: 8,
API_TEXTGENERATIONWEBUI: 9,
API_KOBOLD: 10,
BEST_MATCH: 99, BEST_MATCH: 99,
}; };
@ -33,6 +35,52 @@ export const SENTENCEPIECE_TOKENIZERS = [
//tokenizers.NERD2, //tokenizers.NERD2,
]; ];
const TOKENIZER_URLS = {
[tokenizers.GPT2]: {
encode: '/api/tokenizers/gpt2/encode',
decode: '/api/tokenizers/gpt2/decode',
count: '/api/tokenizers/gpt2/encode',
},
[tokenizers.OPENAI]: {
encode: '/api/tokenizers/openai/encode',
decode: '/api/tokenizers/openai/decode',
count: '/api/tokenizers/openai/encode',
},
[tokenizers.LLAMA]: {
encode: '/api/tokenizers/llama/encode',
decode: '/api/tokenizers/llama/decode',
count: '/api/tokenizers/llama/encode',
},
[tokenizers.NERD]: {
encode: '/api/tokenizers/nerdstash/encode',
decode: '/api/tokenizers/nerdstash/decode',
count: '/api/tokenizers/nerdstash/encode',
},
[tokenizers.NERD2]: {
encode: '/api/tokenizers/nerdstash_v2/encode',
decode: '/api/tokenizers/nerdstash_v2/decode',
count: '/api/tokenizers/nerdstash_v2/encode',
},
[tokenizers.API_KOBOLD]: {
count: '/api/tokenizers/remote/kobold/count',
encode: '/api/tokenizers/remote/kobold/count',
},
[tokenizers.MISTRAL]: {
encode: '/api/tokenizers/mistral/encode',
decode: '/api/tokenizers/mistral/decode',
count: '/api/tokenizers/mistral/encode',
},
[tokenizers.YI]: {
encode: '/api/tokenizers/yi/encode',
decode: '/api/tokenizers/yi/decode',
count: '/api/tokenizers/yi/encode',
},
[tokenizers.API_TEXTGENERATIONWEBUI]: {
encode: '/api/tokenizers/remote/textgenerationwebui/encode',
count: '/api/tokenizers/remote/textgenerationwebui/encode',
},
};
const objectStore = new localforage.createInstance({ name: 'SillyTavern_ChatCompletions' }); const objectStore = new localforage.createInstance({ name: 'SillyTavern_ChatCompletions' });
let tokenCache = {}; let tokenCache = {};
@ -92,7 +140,18 @@ export function getFriendlyTokenizerName(forApi) {
if (forApi !== 'openai' && tokenizerId === tokenizers.BEST_MATCH) { if (forApi !== 'openai' && tokenizerId === tokenizers.BEST_MATCH) {
tokenizerId = getTokenizerBestMatch(forApi); tokenizerId = getTokenizerBestMatch(forApi);
tokenizerName = $(`#tokenizer option[value="${tokenizerId}"]`).text();
switch (tokenizerId) {
case tokenizers.API_KOBOLD:
tokenizerName = 'API (KoboldAI Classic)';
break;
case tokenizers.API_TEXTGENERATIONWEBUI:
tokenizerName = 'API (Text Completion)';
break;
default:
tokenizerName = $(`#tokenizer option[value="${tokenizerId}"]`).text();
break;
}
} }
tokenizerName = forApi == 'openai' tokenizerName = forApi == 'openai'
@ -135,11 +194,11 @@ export function getTokenizerBestMatch(forApi) {
if (!hasTokenizerError && isConnected) { if (!hasTokenizerError && isConnected) {
if (forApi === 'kobold' && kai_flags.can_use_tokenization) { if (forApi === 'kobold' && kai_flags.can_use_tokenization) {
return tokenizers.API; return tokenizers.API_KOBOLD;
} }
if (forApi === 'textgenerationwebui' && isTokenizerSupported) { if (forApi === 'textgenerationwebui' && isTokenizerSupported) {
return tokenizers.API; return tokenizers.API_TEXTGENERATIONWEBUI;
} }
} }
@ -149,34 +208,42 @@ export function getTokenizerBestMatch(forApi) {
return tokenizers.NONE; return tokenizers.NONE;
} }
// Get the current remote tokenizer API based on the current text generation API.
function currentRemoteTokenizerAPI() {
switch (main_api) {
case 'kobold':
return tokenizers.API_KOBOLD;
case 'textgenerationwebui':
return tokenizers.API_TEXTGENERATIONWEBUI;
default:
return tokenizers.NONE;
}
}
/** /**
* Calls the underlying tokenizer model to the token count for a string. * Calls the underlying tokenizer model to the token count for a string.
* @param {number} type Tokenizer type. * @param {number} type Tokenizer type.
* @param {string} str String to tokenize. * @param {string} str String to tokenize.
* @param {number} padding Number of padding tokens.
* @returns {number} Token count. * @returns {number} Token count.
*/ */
function callTokenizer(type, str, padding) { function callTokenizer(type, str) {
if (type === tokenizers.NONE) return guesstimate(str);
switch (type) { switch (type) {
case tokenizers.NONE: case tokenizers.API_CURRENT:
return guesstimate(str) + padding; return callTokenizer(currentRemoteTokenizerAPI(), str);
case tokenizers.GPT2: case tokenizers.API_KOBOLD:
return countTokensRemote('/api/tokenizers/gpt2/encode', str, padding); return countTokensFromKoboldAPI(str);
case tokenizers.LLAMA: case tokenizers.API_TEXTGENERATIONWEBUI:
return countTokensRemote('/api/tokenizers/llama/encode', str, padding); return countTokensFromTextgenAPI(str);
case tokenizers.NERD: default: {
return countTokensRemote('/api/tokenizers/nerdstash/encode', str, padding); const endpointUrl = TOKENIZER_URLS[type]?.count;
case tokenizers.NERD2: if (!endpointUrl) {
return countTokensRemote('/api/tokenizers/nerdstash_v2/encode', str, padding); console.warn('Unknown tokenizer type', type);
case tokenizers.MISTRAL: return apiFailureTokenCount(str);
return countTokensRemote('/api/tokenizers/mistral/encode', str, padding); }
case tokenizers.YI: return countTokensFromServer(endpointUrl, str);
return countTokensRemote('/api/tokenizers/yi/encode', str, padding); }
case tokenizers.API:
return countTokensRemote('/tokenize_via_api', str, padding);
default:
console.warn('Unknown tokenizer type', type);
return callTokenizer(tokenizers.NONE, str, padding);
} }
} }
@ -219,7 +286,7 @@ export function getTokenCount(str, padding = undefined) {
return cacheObject[cacheKey]; return cacheObject[cacheKey];
} }
const result = callTokenizer(tokenizerType, str, padding); const result = callTokenizer(tokenizerType, str) + padding;
if (isNaN(result)) { if (isNaN(result)) {
console.warn('Token count calculation returned NaN'); console.warn('Token count calculation returned NaN');
@ -309,10 +376,18 @@ export function getTokenizerModel() {
} }
} }
if (oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE) {
return oai_settings.google_model;
}
if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) { if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) {
return claudeTokenizer; return claudeTokenizer;
} }
if (oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI) {
return mistralTokenizer;
}
// Default to Turbo 3.5 // Default to Turbo 3.5
return turboTokenizer; return turboTokenizer;
} }
@ -322,6 +397,15 @@ export function getTokenizerModel() {
*/ */
export function countTokensOpenAI(messages, full = false) { export function countTokensOpenAI(messages, full = false) {
const shouldTokenizeAI21 = oai_settings.chat_completion_source === chat_completion_sources.AI21 && oai_settings.use_ai21_tokenizer; const shouldTokenizeAI21 = oai_settings.chat_completion_source === chat_completion_sources.AI21 && oai_settings.use_ai21_tokenizer;
const shouldTokenizeGoogle = oai_settings.chat_completion_source === chat_completion_sources.MAKERSUITE && oai_settings.use_google_tokenizer;
let tokenizerEndpoint = '';
if (shouldTokenizeAI21) {
tokenizerEndpoint = '/api/tokenizers/ai21/count';
} else if (shouldTokenizeGoogle) {
tokenizerEndpoint = `/api/tokenizers/google/count?model=${getTokenizerModel()}`;
} else {
tokenizerEndpoint = `/api/tokenizers/openai/count?model=${getTokenizerModel()}`;
}
const cacheObject = getTokenCacheObject(); const cacheObject = getTokenCacheObject();
if (!Array.isArray(messages)) { if (!Array.isArray(messages)) {
@ -333,7 +417,7 @@ export function countTokensOpenAI(messages, full = false) {
for (const message of messages) { for (const message of messages) {
const model = getTokenizerModel(); const model = getTokenizerModel();
if (model === 'claude' || shouldTokenizeAI21) { if (model === 'claude' || shouldTokenizeAI21 || shouldTokenizeGoogle) {
full = true; full = true;
} }
@ -349,7 +433,7 @@ export function countTokensOpenAI(messages, full = false) {
jQuery.ajax({ jQuery.ajax({
async: false, async: false,
type: 'POST', // type: 'POST', //
url: shouldTokenizeAI21 ? '/api/tokenizers/ai21/count' : `/api/tokenizers/openai/count?model=${model}`, url: tokenizerEndpoint,
data: JSON.stringify([message]), data: JSON.stringify([message]),
dataType: 'json', dataType: 'json',
contentType: 'application/json', contentType: 'application/json',
@ -391,76 +475,131 @@ function getTokenCacheObject() {
return tokenCache[String(chatId)]; return tokenCache[String(chatId)];
} }
function getRemoteTokenizationParams(str) {
return {
text: str,
main_api,
api_type: textgen_settings.type,
url: getAPIServerUrl(),
legacy_api: main_api === 'textgenerationwebui' &&
textgen_settings.legacy_api &&
textgen_settings.type !== MANCER,
};
}
/** /**
* Counts token using the remote server API. * Count tokens using the server API.
* @param {string} endpoint API endpoint. * @param {string} endpoint API endpoint.
* @param {string} str String to tokenize. * @param {string} str String to tokenize.
* @param {number} padding Number of padding tokens. * @returns {number} Token count.
* @returns {number} Token count with padding.
*/ */
function countTokensRemote(endpoint, str, padding) { function countTokensFromServer(endpoint, str) {
let tokenCount = 0; let tokenCount = 0;
jQuery.ajax({ jQuery.ajax({
async: false, async: false,
type: 'POST', type: 'POST',
url: endpoint, url: endpoint,
data: JSON.stringify(getRemoteTokenizationParams(str)), data: JSON.stringify({ text: str }),
dataType: 'json', dataType: 'json',
contentType: 'application/json', contentType: 'application/json',
success: function (data) { success: function (data) {
if (typeof data.count === 'number') { if (typeof data.count === 'number') {
tokenCount = data.count; tokenCount = data.count;
} else { } else {
tokenCount = guesstimate(str); tokenCount = apiFailureTokenCount(str);
console.error('Error counting tokens');
if (!sessionStorage.getItem(TOKENIZER_WARNING_KEY)) {
toastr.warning(
'Your selected API doesn\'t support the tokenization endpoint. Using estimated counts.',
'Error counting tokens',
{ timeOut: 10000, preventDuplicates: true },
);
sessionStorage.setItem(TOKENIZER_WARNING_KEY, String(true));
}
} }
}, },
}); });
return tokenCount + padding; return tokenCount;
}
/**
* Count tokens using the AI provider's API.
* @param {string} str String to tokenize.
* @returns {number} Token count.
*/
function countTokensFromKoboldAPI(str) {
let tokenCount = 0;
jQuery.ajax({
async: false,
type: 'POST',
url: TOKENIZER_URLS[tokenizers.API_KOBOLD].count,
data: JSON.stringify({
text: str,
url: api_server,
}),
dataType: 'json',
contentType: 'application/json',
success: function (data) {
if (typeof data.count === 'number') {
tokenCount = data.count;
} else {
tokenCount = apiFailureTokenCount(str);
}
},
});
return tokenCount;
}
function getTextgenAPITokenizationParams(str) {
return {
text: str,
api_type: textgen_settings.type,
url: api_server_textgenerationwebui,
legacy_api:
textgen_settings.legacy_api &&
textgen_settings.type !== MANCER,
};
}
/**
* Count tokens using the AI provider's API.
* @param {string} str String to tokenize.
* @returns {number} Token count.
*/
function countTokensFromTextgenAPI(str) {
let tokenCount = 0;
jQuery.ajax({
async: false,
type: 'POST',
url: TOKENIZER_URLS[tokenizers.API_TEXTGENERATIONWEBUI].count,
data: JSON.stringify(getTextgenAPITokenizationParams(str)),
dataType: 'json',
contentType: 'application/json',
success: function (data) {
if (typeof data.count === 'number') {
tokenCount = data.count;
} else {
tokenCount = apiFailureTokenCount(str);
}
},
});
return tokenCount;
}
function apiFailureTokenCount(str) {
console.error('Error counting tokens');
if (!sessionStorage.getItem(TOKENIZER_WARNING_KEY)) {
toastr.warning(
'Your selected API doesn\'t support the tokenization endpoint. Using estimated counts.',
'Error counting tokens',
{ timeOut: 10000, preventDuplicates: true },
);
sessionStorage.setItem(TOKENIZER_WARNING_KEY, String(true));
}
return guesstimate(str);
} }
/** /**
* Calls the underlying tokenizer model to encode a string to tokens. * Calls the underlying tokenizer model to encode a string to tokens.
* @param {string} endpoint API endpoint. * @param {string} endpoint API endpoint.
* @param {string} str String to tokenize. * @param {string} str String to tokenize.
* @param {string} model Tokenizer model.
* @returns {number[]} Array of token ids. * @returns {number[]} Array of token ids.
*/ */
function getTextTokensRemote(endpoint, str, model = '') { function getTextTokensFromServer(endpoint, str) {
if (model) {
endpoint += `?model=${model}`;
}
let ids = []; let ids = [];
jQuery.ajax({ jQuery.ajax({
async: false, async: false,
type: 'POST', type: 'POST',
url: endpoint, url: endpoint,
data: JSON.stringify(getRemoteTokenizationParams(str)), data: JSON.stringify({ text: str }),
dataType: 'json', dataType: 'json',
contentType: 'application/json', contentType: 'application/json',
success: function (data) { success: function (data) {
@ -475,16 +614,59 @@ function getTextTokensRemote(endpoint, str, model = '') {
return ids; return ids;
} }
/**
* Calls the AI provider's tokenize API to encode a string to tokens.
* @param {string} str String to tokenize.
* @returns {number[]} Array of token ids.
*/
function getTextTokensFromTextgenAPI(str) {
let ids = [];
jQuery.ajax({
async: false,
type: 'POST',
url: TOKENIZER_URLS[tokenizers.API_TEXTGENERATIONWEBUI].encode,
data: JSON.stringify(getTextgenAPITokenizationParams(str)),
dataType: 'json',
contentType: 'application/json',
success: function (data) {
ids = data.ids;
},
});
return ids;
}
/**
* Calls the AI provider's tokenize API to encode a string to tokens.
* @param {string} str String to tokenize.
* @returns {number[]} Array of token ids.
*/
function getTextTokensFromKoboldAPI(str) {
let ids = [];
jQuery.ajax({
async: false,
type: 'POST',
url: TOKENIZER_URLS[tokenizers.API_KOBOLD].encode,
data: JSON.stringify({
text: str,
url: api_server,
}),
dataType: 'json',
contentType: 'application/json',
success: function (data) {
ids = data.ids;
},
});
return ids;
}
/** /**
* Calls the underlying tokenizer model to decode token ids to text. * Calls the underlying tokenizer model to decode token ids to text.
* @param {string} endpoint API endpoint. * @param {string} endpoint API endpoint.
* @param {number[]} ids Array of token ids * @param {number[]} ids Array of token ids
*/ */
function decodeTextTokensRemote(endpoint, ids, model = '') { function decodeTextTokensFromServer(endpoint, ids) {
if (model) {
endpoint += `?model=${model}`;
}
let text = ''; let text = '';
jQuery.ajax({ jQuery.ajax({
async: false, async: false,
@ -501,64 +683,64 @@ function decodeTextTokensRemote(endpoint, ids, model = '') {
} }
/** /**
* Encodes a string to tokens using the remote server API. * Encodes a string to tokens using the server API.
* @param {number} tokenizerType Tokenizer type. * @param {number} tokenizerType Tokenizer type.
* @param {string} str String to tokenize. * @param {string} str String to tokenize.
* @returns {number[]} Array of token ids. * @returns {number[]} Array of token ids.
*/ */
export function getTextTokens(tokenizerType, str) { export function getTextTokens(tokenizerType, str) {
switch (tokenizerType) { switch (tokenizerType) {
case tokenizers.GPT2: case tokenizers.API_CURRENT:
return getTextTokensRemote('/api/tokenizers/gpt2/encode', str); return getTextTokens(currentRemoteTokenizerAPI(), str);
case tokenizers.LLAMA: case tokenizers.API_TEXTGENERATIONWEBUI:
return getTextTokensRemote('/api/tokenizers/llama/encode', str); return getTextTokensFromTextgenAPI(str);
case tokenizers.NERD: case tokenizers.API_KOBOLD:
return getTextTokensRemote('/api/tokenizers/nerdstash/encode', str); return getTextTokensFromKoboldAPI(str);
case tokenizers.NERD2: default: {
return getTextTokensRemote('/api/tokenizers/nerdstash_v2/encode', str); const tokenizerEndpoints = TOKENIZER_URLS[tokenizerType];
case tokenizers.MISTRAL: if (!tokenizerEndpoints) {
return getTextTokensRemote('/api/tokenizers/mistral/encode', str); apiFailureTokenCount(str);
case tokenizers.YI: console.warn('Unknown tokenizer type', tokenizerType);
return getTextTokensRemote('/api/tokenizers/yi/encode', str); return [];
case tokenizers.OPENAI: { }
const model = getTokenizerModel(); let endpointUrl = tokenizerEndpoints.encode;
return getTextTokensRemote('/api/tokenizers/openai/encode', str, model); if (!endpointUrl) {
apiFailureTokenCount(str);
console.warn('This tokenizer type does not support encoding', tokenizerType);
return [];
}
if (tokenizerType === tokenizers.OPENAI) {
endpointUrl += `?model=${getTokenizerModel()}`;
}
return getTextTokensFromServer(endpointUrl, str);
} }
case tokenizers.API:
return getTextTokensRemote('/tokenize_via_api', str);
default:
console.warn('Calling getTextTokens with unsupported tokenizer type', tokenizerType);
return [];
} }
} }
/** /**
* Decodes token ids to text using the remote server API. * Decodes token ids to text using the server API.
* @param {number} tokenizerType Tokenizer type. * @param {number} tokenizerType Tokenizer type.
* @param {number[]} ids Array of token ids * @param {number[]} ids Array of token ids
*/ */
export function decodeTextTokens(tokenizerType, ids) { export function decodeTextTokens(tokenizerType, ids) {
switch (tokenizerType) { // Currently, neither remote API can decode, but this may change in the future. Put this guard here to be safe
case tokenizers.GPT2: if (tokenizerType === tokenizers.API_CURRENT) {
return decodeTextTokensRemote('/api/tokenizers/gpt2/decode', ids); return decodeTextTokens(tokenizers.NONE, ids);
case tokenizers.LLAMA:
return decodeTextTokensRemote('/api/tokenizers/llama/decode', ids);
case tokenizers.NERD:
return decodeTextTokensRemote('/api/tokenizers/nerdstash/decode', ids);
case tokenizers.NERD2:
return decodeTextTokensRemote('/api/tokenizers/nerdstash_v2/decode', ids);
case tokenizers.MISTRAL:
return decodeTextTokensRemote('/api/tokenizers/mistral/decode', ids);
case tokenizers.YI:
return decodeTextTokensRemote('/api/tokenizers/yi/decode', ids);
case tokenizers.OPENAI: {
const model = getTokenizerModel();
return decodeTextTokensRemote('/api/tokenizers/openai/decode', ids, model);
}
default:
console.warn('Calling decodeTextTokens with unsupported tokenizer type', tokenizerType);
return '';
} }
const tokenizerEndpoints = TOKENIZER_URLS[tokenizerType];
if (!tokenizerEndpoints) {
console.warn('Unknown tokenizer type', tokenizerType);
return [];
}
let endpointUrl = tokenizerEndpoints.decode;
if (!endpointUrl) {
console.warn('This tokenizer type does not support decoding', tokenizerType);
return [];
}
if (tokenizerType === tokenizers.OPENAI) {
endpointUrl += `?model=${getTokenizerModel()}`;
}
return decodeTextTokensFromServer(endpointUrl, ids);
} }
export async function initTokenizers() { export async function initTokenizers() {

View File

@ -741,6 +741,38 @@ export function escapeRegex(string) {
return string.replace(/[/\-\\^$*+?.()|[\]{}]/g, '\\$&'); return string.replace(/[/\-\\^$*+?.()|[\]{}]/g, '\\$&');
} }
export class Stopwatch {
/**
* Initializes a Stopwatch class.
* @param {number} interval Update interval in milliseconds. Must be a finite number above zero.
*/
constructor(interval) {
if (isNaN(interval) || !isFinite(interval) || interval <= 0) {
console.warn('Invalid interval for Stopwatch, setting to 1');
interval = 1;
}
this.interval = interval;
this.lastAction = Date.now();
}
/**
* Executes a function if the interval passed.
* @param {(arg0: any) => any} action Action function
* @returns Promise<void>
*/
async tick(action) {
const passed = (Date.now() - this.lastAction);
if (passed < this.interval) {
return;
}
await action();
this.lastAction = Date.now();
}
}
/** /**
* Provides an interface for rate limiting function calls. * Provides an interface for rate limiting function calls.
*/ */
@ -998,6 +1030,11 @@ export function loadFileToDocument(url, type) {
* @returns {Promise<string>} A promise that resolves to the thumbnail data URL. * @returns {Promise<string>} A promise that resolves to the thumbnail data URL.
*/ */
export function createThumbnail(dataUrl, maxWidth, maxHeight, type = 'image/jpeg') { export function createThumbnail(dataUrl, maxWidth, maxHeight, type = 'image/jpeg') {
// Someone might pass in a base64 encoded string without the data URL prefix
if (!dataUrl.includes('data:')) {
dataUrl = `data:image/jpeg;base64,${dataUrl}`;
}
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
const img = new Image(); const img = new Image();
img.src = dataUrl; img.src = dataUrl;
@ -1143,11 +1180,13 @@ export async function extractTextFromPDF(blob) {
* @param {Blob} blob HTML content blob * @param {Blob} blob HTML content blob
* @returns {Promise<string>} A promise that resolves to the parsed text. * @returns {Promise<string>} A promise that resolves to the parsed text.
*/ */
export async function extractTextFromHTML(blob) { export async function extractTextFromHTML(blob, textSelector = 'body') {
const html = await blob.text(); const html = await blob.text();
const domParser = new DOMParser(); const domParser = new DOMParser();
const document = domParser.parseFromString(DOMPurify.sanitize(html), 'text/html'); const document = domParser.parseFromString(DOMPurify.sanitize(html), 'text/html');
const text = postProcessText(document.body.textContent); const elements = document.querySelectorAll(textSelector);
const rawText = Array.from(elements).map(e => e.textContent).join('\n');
const text = postProcessText(rawText);
return text; return text;
} }

View File

@ -1,6 +1,9 @@
import { chat_metadata, getCurrentChatId, saveSettingsDebounced, sendSystemMessage, system_message_types } from '../script.js'; import { chat_metadata, getCurrentChatId, saveSettingsDebounced, sendSystemMessage, system_message_types } from '../script.js';
import { extension_settings, saveMetadataDebounced } from './extensions.js'; import { extension_settings, saveMetadataDebounced } from './extensions.js';
import { executeSlashCommands, registerSlashCommand } from './slash-commands.js'; import { executeSlashCommands, registerSlashCommand } from './slash-commands.js';
import { isFalseBoolean } from './utils.js';
const MAX_LOOPS = 100;
function getLocalVariable(name, args = {}) { function getLocalVariable(name, args = {}) {
if (!chat_metadata.variables) { if (!chat_metadata.variables) {
@ -301,8 +304,7 @@ function listVariablesCallback() {
} }
async function whileCallback(args, command) { async function whileCallback(args, command) {
const MAX_LOOPS = 100; const isGuardOff = isFalseBoolean(args.guard);
const isGuardOff = ['off', 'false', '0'].includes(args.guard?.toLowerCase());
const iterations = isGuardOff ? Number.MAX_SAFE_INTEGER : MAX_LOOPS; const iterations = isGuardOff ? Number.MAX_SAFE_INTEGER : MAX_LOOPS;
for (let i = 0; i < iterations; i++) { for (let i = 0; i < iterations; i++) {
@ -319,6 +321,19 @@ async function whileCallback(args, command) {
return ''; return '';
} }
async function timesCallback(args, value) {
const [repeats, ...commandParts] = value.split(' ');
const command = commandParts.join(' ');
const isGuardOff = isFalseBoolean(args.guard);
const iterations = Math.min(Number(repeats), isGuardOff ? Number.MAX_SAFE_INTEGER : MAX_LOOPS);
for (let i = 0; i < iterations; i++) {
await executeSubCommands(command.replace(/\{\{timesIndex\}\}/g, i));
}
return '';
}
async function ifCallback(args, command) { async function ifCallback(args, command) {
const { a, b, rule } = parseBooleanOperands(args); const { a, b, rule } = parseBooleanOperands(args);
const result = evalBoolean(rule, a, b); const result = evalBoolean(rule, a, b);
@ -637,6 +652,21 @@ function lenValuesCallback(value) {
return parsedValue.length; return parsedValue.length;
} }
function randValuesCallback(from, to, args) {
const range = to - from;
const value = from + Math.random() * range;
if (args.round == 'round') {
return Math.round(value);
}
if (args.round == 'ceil') {
return Math.ceil(value);
}
if (args.round == 'floor') {
return Math.floor(value);
}
return value;
}
export function registerVariableCommands() { export function registerVariableCommands() {
registerSlashCommand('listvar', listVariablesCallback, [], ' list registered chat variables', true, true); registerSlashCommand('listvar', listVariablesCallback, [], ' list registered chat variables', true, true);
registerSlashCommand('setvar', (args, value) => setLocalVariable(args.key || args.name, value, args), [], '<span class="monospace">key=varname index=listIndex (value)</span> set a local variable value and pass it down the pipe, index is optional, e.g. <tt>/setvar key=color green</tt>', true, true); registerSlashCommand('setvar', (args, value) => setLocalVariable(args.key || args.name, value, args), [], '<span class="monospace">key=varname index=listIndex (value)</span> set a local variable value and pass it down the pipe, index is optional, e.g. <tt>/setvar key=color green</tt>', true, true);
@ -651,6 +681,7 @@ export function registerVariableCommands() {
registerSlashCommand('decglobalvar', (_, value) => decrementGlobalVariable(value), [], '<span class="monospace">(key)</span> decrement a global variable by 1 and pass the result down the pipe, e.g. <tt>/decglobalvar score</tt>', true, true); registerSlashCommand('decglobalvar', (_, value) => decrementGlobalVariable(value), [], '<span class="monospace">(key)</span> decrement a global variable by 1 and pass the result down the pipe, e.g. <tt>/decglobalvar score</tt>', true, true);
registerSlashCommand('if', ifCallback, [], '<span class="monospace">left=varname1 right=varname2 rule=comparison else="(alt.command)" "(command)"</span> compare the value of the left operand "a" with the value of the right operand "b", and if the condition yields true, then execute any valid slash command enclosed in quotes and pass the result of the command execution down the pipe. Numeric values and string literals for left and right operands supported. Available rules: gt => a > b, gte => a >= b, lt => a < b, lte => a <= b, eq => a == b, neq => a != b, not => !a, in (strings) => a includes b, nin (strings) => a not includes b, e.g. <tt>/if left=score right=10 rule=gte "/speak You win"</tt> triggers a /speak command if the value of "score" is greater or equals 10.', true, true); registerSlashCommand('if', ifCallback, [], '<span class="monospace">left=varname1 right=varname2 rule=comparison else="(alt.command)" "(command)"</span> compare the value of the left operand "a" with the value of the right operand "b", and if the condition yields true, then execute any valid slash command enclosed in quotes and pass the result of the command execution down the pipe. Numeric values and string literals for left and right operands supported. Available rules: gt => a > b, gte => a >= b, lt => a < b, lte => a <= b, eq => a == b, neq => a != b, not => !a, in (strings) => a includes b, nin (strings) => a not includes b, e.g. <tt>/if left=score right=10 rule=gte "/speak You win"</tt> triggers a /speak command if the value of "score" is greater or equals 10.', true, true);
registerSlashCommand('while', whileCallback, [], '<span class="monospace">left=varname1 right=varname2 rule=comparison "(command)"</span> compare the value of the left operand "a" with the value of the right operand "b", and if the condition yields true, then execute any valid slash command enclosed in quotes. Numeric values and string literals for left and right operands supported. Available rules: gt => a > b, gte => a >= b, lt => a < b, lte => a <= b, eq => a == b, neq => a != b, not => !a, in (strings) => a includes b, nin (strings) => a not includes b, e.g. <tt>/setvar key=i 0 | /while left=i right=10 rule=let "/addvar key=i 1"</tt> adds 1 to the value of "i" until it reaches 10. Loops are limited to 100 iterations by default, pass guard=off to disable.', true, true); registerSlashCommand('while', whileCallback, [], '<span class="monospace">left=varname1 right=varname2 rule=comparison "(command)"</span> compare the value of the left operand "a" with the value of the right operand "b", and if the condition yields true, then execute any valid slash command enclosed in quotes. Numeric values and string literals for left and right operands supported. Available rules: gt => a > b, gte => a >= b, lt => a < b, lte => a <= b, eq => a == b, neq => a != b, not => !a, in (strings) => a includes b, nin (strings) => a not includes b, e.g. <tt>/setvar key=i 0 | /while left=i right=10 rule=let "/addvar key=i 1"</tt> adds 1 to the value of "i" until it reaches 10. Loops are limited to 100 iterations by default, pass guard=off to disable.', true, true);
registerSlashCommand('times', (args, value) => timesCallback(args, value), [], '<span class="monospace">(repeats) "(command)"</span> execute any valid slash command enclosed in quotes <tt>repeats</tt> number of times, e.g. <tt>/setvar key=i 1 | /times 5 "/addvar key=i 1"</tt> adds 1 to the value of "i" 5 times. <tt>{{timesIndex}}</tt> is replaced with the iteration number (zero-based), e.g. <tt>/times 4 "/echo {{timesIndex}}"</tt> echos the numbers 0 through 4. Loops are limited to 100 iterations by default, pass guard=off to disable.', true, true);
registerSlashCommand('flushvar', (_, value) => deleteLocalVariable(value), [], '<span class="monospace">(key)</span> delete a local variable, e.g. <tt>/flushvar score</tt>', true, true); registerSlashCommand('flushvar', (_, value) => deleteLocalVariable(value), [], '<span class="monospace">(key)</span> delete a local variable, e.g. <tt>/flushvar score</tt>', true, true);
registerSlashCommand('flushglobalvar', (_, value) => deleteGlobalVariable(value), [], '<span class="monospace">(key)</span> delete a global variable, e.g. <tt>/flushglobalvar score</tt>', true, true); registerSlashCommand('flushglobalvar', (_, value) => deleteGlobalVariable(value), [], '<span class="monospace">(key)</span> delete a global variable, e.g. <tt>/flushglobalvar score</tt>', true, true);
registerSlashCommand('add', (_, value) => addValuesCallback(value), [], '<span class="monospace">(a b c d)</span> performs an addition of the set of values and passes the result down the pipe, can use variable names, e.g. <tt>/add 10 i 30 j</tt>', true, true); registerSlashCommand('add', (_, value) => addValuesCallback(value), [], '<span class="monospace">(a b c d)</span> performs an addition of the set of values and passes the result down the pipe, can use variable names, e.g. <tt>/add 10 i 30 j</tt>', true, true);
@ -668,4 +699,5 @@ export function registerVariableCommands() {
registerSlashCommand('sqrt', (_, value) => sqrtValuesCallback(value), [], '<span class="monospace">(a)</span> performs a square root operation of a value and passes the result down the pipe, can use variable names, e.g. <tt>/sqrt i</tt>', true, true); registerSlashCommand('sqrt', (_, value) => sqrtValuesCallback(value), [], '<span class="monospace">(a)</span> performs a square root operation of a value and passes the result down the pipe, can use variable names, e.g. <tt>/sqrt i</tt>', true, true);
registerSlashCommand('round', (_, value) => roundValuesCallback(value), [], '<span class="monospace">(a)</span> rounds a value and passes the result down the pipe, can use variable names, e.g. <tt>/round i</tt>', true, true); registerSlashCommand('round', (_, value) => roundValuesCallback(value), [], '<span class="monospace">(a)</span> rounds a value and passes the result down the pipe, can use variable names, e.g. <tt>/round i</tt>', true, true);
registerSlashCommand('len', (_, value) => lenValuesCallback(value), [], '<span class="monospace">(a)</span> gets the length of a value and passes the result down the pipe, can use variable names, e.g. <tt>/len i</tt>', true, true); registerSlashCommand('len', (_, value) => lenValuesCallback(value), [], '<span class="monospace">(a)</span> gets the length of a value and passes the result down the pipe, can use variable names, e.g. <tt>/len i</tt>', true, true);
registerSlashCommand('rand', (args, value) => randValuesCallback(Number(args.from ?? 0), Number(args.to ?? (value.length ? value : 1)), args), [], '<span class="monospace">(from=number=0 to=number=1 round=round|ceil|floor)</span> returns a random number between from and to, e.g. <tt>/rand</tt> or <tt>/rand 10</tt> or <tt>/rand from=5 to=10</tt>', true, true);
} }

View File

@ -3,7 +3,7 @@ import { download, debounce, initScrollHeight, resetScrollHeight, parseJsonFile,
import { extension_settings, getContext } from './extensions.js'; import { extension_settings, getContext } from './extensions.js';
import { NOTE_MODULE_NAME, metadata_keys, shouldWIAddPrompt } from './authors-note.js'; import { NOTE_MODULE_NAME, metadata_keys, shouldWIAddPrompt } from './authors-note.js';
import { registerSlashCommand } from './slash-commands.js'; import { registerSlashCommand } from './slash-commands.js';
import { getDeviceInfo } from './RossAscends-mods.js'; import { isMobile } from './RossAscends-mods.js';
import { FILTER_TYPES, FilterHelper } from './filters.js'; import { FILTER_TYPES, FilterHelper } from './filters.js';
import { getTokenCount } from './tokenizers.js'; import { getTokenCount } from './tokenizers.js';
import { power_user } from './power-user.js'; import { power_user } from './power-user.js';
@ -441,7 +441,7 @@ async function loadWorldInfoData(name) {
} }
async function updateWorldInfoList() { async function updateWorldInfoList() {
const result = await fetch('/getsettings', { const result = await fetch('/api/settings/get', {
method: 'POST', method: 'POST',
headers: getRequestHeaders(), headers: getRequestHeaders(),
body: JSON.stringify({}), body: JSON.stringify({}),
@ -896,8 +896,8 @@ function getWorldEntry(name, data, entry) {
const characterFilter = template.find('select[name="characterFilter"]'); const characterFilter = template.find('select[name="characterFilter"]');
characterFilter.data('uid', entry.uid); characterFilter.data('uid', entry.uid);
const deviceInfo = getDeviceInfo();
if (deviceInfo && deviceInfo.device.type === 'desktop') { if (!isMobile()) {
$(characterFilter).select2({ $(characterFilter).select2({
width: '100%', width: '100%',
placeholder: 'All characters will pull from this entry.', placeholder: 'All characters will pull from this entry.',
@ -1684,20 +1684,13 @@ async function checkWorldInfo(chat, maxContext) {
// Add the depth or AN if enabled // Add the depth or AN if enabled
// Put this code here since otherwise, the chat reference is modified // Put this code here since otherwise, the chat reference is modified
if (extension_settings.note.allowWIScan) { for (const key of Object.keys(context.extensionPrompts)) {
for (const key of Object.keys(context.extensionPrompts)) { if (context.extensionPrompts[key]?.scan) {
if (key.startsWith('DEPTH_PROMPT')) { const prompt = getExtensionPromptByName(key);
const depthPrompt = getExtensionPromptByName(key); if (prompt) {
if (depthPrompt) { textToScan = `${prompt}\n${textToScan}`;
textToScan = `${depthPrompt}\n${textToScan}`;
}
} }
} }
const anPrompt = getExtensionPromptByName(NOTE_MODULE_NAME);
if (anPrompt) {
textToScan = `${anPrompt}\n${textToScan}`;
}
} }
// Transform the resulting string // Transform the resulting string
@ -1948,7 +1941,7 @@ async function checkWorldInfo(chat, maxContext) {
if (shouldWIAddPrompt) { if (shouldWIAddPrompt) {
const originalAN = context.extensionPrompts[NOTE_MODULE_NAME].value; const originalAN = context.extensionPrompts[NOTE_MODULE_NAME].value;
const ANWithWI = `${ANTopEntries.join('\n')}\n${originalAN}\n${ANBottomEntries.join('\n')}`; const ANWithWI = `${ANTopEntries.join('\n')}\n${originalAN}\n${ANBottomEntries.join('\n')}`;
context.setExtensionPrompt(NOTE_MODULE_NAME, ANWithWI, chat_metadata[metadata_keys.position], chat_metadata[metadata_keys.depth]); context.setExtensionPrompt(NOTE_MODULE_NAME, ANWithWI, chat_metadata[metadata_keys.position], chat_metadata[metadata_keys.depth], extension_settings.note.allowWIScan);
} }
return { worldInfoBefore, worldInfoAfter, WIDepthEntries }; return { worldInfoBefore, worldInfoAfter, WIDepthEntries };
@ -2558,8 +2551,7 @@ jQuery(() => {
$(document).on('click', '.chat_lorebook_button', assignLorebookToChat); $(document).on('click', '.chat_lorebook_button', assignLorebookToChat);
// Not needed on mobile // Not needed on mobile
const deviceInfo = getDeviceInfo(); if (!isMobile()) {
if (deviceInfo && deviceInfo.device.type === 'desktop') {
$('#world_info').select2({ $('#world_info').select2({
width: '100%', width: '100%',
placeholder: 'No Worlds active. Click here to select.', placeholder: 'No Worlds active. Click here to select.',

View File

@ -316,6 +316,11 @@ table.responsiveTable {
padding: 1em; padding: 1em;
} }
.mes_text img:not(.mes_img) {
max-width: 100%;
max-height: var(--doc-height);
}
.mes .mes_timer, .mes .mes_timer,
.mes .mesIDDisplay, .mes .mesIDDisplay,
.mes .tokenCounterDisplay { .mes .tokenCounterDisplay {
@ -414,7 +419,7 @@ hr {
} }
#bg1 { #bg1 {
background-image: url('backgrounds/tavern day.jpg'); background-image: url('backgrounds/__transparent.png');
z-index: -3; z-index: -3;
} }
@ -631,6 +636,7 @@ hr {
display: none; display: none;
order: 2; order: 2;
padding-right: 2px; padding-right: 2px;
place-self: center;
} }
#options_button { #options_button {
@ -1444,9 +1450,7 @@ select option:not(:checked) {
display: block; display: block;
} }
#api_button:hover, .menu_button.api_button:hover {
#api_button_novel:hover,
#api_button_textgenerationwebui:hover {
background-color: var(--active); background-color: var(--active);
} }
@ -2630,7 +2634,7 @@ input[type="range"]::-webkit-slider-thumb {
max-height: calc(100vh - 84px); max-height: calc(100vh - 84px);
max-height: calc(100svh - 84px); max-height: calc(100svh - 84px);
position: absolute; position: absolute;
z-index: 3002; z-index: 3000;
margin-left: auto; margin-left: auto;
margin-right: auto; margin-right: auto;
left: 0; left: 0;
@ -3661,12 +3665,13 @@ a {
} }
.icon-svg { .icon-svg {
fill: currentColor;
/* Takes on the color of the surrounding text */ /* Takes on the color of the surrounding text */
fill: currentColor;
width: auto; width: auto;
height: 14px; height: 14px;
vertical-align: middle; aspect-ratio: 1;
/* To align with adjacent text */ /* To align with adjacent text */
place-self: center;
} }
.paginationjs { .paginationjs {

1555
server.js

File diff suppressed because it is too large Load Diff

72
src/additional-headers.js Normal file
View File

@ -0,0 +1,72 @@
const { TEXTGEN_TYPES } = require('./constants');
const { SECRET_KEYS, readSecret } = require('./endpoints/secrets');
const { getConfigValue } = require('./util');
/**
 * Builds auth headers for the Mancer backend.
 * @returns {object} Header map, empty when no API key is stored.
 */
function getMancerHeaders() {
    const apiKey = readSecret(SECRET_KEYS.MANCER);

    if (!apiKey) {
        return {};
    }

    return {
        'X-API-KEY': apiKey,
        'Authorization': `Bearer ${apiKey}`,
    };
}
/**
 * Builds auth headers for the Aphrodite backend.
 * @returns {object} Header map, empty when no API key is stored.
 */
function getAphroditeHeaders() {
    const apiKey = readSecret(SECRET_KEYS.APHRODITE);

    if (!apiKey) {
        return {};
    }

    return {
        'X-API-KEY': apiKey,
        'Authorization': `Bearer ${apiKey}`,
    };
}
/**
 * Builds auth headers for the TabbyAPI backend.
 * Note: Tabby expects a lowercase `x-api-key` header.
 * @returns {object} Header map, empty when no API key is stored.
 */
function getTabbyHeaders() {
    const apiKey = readSecret(SECRET_KEYS.TABBY);

    if (!apiKey) {
        return {};
    }

    return {
        'x-api-key': apiKey,
        'Authorization': `Bearer ${apiKey}`,
    };
}
/**
 * Looks up user-configured header overrides for a given host.
 * Reads the `requestOverrides` config list and returns the `headers`
 * of the first entry whose `hosts` array contains the host.
 * @param {string} urlHost Host (host:port) of the outgoing request
 * @returns {object} Override header map, or an empty object when none match
 */
function getOverrideHeaders(urlHost) {
    const requestOverrides = getConfigValue('requestOverrides', []);
    const match = requestOverrides?.find((entry) => entry.hosts?.includes(urlHost));
    const overrideHeaders = match?.headers;
    return (urlHost && overrideHeaders) ? overrideHeaders : {};
}
/**
 * Sets additional headers for the request.
 * Picks backend-specific auth headers by `request.body.api_type`; for any
 * other backend, falls back to host-based override headers from config.
 * Mutates `args.headers` in place.
 * @param {object} request Original request body
 * @param {object} args New request arguments
 * @param {string|null} server API server for new request
 */
function setAdditionalHeaders(request, args, server) {
    // Dispatch table: api_type -> header builder.
    const headerGetters = {
        [TEXTGEN_TYPES.MANCER]: getMancerHeaders,
        [TEXTGEN_TYPES.APHRODITE]: getAphroditeHeaders,
        [TEXTGEN_TYPES.TABBY]: getTabbyHeaders,
    };

    const getter = headerGetters[request.body.api_type];
    const headers = getter
        ? getter()
        : (server ? getOverrideHeaders(new URL(server).host) : {});

    Object.assign(args.headers, headers);
}
// Public API of this module. The per-backend header getters
// (Mancer/Aphrodite/Tabby) are intentionally module-private.
module.exports = {
    getOverrideHeaders,
    setAdditionalHeaders,
};

View File

@ -1,77 +0,0 @@
/**
 * Convert a prompt from the ChatML objects to the format used by Claude.
 * Mutates `messages` in place: names are folded into content, and when
 * `withSystemPrompt` is set, the leading run of unnamed system messages is
 * removed from the array and prepended to the result as a system prompt.
 * @param {object[]} messages Array of messages
 * @param {boolean} addHumanPrefix Add Human prefix
 * @param {boolean} addAssistantPostfix Add Assistant postfix
 * @param {boolean} withSystemPrompt Build system prompt before "\n\nHuman: "
 * @returns {string} Prompt for Claude
 * @copyright Prompt Conversion script taken from RisuAI by kwaroran (GPLv3).
 */
function convertClaudePrompt(messages, addHumanPrefix, addAssistantPostfix, withSystemPrompt) {
    // Claude doesn't support message names, so fold them into the content.
    for (const msg of messages) {
        if (msg.name && msg.role !== 'system') {
            msg.content = `${msg.name}: ${msg.content}`;
            delete msg.name;
        }
    }

    let systemPrompt = '';
    if (withSystemPrompt) {
        // Collect the leading run of unnamed system messages, stopping at the
        // first message of any other kind. The last message is never scanned.
        let lastSystemIdx = -1;

        for (let i = 0; i < messages.length - 1; i++) {
            const msg = messages[i];
            if (msg.role !== 'system' || msg.name) {
                lastSystemIdx = i - 1;
                break;
            }
            systemPrompt += `${msg.content}\n\n`;
        }

        if (lastSystemIdx >= 0) {
            messages.splice(0, lastSystemIdx + 1);
        }
    }

    // Role-dependent prefix; named system messages render as example turns
    // (H:/A:) per the Claude docs.
    const prefixFor = (msg) => {
        if (msg.role === 'assistant') return '\n\nAssistant: ';
        if (msg.role === 'user') return '\n\nHuman: ';
        if (msg.role === 'system') {
            if (msg.name === 'example_assistant') return '\n\nA: ';
            if (msg.name === 'example_user') return '\n\nH: ';
            return '\n\n';
        }
        return '';
    };

    let requestPrompt = messages.map((msg) => prefixFor(msg) + msg.content).join('');

    if (addHumanPrefix) {
        requestPrompt = '\n\nHuman: ' + requestPrompt;
    }

    if (addAssistantPostfix) {
        requestPrompt += '\n\nAssistant: ';
    }

    if (withSystemPrompt) {
        requestPrompt = systemPrompt + requestPrompt;
    }

    return requestPrompt;
}
// Exported for use by backends that speak the Anthropic text-completion format.
module.exports = {
    convertClaudePrompt,
};

View File

@ -105,7 +105,26 @@ const UNSAFE_EXTENSIONS = [
'.ws', '.ws',
]; ];
const PALM_SAFETY = [ const GEMINI_SAFETY = [
{
category: 'HARM_CATEGORY_HARASSMENT',
threshold: 'BLOCK_NONE',
},
{
category: 'HARM_CATEGORY_HATE_SPEECH',
threshold: 'BLOCK_NONE',
},
{
category: 'HARM_CATEGORY_SEXUALLY_EXPLICIT',
threshold: 'BLOCK_NONE',
},
{
category: 'HARM_CATEGORY_DANGEROUS_CONTENT',
threshold: 'BLOCK_NONE',
},
];
const BISON_SAFETY = [
{ {
category: 'HARM_CATEGORY_DEROGATORY', category: 'HARM_CATEGORY_DEROGATORY',
threshold: 'BLOCK_NONE', threshold: 'BLOCK_NONE',
@ -139,8 +158,9 @@ const CHAT_COMPLETION_SOURCES = {
SCALE: 'scale', SCALE: 'scale',
OPENROUTER: 'openrouter', OPENROUTER: 'openrouter',
AI21: 'ai21', AI21: 'ai21',
PALM: 'palm',
TOGETHERAI: 'togetherai', TOGETHERAI: 'togetherai',
MAKERSUITE: 'makersuite',
MISTRALAI: 'mistralai',
}; };
const UPLOADS_PATH = './uploads'; const UPLOADS_PATH = './uploads';
@ -161,7 +181,8 @@ module.exports = {
DIRECTORIES, DIRECTORIES,
UNSAFE_EXTENSIONS, UNSAFE_EXTENSIONS,
UPLOADS_PATH, UPLOADS_PATH,
PALM_SAFETY, GEMINI_SAFETY,
BISON_SAFETY,
TEXTGEN_TYPES, TEXTGEN_TYPES,
CHAT_COMPLETION_SOURCES, CHAT_COMPLETION_SOURCES,
AVATAR_WIDTH, AVATAR_WIDTH,

View File

@ -0,0 +1,811 @@
const express = require('express');
const fetch = require('node-fetch').default;
const { Readable } = require('stream');
const { jsonParser } = require('../../express-common');
const { CHAT_COMPLETION_SOURCES, GEMINI_SAFETY, BISON_SAFETY } = require('../../constants');
const { forwardFetchResponse, getConfigValue, tryParse, uuidv4 } = require('../../util');
const { convertClaudePrompt, convertGooglePrompt, convertTextCompletionPrompt } = require('../prompt-converters');
const { readSecret, SECRET_KEYS } = require('../secrets');
const { getTokenizerModel, getSentencepiceTokenizer, getTiktokenTokenizer, sentencepieceTokenizers, TEXT_COMPLETION_MODELS } = require('../tokenizers');
const API_OPENAI = 'https://api.openai.com/v1';
const API_CLAUDE = 'https://api.anthropic.com/v1';
/**
 * Sends a request to Claude API.
 * Converts the ChatML messages to Anthropic's prompt format, forwards the
 * completion request (optionally via a reverse proxy), and either streams
 * the SSE response through or wraps the full completion in OAI format.
 * @param {express.Request} request Express request
 * @param {express.Response} response Express response
 */
async function sendClaudeRequest(request, response) {
    const apiUrl = new URL(request.body.reverse_proxy || API_CLAUDE).toString();
    // When a reverse proxy is used, its password replaces the stored API key.
    const apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(SECRET_KEYS.CLAUDE);

    if (!apiKey) {
        console.log('Claude API key is missing.');
        return response.status(400).send({ error: true });
    }

    try {
        // Abort the upstream call if the client disconnects.
        const controller = new AbortController();
        request.socket.removeAllListeners('close');
        request.socket.on('close', function () {
            controller.abort();
        });

        // Only claude-2 models get a separate system prompt section.
        let doSystemPrompt = request.body.model === 'claude-2' || request.body.model === 'claude-2.1';
        let requestPrompt = convertClaudePrompt(request.body.messages, true, !request.body.exclude_assistant, doSystemPrompt);

        // Append the user-configured assistant prefill after the final
        // "Assistant:" marker, if the assistant postfix was added.
        if (request.body.assistant_prefill && !request.body.exclude_assistant) {
            requestPrompt += request.body.assistant_prefill;
        }

        console.log('Claude request:', requestPrompt);
        const stop_sequences = ['\n\nHuman:', '\n\nSystem:', '\n\nAssistant:'];

        // Add custom stop sequences
        if (Array.isArray(request.body.stop)) {
            stop_sequences.push(...request.body.stop);
        }

        const generateResponse = await fetch(apiUrl + '/complete', {
            method: 'POST',
            signal: controller.signal,
            body: JSON.stringify({
                prompt: requestPrompt,
                model: request.body.model,
                max_tokens_to_sample: request.body.max_tokens,
                stop_sequences: stop_sequences,
                temperature: request.body.temperature,
                top_p: request.body.top_p,
                top_k: request.body.top_k,
                stream: request.body.stream,
            }),
            headers: {
                'Content-Type': 'application/json',
                'anthropic-version': '2023-06-01',
                'x-api-key': apiKey,
            },
            timeout: 0,
        });

        if (request.body.stream) {
            // Pipe remote SSE stream to Express response
            forwardFetchResponse(generateResponse, response);
        } else {
            if (!generateResponse.ok) {
                console.log(`Claude API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`);
                return response.status(generateResponse.status).send({ error: true });
            }

            const generateResponseJson = await generateResponse.json();
            const responseText = generateResponseJson.completion;
            console.log('Claude response:', responseText);

            // Wrap it back to OAI format
            const reply = { choices: [{ 'message': { 'content': responseText } }] };
            return response.send(reply);
        }
    } catch (error) {
        console.log('Error communicating with Claude: ', error);
        if (!response.headersSent) {
            return response.status(500).send({ error: true });
        }
    }
}
/**
 * Sends a request to Scale Spellbook API.
 * Flattens the messages into a text prompt, posts it to the configured
 * Scale endpoint, and wraps the output in OAI chat-completion format.
 * @param {import("express").Request} request Express request
 * @param {import("express").Response} response Express response
 */
async function sendScaleRequest(request, response) {
    const endpoint = new URL(request.body.api_url_scale).toString();
    const key = readSecret(SECRET_KEYS.SCALE);

    if (!key) {
        console.log('Scale API key is missing.');
        return response.status(400).send({ error: true });
    }

    const prompt = convertTextCompletionPrompt(request.body.messages);
    console.log('Scale request:', prompt);

    try {
        // NOTE(review): the controller is created but its signal is not passed
        // to fetch in the original either — a client disconnect does not abort
        // the upstream call. Confirm before wiring it up.
        const abortController = new AbortController();
        request.socket.removeAllListeners('close');
        request.socket.on('close', () => abortController.abort());

        const apiResponse = await fetch(endpoint, {
            method: 'POST',
            body: JSON.stringify({ input: { input: prompt } }),
            headers: {
                'Content-Type': 'application/json',
                'Authorization': `Basic ${key}`,
            },
            timeout: 0,
        });

        if (!apiResponse.ok) {
            console.log(`Scale API returned error: ${apiResponse.status} ${apiResponse.statusText} ${await apiResponse.text()}`);
            return response.status(apiResponse.status).send({ error: true });
        }

        const json = await apiResponse.json();
        console.log('Scale response:', json);

        // Wrap it back to OAI format.
        return response.send({ choices: [{ 'message': { 'content': json.output } }] });
    } catch (error) {
        console.log(error);
        if (!response.headersSent) {
            return response.status(500).send({ error: true });
        }
    }
}
/**
 * Sends a request to Google AI API.
 * Dispatches between the Gemini (v1beta generateContent) and PaLM/Bison
 * (v1beta2 generateText/generateMessage) endpoint families based on the
 * model name, and adapts the request/response shapes accordingly.
 * @param {express.Request} request Express request
 * @param {express.Response} response Express response
 */
async function sendMakerSuiteRequest(request, response) {
    const apiKey = readSecret(SECRET_KEYS.MAKERSUITE);

    if (!apiKey) {
        console.log('MakerSuite API key is missing.');
        return response.status(400).send({ error: true });
    }

    const model = String(request.body.model);
    const isGemini = model.includes('gemini');
    const isText = model.includes('text');
    // Streaming is only attempted for Gemini models.
    const stream = Boolean(request.body.stream) && isGemini;

    const generationConfig = {
        stopSequences: request.body.stop,
        candidateCount: 1,
        maxOutputTokens: request.body.max_tokens,
        temperature: request.body.temperature,
        topP: request.body.top_p,
        topK: request.body.top_k || undefined,
    };

    // Request body for Gemini models.
    function getGeminiBody() {
        return {
            contents: convertGooglePrompt(request.body.messages, model),
            safetySettings: GEMINI_SAFETY,
            generationConfig: generationConfig,
        };
    }

    // Request body for PaLM/Bison models (generateText for text models,
    // generateMessage for chat models).
    function getBisonBody() {
        const prompt = isText
            ? ({ text: convertTextCompletionPrompt(request.body.messages) })
            : ({ messages: convertGooglePrompt(request.body.messages, model) });

        /** @type {any} Shut the lint up */
        const bisonBody = {
            ...generationConfig,
            safetySettings: BISON_SAFETY,
            candidate_count: 1, // legacy spelling
            prompt: prompt,
        };

        if (!isText) {
            // generateMessage does not accept these generateText-only fields.
            delete bisonBody.stopSequences;
            delete bisonBody.maxOutputTokens;
            delete bisonBody.safetySettings;

            if (Array.isArray(prompt.messages)) {
                // generateMessage expects { author, content } instead of { role, parts }.
                for (const msg of prompt.messages) {
                    msg.author = msg.role;
                    msg.content = msg.parts[0].text;
                    delete msg.parts;
                    delete msg.role;
                }
            }
        }

        // Bison uses the snake_case candidate_count set above.
        delete bisonBody.candidateCount;
        return bisonBody;
    }

    const body = isGemini ? getGeminiBody() : getBisonBody();
    console.log('MakerSuite request:', body);

    try {
        // Abort the upstream call if the client disconnects.
        const controller = new AbortController();
        request.socket.removeAllListeners('close');
        request.socket.on('close', function () {
            controller.abort();
        });

        const apiVersion = isGemini ? 'v1beta' : 'v1beta2';
        const responseType = isGemini
            ? (stream ? 'streamGenerateContent' : 'generateContent')
            : (isText ? 'generateText' : 'generateMessage');

        const generateResponse = await fetch(`https://generativelanguage.googleapis.com/${apiVersion}/models/${model}:${responseType}?key=${apiKey}`, {
            body: JSON.stringify(body),
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
            },
            signal: controller.signal,
            timeout: 0,
        });
        // have to do this because of their busted ass streaming endpoint:
        // streamGenerateContent emits a JSON array split at arbitrary chunk
        // boundaries, so accumulate chunks until the buffer parses as JSON.
        if (stream) {
            try {
                let partialData = '';
                generateResponse.body.on('data', (data) => {
                    const chunk = data.toString();
                    // Chunks starting/ending with array framing reset the buffer
                    // (dropping the leading '[' or ',' character).
                    if (chunk.startsWith(',') || chunk.endsWith(',') || chunk.startsWith('[') || chunk.endsWith(']')) {
                        partialData = chunk.slice(1);
                    } else {
                        partialData += chunk;
                    }
                    while (true) {
                        let json;
                        try {
                            json = JSON.parse(partialData);
                        } catch (e) {
                            // Incomplete JSON — wait for more data.
                            break;
                        }
                        response.write(JSON.stringify(json));
                        partialData = '';
                    }
                });

                request.socket.on('close', function () {
                    if (generateResponse.body instanceof Readable) generateResponse.body.destroy();
                    response.end();
                });

                generateResponse.body.on('end', () => {
                    console.log('Streaming request finished');
                    response.end();
                });
            } catch (error) {
                console.log('Error forwarding streaming response:', error);
                if (!response.headersSent) {
                    return response.status(500).send({ error: true });
                }
            }
        } else {
            if (!generateResponse.ok) {
                console.log(`MakerSuite API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`);
                return response.status(generateResponse.status).send({ error: true });
            }

            const generateResponseJson = await generateResponse.json();

            const candidates = generateResponseJson?.candidates;
            if (!candidates || candidates.length === 0) {
                let message = 'MakerSuite API returned no candidate';
                console.log(message, generateResponseJson);
                if (generateResponseJson?.promptFeedback?.blockReason) {
                    message += `\nPrompt was blocked due to : ${generateResponseJson.promptFeedback.blockReason}`;
                }
                return response.send({ error: { message } });
            }

            // Gemini nests text under content.parts; Bison returns a flat output string.
            const responseContent = candidates[0].content ?? candidates[0].output;
            const responseText = typeof responseContent === 'string' ? responseContent : responseContent.parts?.[0]?.text;
            if (!responseText) {
                let message = 'MakerSuite Candidate text empty';
                console.log(message, generateResponseJson);
                return response.send({ error: { message } });
            }

            console.log('MakerSuite response:', responseText);

            // Wrap it back to OAI format
            const reply = { choices: [{ 'message': { 'content': responseText } }] };
            return response.send(reply);
        }
    } catch (error) {
        console.log('Error communicating with MakerSuite API: ', error);
        if (!response.headersSent) {
            return response.status(500).send({ error: true });
        }
    }
}
/**
 * Sends a request to AI21 API.
 * Posts the flattened prompt to the model's /complete endpoint and wraps
 * the first completion in OAI chat-completion format.
 * @param {express.Request} request Express request
 * @param {express.Response} response Express response
 */
async function sendAI21Request(request, response) {
    if (!request.body) return response.sendStatus(400);
    // Abort the upstream call if the client disconnects.
    const controller = new AbortController();
    console.log(request.body.messages);
    request.socket.removeAllListeners('close');
    request.socket.on('close', function () {
        controller.abort();
    });
    const options = {
        method: 'POST',
        headers: {
            accept: 'application/json',
            'content-type': 'application/json',
            Authorization: `Bearer ${readSecret(SECRET_KEYS.AI21)}`,
        },
        body: JSON.stringify({
            numResults: 1,
            maxTokens: request.body.max_tokens,
            minTokens: 0,
            temperature: request.body.temperature,
            topP: request.body.top_p,
            stopSequences: request.body.stop_tokens,
            topKReturn: request.body.top_k,
            // AI21 penalties are objects with a scale plus applicability flags.
            frequencyPenalty: {
                scale: request.body.frequency_penalty * 100,
                applyToWhitespaces: false,
                applyToPunctuations: false,
                applyToNumbers: false,
                applyToStopwords: false,
                applyToEmojis: false,
            },
            presencePenalty: {
                scale: request.body.presence_penalty,
                applyToWhitespaces: false,
                applyToPunctuations: false,
                applyToNumbers: false,
                applyToStopwords: false,
                applyToEmojis: false,
            },
            countPenalty: {
                scale: request.body.count_pen,
                applyToWhitespaces: false,
                applyToPunctuations: false,
                applyToNumbers: false,
                applyToStopwords: false,
                applyToEmojis: false,
            },
            prompt: request.body.messages,
        }),
        signal: controller.signal,
    };

    fetch(`https://api.ai21.com/studio/v1/${request.body.model}/complete`, options)
        .then(r => r.json())
        .then(r => {
            // Fix: guard before indexing. Previously a missing `completions`
            // array was logged and then dereferenced anyway, relying on the
            // catch handler below to produce the error response.
            if (!Array.isArray(r.completions) || r.completions.length === 0) {
                console.log(r);
                return response.send({ error: true });
            }
            const text = r.completions[0].data.text;
            console.log(text);
            // Wrap it back to OAI format.
            const reply = { choices: [{ 'message': { 'content': text } }] };
            return response.send(reply);
        })
        .catch(err => {
            console.error(err);
            return response.send({ error: true });
        });
}
/**
 * Sends a request to MistralAI API.
 * Normalizes the message list to Mistral's constraints (the final message
 * must come from the user; system messages may only appear at the start),
 * then proxies the chat completion, streaming if requested.
 * @param {express.Request} request Express request
 * @param {express.Response} response Express response
 */
async function sendMistralAIRequest(request, response) {
    const apiKey = readSecret(SECRET_KEYS.MISTRALAI);

    if (!apiKey) {
        console.log('MistralAI API key is missing.');
        return response.status(400).send({ error: true });
    }

    try {
        // Must send a user role as the last message.
        const messages = Array.isArray(request.body.messages) ? request.body.messages : [];
        const lastMsg = messages[messages.length - 1];
        if (messages.length > 0 && lastMsg && (lastMsg.role === 'system' || lastMsg.role === 'assistant')) {
            // Fix: inspect the original role BEFORE overwriting it with 'user' —
            // the previous code reassigned the role first, which made both
            // content-transform branches below unreachable dead code.
            if (lastMsg.role === 'assistant' && lastMsg.name) {
                lastMsg.content = lastMsg.name + ': ' + lastMsg.content;
            } else if (lastMsg.role === 'system') {
                lastMsg.content = '[INST] ' + lastMsg.content + ' [/INST]';
            }
            lastMsg.role = 'user';
        }

        // System prompts can be stacked at the start, but any further sys
        // prompts after the first user/assistant message will break the model.
        let encounteredNonSystemMessage = false;
        messages.forEach(msg => {
            if ((msg.role === 'user' || msg.role === 'assistant') && !encounteredNonSystemMessage) {
                encounteredNonSystemMessage = true;
            }

            if (encounteredNonSystemMessage && msg.role === 'system') {
                msg.role = 'user';
                // Unsure if the instruct version is what they've deployed on their
                // endpoints and if this will make a difference or not.
                // It should be better than just sending the message as a user role
                // without context though.
                msg.content = '[INST] ' + msg.content + ' [/INST]';
            }
        });

        // Abort the upstream call if the client disconnects.
        const controller = new AbortController();
        request.socket.removeAllListeners('close');
        request.socket.on('close', function () {
            controller.abort();
        });

        const config = {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'Authorization': 'Bearer ' + apiKey,
            },
            body: JSON.stringify({
                'model': request.body.model,
                'messages': messages,
                'temperature': request.body.temperature,
                'top_p': request.body.top_p,
                'max_tokens': request.body.max_tokens,
                'stream': request.body.stream,
                'safe_mode': request.body.safe_mode,
                // -1 means "no fixed seed"; omit the field entirely in that case.
                'random_seed': request.body.seed === -1 ? undefined : request.body.seed,
            }),
            signal: controller.signal,
            timeout: 0,
        };

        const generateResponse = await fetch('https://api.mistral.ai/v1/chat/completions', config);
        if (request.body.stream) {
            forwardFetchResponse(generateResponse, response);
        } else {
            if (!generateResponse.ok) {
                console.log(`MistralAI API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`);
                // a 401 unauthorized response breaks the frontend auth, so return a 500 instead. prob a better way of dealing with this.
                // 401s are already handled by the streaming processor and dont pop up an error toast, that should probably be fixed too.
                return response.status(generateResponse.status === 401 ? 500 : generateResponse.status).send({ error: true });
            }
            const generateResponseJson = await generateResponse.json();
            return response.send(generateResponseJson);
        }
    } catch (error) {
        console.log('Error communicating with MistralAI API: ', error);
        if (!response.headersSent) {
            response.send({ error: true });
        } else {
            response.end();
        }
    }
}
const router = express.Router();

/**
 * POST /status — verifies credentials for the selected chat completion
 * source by listing its available models, and forwards the raw model list
 * to the client. On upstream failure, responds with a bypassable error.
 */
router.post('/status', jsonParser, async function (request, response_getstatus_openai) {
    if (!request.body) return response_getstatus_openai.sendStatus(400);

    let api_url;
    let api_key_openai;
    let headers;

    if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.OPENAI) {
        api_url = new URL(request.body.reverse_proxy || API_OPENAI).toString();
        api_key_openai = request.body.reverse_proxy ? request.body.proxy_password : readSecret(SECRET_KEYS.OPENAI);
        headers = {};
    } else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.OPENROUTER) {
        api_url = 'https://openrouter.ai/api/v1';
        api_key_openai = readSecret(SECRET_KEYS.OPENROUTER);
        // OpenRouter needs to pass the referer: https://openrouter.ai/docs
        headers = { 'HTTP-Referer': request.headers.referer };
    } else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.MISTRALAI) {
        api_url = 'https://api.mistral.ai/v1';
        api_key_openai = readSecret(SECRET_KEYS.MISTRALAI);
        // Fix: headers was left undefined in this branch (spreading undefined
        // is harmless, but initialize it for consistency with the others).
        headers = {};
    } else {
        console.log('This chat completion source is not supported yet.');
        return response_getstatus_openai.status(400).send({ error: true });
    }

    if (!api_key_openai && !request.body.reverse_proxy) {
        console.log('OpenAI API key is missing.');
        return response_getstatus_openai.status(400).send({ error: true });
    }

    try {
        const response = await fetch(api_url + '/models', {
            method: 'GET',
            headers: {
                'Authorization': 'Bearer ' + api_key_openai,
                ...headers,
            },
        });

        if (response.ok) {
            const data = await response.json();
            response_getstatus_openai.send(data);

            if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.OPENROUTER && Array.isArray(data?.data)) {
                // Fix: use a plain object keyed by model id — this was declared
                // as an array (`let models = []`) but only ever used as a map.
                const models = {};

                data.data.forEach(model => {
                    const context_length = model.context_length;
                    // Tokens purchasable per dollar, rounded to whole thousands.
                    const tokens_dollar = Number(1 / (1000 * model.pricing?.prompt));
                    const tokens_rounded = (Math.round(tokens_dollar * 1000) / 1000).toFixed(0);
                    models[model.id] = {
                        tokens_per_dollar: tokens_rounded + 'k',
                        context_length: context_length,
                    };
                });

                console.log('Available OpenRouter models:', models);
            } else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.MISTRALAI) {
                const models = data?.data;
                console.log(models);
            } else {
                const models = data?.data;

                if (Array.isArray(models)) {
                    const modelIds = models.filter(x => x && typeof x === 'object').map(x => x.id).sort();
                    console.log('Available OpenAI models:', modelIds);
                } else {
                    console.log('OpenAI endpoint did not return a list of models.');
                }
            }
        }
        else {
            console.log('OpenAI status check failed. Either Access Token is incorrect or API endpoint is down.');
            response_getstatus_openai.send({ error: true, can_bypass: true, data: { data: [] } });
        }
    } catch (e) {
        console.error(e);

        if (!response_getstatus_openai.headersSent) {
            response_getstatus_openai.send({ error: true });
        } else {
            response_getstatus_openai.end();
        }
    }
});
/**
 * POST /bias — translates an array of { text, value } logit-bias entries
 * into a { tokenId: value } map, using the tokenizer that matches the
 * model given in the `model` query parameter. Returns {} on failure.
 */
router.post('/bias', jsonParser, async function (request, response) {
    if (!request.body || !Array.isArray(request.body))
        return response.sendStatus(400);

    try {
        const result = {};
        const model = getTokenizerModel(String(request.query.model || ''));

        // no bias for claude
        if (model == 'claude') {
            return response.send(result);
        }

        let encodeFunction;

        if (sentencepieceTokenizers.includes(model)) {
            const tokenizer = getSentencepiceTokenizer(model);
            const instance = await tokenizer?.get();
            // Normalize sentencepiece's plain-array output to Uint32Array.
            encodeFunction = (text) => new Uint32Array(instance?.encodeIds(text));
        } else {
            const tokenizer = getTiktokenTokenizer(model);
            encodeFunction = (tokenizer.encode.bind(tokenizer));
        }

        for (const entry of request.body) {
            // Skip malformed or empty entries.
            if (!entry || !entry.text) {
                continue;
            }

            try {
                const tokens = getEntryTokens(entry.text, encodeFunction);

                // Every token of the entry gets the same bias value.
                for (const token of tokens) {
                    result[token] = entry.value;
                }
            } catch {
                console.warn('Tokenizer failed to encode:', entry.text);
            }
        }

        // not needed for cached tokenizers
        //tokenizer.free();
        return response.send(result);

        /**
         * Gets tokenids for a given entry
         * @param {string} text Entry text
         * @param {(text: string) => Uint32Array} encode Function to encode text to token ids
         * @returns {Uint32Array} Array of token ids
         */
        function getEntryTokens(text, encode) {
            // Get raw token ids from JSON array
            if (text.trim().startsWith('[') && text.trim().endsWith(']')) {
                try {
                    const json = JSON.parse(text);

                    if (Array.isArray(json) && json.every(x => typeof x === 'number')) {
                        return new Uint32Array(json);
                    }
                } catch {
                    // ignore
                }
            }

            // Otherwise, get token ids from tokenizer
            return encode(text);
        }
    } catch (error) {
        console.error(error);
        return response.send({});
    }
});
router.post('/generate', jsonParser, function (request, response) {
    if (!request.body) return response.status(400).send({ error: true });

    // Dispatch to dedicated senders for non-OpenAI-compatible backends.
    switch (request.body.chat_completion_source) {
        case CHAT_COMPLETION_SOURCES.CLAUDE: return sendClaudeRequest(request, response);
        case CHAT_COMPLETION_SOURCES.SCALE: return sendScaleRequest(request, response);
        case CHAT_COMPLETION_SOURCES.AI21: return sendAI21Request(request, response);
        case CHAT_COMPLETION_SOURCES.MAKERSUITE: return sendMakerSuiteRequest(request, response);
        case CHAT_COMPLETION_SOURCES.MISTRALAI: return sendMistralAIRequest(request, response);
    }

    let apiUrl;
    let apiKey;
    let headers;
    let bodyParams;

    if (request.body.chat_completion_source !== CHAT_COMPLETION_SOURCES.OPENROUTER) {
        apiUrl = new URL(request.body.reverse_proxy || API_OPENAI).toString();
        apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(SECRET_KEYS.OPENAI);
        headers = {};
        bodyParams = {};

        if (getConfigValue('openai.randomizeUserId', false)) {
            bodyParams['user'] = uuidv4();
        }
    } else {
        apiUrl = 'https://openrouter.ai/api/v1';
        apiKey = readSecret(SECRET_KEYS.OPENROUTER);
        // OpenRouter needs to pass the referer: https://openrouter.ai/docs
        headers = { 'HTTP-Referer': request.headers.referer };
        bodyParams = { 'transforms': ['middle-out'] };

        if (request.body.use_fallback) {
            bodyParams['route'] = 'fallback';
        }
    }

    if (!apiKey && !request.body.reverse_proxy) {
        console.log('OpenAI API key is missing.');
        return response.status(400).send({ error: true });
    }

    // Add custom stop sequences
    if (Array.isArray(request.body.stop) && request.body.stop.length > 0) {
        bodyParams['stop'] = request.body.stop;
    }

    const isTextCompletion = Boolean(request.body.model && TEXT_COMPLETION_MODELS.includes(request.body.model)) || typeof request.body.messages === 'string';
    const textPrompt = isTextCompletion ? convertTextCompletionPrompt(request.body.messages) : '';
    const endpointUrl = isTextCompletion && request.body.chat_completion_source !== CHAT_COMPLETION_SOURCES.OPENROUTER ?
        `${apiUrl}/completions` :
        `${apiUrl}/chat/completions`;

    // Abort the upstream request if the client disconnects.
    const controller = new AbortController();
    request.socket.removeAllListeners('close');
    request.socket.on('close', function () {
        controller.abort();
    });

    /** @type {import('node-fetch').RequestInit} */
    const config = {
        method: 'post',
        headers: {
            'Content-Type': 'application/json',
            'Authorization': 'Bearer ' + apiKey,
            ...headers,
        },
        body: JSON.stringify({
            'messages': isTextCompletion === false ? request.body.messages : undefined,
            'prompt': isTextCompletion === true ? textPrompt : undefined,
            'model': request.body.model,
            'temperature': request.body.temperature,
            'max_tokens': request.body.max_tokens,
            'stream': request.body.stream,
            'presence_penalty': request.body.presence_penalty,
            'frequency_penalty': request.body.frequency_penalty,
            'top_p': request.body.top_p,
            'top_k': request.body.top_k,
            'stop': isTextCompletion === false ? request.body.stop : undefined,
            'logit_bias': request.body.logit_bias,
            'seed': request.body.seed,
            ...bodyParams,
        }),
        signal: controller.signal,
        timeout: 0,
    };

    console.log(JSON.parse(String(config.body)));

    makeRequest(config, response, request);

    /**
     * Makes a fetch request to the OpenAI API endpoint.
     * Retries with exponential backoff on HTTP 429 (quota exceeded).
     * @param {import('node-fetch').RequestInit} config Fetch config
     * @param {express.Response} response Express response
     * @param {express.Request} request Express request
     * @param {Number} retries Number of retries left
     * @param {Number} timeout Request timeout in ms
     */
    async function makeRequest(config, response, request, retries = 5, timeout = 5000) {
        try {
            const fetchResponse = await fetch(endpointUrl, config);

            if (request.body.stream) {
                console.log('Streaming request in progress');
                forwardFetchResponse(fetchResponse, response);
                return;
            }

            if (fetchResponse.ok) {
                let json = await fetchResponse.json();
                response.send(json);
                console.log(json);
                // Optional chaining on choices: error-shaped 200 responses may omit it.
                console.log(json?.choices?.[0]?.message);
            } else if (fetchResponse.status === 429 && retries > 0) {
                console.log(`Out of quota, retrying in ${Math.round(timeout / 1000)}s`);
                setTimeout(() => {
                    timeout *= 2;
                    makeRequest(config, response, request, retries - 1, timeout);
                }, timeout);
            } else {
                await handleErrorResponse(fetchResponse);
            }
        } catch (error) {
            console.log('Generation failed', error);
            if (!response.headersSent) {
                response.send({ error: true });
            } else {
                response.end();
            }
        }
    }

    /**
     * Forwards an upstream error to the client, mapping common HTTP
     * statuses to human-readable messages.
     * @param {import("node-fetch").Response} errorResponse
     */
    async function handleErrorResponse(errorResponse) {
        const responseText = await errorResponse.text();
        const errorData = tryParse(responseText);

        const statusMessages = {
            400: 'Bad request',
            401: 'Unauthorized',
            402: 'Credit limit reached',
            403: 'Forbidden',
            404: 'Not found',
            429: 'Too many requests',
            451: 'Unavailable for legal reasons',
            502: 'Bad gateway',
        };

        const message = errorData?.error?.message || statusMessages[errorResponse.status] || 'Unknown error occurred';
        const quota_error = errorResponse.status === 429 && errorData?.error?.type === 'insufficient_quota';
        console.log(message);

        if (!response.headersSent) {
            response.send({ error: { message }, quota_error: quota_error });
        } else if (!response.writableEnded) {
            // Fix: write the error body text, not the node-fetch Response object.
            response.write(responseText);
        } else {
            response.end();
        }
    }
});
module.exports = {
router,
};

View File

@ -0,0 +1,188 @@
const express = require('express');
const fetch = require('node-fetch').default;
const { jsonParser } = require('../../express-common');
const { forwardFetchResponse, delay } = require('../../util');
const { getOverrideHeaders, setAdditionalHeaders } = require('../../additional-headers');
const router = express.Router();
// Proxies a text-generation request to a KoboldAI / KoboldCpp server,
// with streaming support, abort-on-disconnect, and a retry loop for
// busy-server errors.
router.post('/generate', jsonParser, async function (request, response_generate) {
    if (!request.body) return response_generate.sendStatus(400);

    // Rewrite 'localhost' to the IPv4 loopback literal — presumably to avoid
    // IPv6 (::1) resolution mismatches with the backend; TODO confirm.
    if (request.body.api_server.indexOf('localhost') != -1) {
        request.body.api_server = request.body.api_server.replace('localhost', '127.0.0.1');
    }

    const request_prompt = request.body.prompt;

    // Abort the upstream generation when the client disconnects.
    const controller = new AbortController();
    // removeAllListeners avoids stacking 'close' handlers across requests
    // on a reused socket.
    request.socket.removeAllListeners('close');
    request.socket.on('close', async function () {
        if (request.body.can_abort && !response_generate.writableEnded) {
            try {
                console.log('Aborting Kobold generation...');
                // send abort signal to koboldcpp
                const abortResponse = await fetch(`${request.body.api_server}/extra/abort`, {
                    method: 'POST',
                });

                if (!abortResponse.ok) {
                    console.log('Error sending abort request to Kobold:', abortResponse.status);
                }
            } catch (error) {
                console.log(error);
            }
        }
        controller.abort();
    });

    // Minimal payload used when GUI-managed settings are enabled.
    let this_settings = {
        prompt: request_prompt,
        use_story: false,
        use_memory: false,
        use_authors_note: false,
        use_world_info: false,
        max_context_length: request.body.max_context_length,
        max_length: request.body.max_length,
    };

    // Otherwise, forward the full sampler configuration from the client.
    if (!request.body.gui_settings) {
        this_settings = {
            prompt: request_prompt,
            use_story: false,
            use_memory: false,
            use_authors_note: false,
            use_world_info: false,
            max_context_length: request.body.max_context_length,
            max_length: request.body.max_length,
            rep_pen: request.body.rep_pen,
            rep_pen_range: request.body.rep_pen_range,
            rep_pen_slope: request.body.rep_pen_slope,
            temperature: request.body.temperature,
            tfs: request.body.tfs,
            top_a: request.body.top_a,
            top_k: request.body.top_k,
            top_p: request.body.top_p,
            min_p: request.body.min_p,
            typical: request.body.typical,
            sampler_order: request.body.sampler_order,
            singleline: !!request.body.singleline,
            use_default_badwordsids: request.body.use_default_badwordsids,
            mirostat: request.body.mirostat,
            mirostat_eta: request.body.mirostat_eta,
            mirostat_tau: request.body.mirostat_tau,
            grammar: request.body.grammar,
            sampler_seed: request.body.sampler_seed,
        };
        // Only include stop_sequence when provided — some backends reject
        // an explicit null/undefined value here; TODO confirm.
        if (request.body.stop_sequence) {
            this_settings['stop_sequence'] = request.body.stop_sequence;
        }
    }

    console.log(this_settings);
    const args = {
        body: JSON.stringify(this_settings),
        headers: Object.assign(
            { 'Content-Type': 'application/json' },
            // Per-host override headers keyed by the API server's host.
            getOverrideHeaders((new URL(request.body.api_server))?.host),
        ),
        signal: controller.signal,
    };

    // Retry for up to MAX_RETRIES * delayAmount ms while the server reports busy.
    const MAX_RETRIES = 50;
    const delayAmount = 2500;
    for (let i = 0; i < MAX_RETRIES; i++) {
        try {
            // KoboldCpp exposes a dedicated SSE endpoint for streaming.
            const url = request.body.streaming ? `${request.body.api_server}/extra/generate/stream` : `${request.body.api_server}/v1/generate`;
            const response = await fetch(url, { method: 'POST', timeout: 0, ...args });

            if (request.body.streaming) {
                // Pipe remote SSE stream to Express response
                forwardFetchResponse(response, response_generate);
                return;
            } else {
                if (!response.ok) {
                    const errorText = await response.text();
                    console.log(`Kobold returned error: ${response.status} ${response.statusText} ${errorText}`);

                    try {
                        // Kobold error bodies are usually JSON with detail.msg;
                        // fall back to the raw text when parsing fails.
                        const errorJson = JSON.parse(errorText);
                        const message = errorJson?.detail?.msg || errorText;
                        return response_generate.status(400).send({ error: { message } });
                    } catch {
                        return response_generate.status(400).send({ error: { message: errorText } });
                    }
                }

                const data = await response.json();
                console.log('Endpoint response:', data);
                return response_generate.send(data);
            }
        } catch (error) {
            // response
            switch (error?.status) {
                case 403:
                case 503: // retry in case of temporary service issue, possibly caused by a queue failure?
                    console.debug(`KoboldAI is busy. Retry attempt ${i + 1} of ${MAX_RETRIES}...`);
                    await delay(delayAmount);
                    break;
                default:
                    if ('status' in error) {
                        console.log('Status Code from Kobold:', error.status);
                    }
                    return response_generate.send({ error: true });
            }
        }
    }

    console.log('Max retries exceeded. Giving up.');
    return response_generate.send({ error: true });
});
// Reports the Kobold United / KoboldCpp versions and the currently loaded
// model for a given API server, tolerating partially-implemented backends.
router.post('/status', jsonParser, async function (request, response) {
    if (!request.body) return response.sendStatus(400);

    let api_server = request.body.api_server;
    if (api_server.indexOf('localhost') != -1) {
        api_server = api_server.replace('localhost', '127.0.0.1');
    }

    const args = {
        headers: { 'Content-Type': 'application/json' },
    };
    setAdditionalHeaders(request, args, api_server);

    const result = {};

    const [koboldUnitedResponse, koboldExtraResponse, koboldModelResponse] = await Promise.all([
        // We catch errors both from the response not having a successful HTTP status and from JSON parsing failing

        // Kobold United API version
        // Fix: pass the prepared args so the additional headers are actually sent.
        fetch(`${api_server}/v1/info/version`, args).then(response => {
            // Fix: template used the comma operator and only printed statusText.
            if (!response.ok) throw new Error(`Kobold API error: ${response.status} ${response.statusText}`);
            return response.json();
        }).catch(() => ({ result: '0.0.0' })),

        // KoboldCpp version
        fetch(`${api_server}/extra/version`, args).then(response => {
            if (!response.ok) throw new Error(`Kobold API error: ${response.status} ${response.statusText}`);
            return response.json();
        }).catch(() => ({ version: '0.0' })),

        // Current model
        fetch(`${api_server}/v1/model`, args).then(response => {
            if (!response.ok) throw new Error(`Kobold API error: ${response.status} ${response.statusText}`);
            return response.json();
        }).catch(() => null),
    ]);

    result.koboldUnitedVersion = koboldUnitedResponse.result;
    // Fix: /extra/version reports the version under 'version' (the fallback
    // above is { version: '0.0' }); reading 'result' yielded the product name
    // or undefined.
    result.koboldCppVersion = koboldExtraResponse.version;
    // 'ReadOnly' is Kobold's placeholder when no model is loaded.
    result.model = !koboldModelResponse || koboldModelResponse.result === 'ReadOnly' ?
        'no_connection' :
        koboldModelResponse.result;

    response.send(result);
});
module.exports = { router };

View File

@ -0,0 +1,76 @@
const express = require('express');
const fetch = require('node-fetch').default;
const { jsonParser } = require('../../express-common');
const { readSecret, SECRET_KEYS } = require('../secrets');
const router = express.Router();
// Runs a prompt through Scale Spellbook's variant-run endpoint and returns
// the first output, authenticating with the stored _jwt cookie.
router.post('/generate', jsonParser, function (request, response) {
    if (!request.body) return response.sendStatus(400);

    // One-shot "New Variant" payload: the prompt is piped straight through
    // the {{input}} template with the client's sampling parameters.
    const payload = {
        json: {
            variant: {
                name: 'New Variant',
                appId: '',
                taxonomy: null,
            },
            prompt: {
                id: '',
                template: '{{input}}\n',
                exampleVariables: {},
                variablesSourceDataId: null,
                systemMessage: request.body.sysprompt,
            },
            modelParameters: {
                id: '',
                modelId: 'GPT4',
                modelType: 'OpenAi',
                maxTokens: request.body.max_tokens,
                temperature: request.body.temp,
                stop: 'user:',
                suffix: null,
                topP: request.body.top_p,
                logprobs: null,
                logitBias: request.body.logit_bias,
            },
            inputs: [
                {
                    index: '-1',
                    valueByName: {
                        input: request.body.prompt,
                    },
                },
            ],
        },
        meta: {
            values: {
                'variant.taxonomy': ['undefined'],
                'prompt.variablesSourceDataId': ['undefined'],
                'modelParameters.suffix': ['undefined'],
                'modelParameters.logprobs': ['undefined'],
            },
        },
    };

    fetch('https://dashboard.scale.com/spellbook/api/trpc/v2.variant.run', {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'cookie': `_jwt=${readSecret(SECRET_KEYS.SCALE_COOKIE)}`,
        },
        body: JSON.stringify(payload),
    })
        .then((scaleResponse) => scaleResponse.json())
        .then((data) => {
            console.log(data.result.data.json.outputs[0]);
            return response.send({ output: data.result.data.json.outputs[0] });
        })
        .catch((error) => {
            console.error('Error:', error);
            return response.send({ error: true });
        });
});
module.exports = { router };

View File

@ -0,0 +1,208 @@
const express = require('express');
const fetch = require('node-fetch').default;
const { jsonParser } = require('../../express-common');
const { TEXTGEN_TYPES } = require('../../constants');
const { forwardFetchResponse } = require('../../util');
const { setAdditionalHeaders } = require('../../additional-headers');
const router = express.Router();
//************** Ooba/OpenAI text completions API
//************** Ooba/OpenAI text completions API
// Probes a text-generation backend (Ooba, Aphrodite, KoboldCpp, Mancer,
// TabbyAPI) for its model list and, where supported, the loaded model name.
router.post('/status', jsonParser, async function (request, response) {
    if (!request.body) return response.sendStatus(400);

    try {
        if (request.body.api_server.indexOf('localhost') !== -1) {
            request.body.api_server = request.body.api_server.replace('localhost', '127.0.0.1');
        }

        console.log('Trying to connect to API:', request.body);

        // Convert to string + remove trailing slash + /v1 suffix
        const baseUrl = String(request.body.api_server).replace(/\/$/, '').replace(/\/v1$/, '');

        const args = {
            headers: { 'Content-Type': 'application/json' },
        };

        setAdditionalHeaders(request, args, baseUrl);

        let url = baseUrl;
        let result = '';

        if (request.body.legacy_api) {
            url += '/v1/model';
        } else {
            // Each backend exposes its model list under a different path.
            switch (request.body.api_type) {
                case TEXTGEN_TYPES.OOBA:
                case TEXTGEN_TYPES.APHRODITE:
                case TEXTGEN_TYPES.KOBOLDCPP:
                    url += '/v1/models';
                    break;
                case TEXTGEN_TYPES.MANCER:
                    url += '/oai/v1/models';
                    break;
                case TEXTGEN_TYPES.TABBY:
                    url += '/v1/model/list';
                    break;
            }
        }

        const modelsReply = await fetch(url, args);

        if (!modelsReply.ok) {
            console.log('Models endpoint is offline.');
            // Fix: status(400) alone never sends, leaving the request hanging.
            return response.sendStatus(400);
        }

        const data = await modelsReply.json();

        if (request.body.legacy_api) {
            console.log('Legacy API response:', data);
            return response.send({ result: data?.result });
        }

        if (!Array.isArray(data.data)) {
            console.log('Models response is not an array.');
            return response.sendStatus(400);
        }

        const modelIds = data.data.map(x => x.id);
        console.log('Models available:', modelIds);

        // Set result to the first model ID
        result = modelIds[0] || 'Valid';

        if (request.body.api_type === TEXTGEN_TYPES.OOBA) {
            try {
                // Ooba reports the loaded model on a separate internal endpoint.
                const modelInfoUrl = baseUrl + '/v1/internal/model/info';
                const modelInfoReply = await fetch(modelInfoUrl, args);

                if (modelInfoReply.ok) {
                    const modelInfo = await modelInfoReply.json();
                    console.log('Ooba model info:', modelInfo);

                    const modelName = modelInfo?.model_name;
                    result = modelName || result;
                }
            } catch (error) {
                console.error(`Failed to get Ooba model info: ${error}`);
            }
        } else if (request.body.api_type === TEXTGEN_TYPES.TABBY) {
            try {
                const modelInfoUrl = baseUrl + '/v1/model';
                const modelInfoReply = await fetch(modelInfoUrl, args);

                if (modelInfoReply.ok) {
                    const modelInfo = await modelInfoReply.json();
                    console.log('Tabby model info:', modelInfo);

                    const modelName = modelInfo?.id;
                    result = modelName || result;
                } else {
                    // TabbyAPI returns an error 400 if a model isn't loaded
                    result = 'None';
                }
            } catch (error) {
                console.error(`Failed to get TabbyAPI model info: ${error}`);
            }
        }

        return response.send({ result, data: data.data });
    } catch (error) {
        console.error(error);
        // Fix: status(500) alone never sends, leaving the request hanging.
        return response.sendStatus(500);
    }
});
// Proxies a completion request to a text-generation backend, streaming when
// requested, and normalizes legacy Kobold-style responses to the OAI shape.
router.post('/generate', jsonParser, async function (request, response_generate) {
    if (!request.body) return response_generate.sendStatus(400);

    try {
        if (request.body.api_server.indexOf('localhost') !== -1) {
            request.body.api_server = request.body.api_server.replace('localhost', '127.0.0.1');
        }

        const baseUrl = request.body.api_server;
        console.log(request.body);

        // Abort the upstream request if the client disconnects.
        const controller = new AbortController();
        request.socket.removeAllListeners('close');
        request.socket.on('close', function () {
            controller.abort();
        });

        // Convert to string + remove trailing slash + /v1 suffix
        let url = String(baseUrl).replace(/\/$/, '').replace(/\/v1$/, '');

        if (request.body.legacy_api) {
            url += '/v1/generate';
        } else {
            switch (request.body.api_type) {
                case TEXTGEN_TYPES.APHRODITE:
                case TEXTGEN_TYPES.OOBA:
                case TEXTGEN_TYPES.TABBY:
                case TEXTGEN_TYPES.KOBOLDCPP:
                    url += '/v1/completions';
                    break;
                case TEXTGEN_TYPES.MANCER:
                    url += '/oai/v1/completions';
                    break;
            }
        }

        const args = {
            method: 'POST',
            body: JSON.stringify(request.body),
            headers: { 'Content-Type': 'application/json' },
            signal: controller.signal,
            timeout: 0,
        };

        setAdditionalHeaders(request, args, baseUrl);

        if (request.body.stream) {
            const completionsStream = await fetch(url, args);
            // Pipe remote SSE stream to Express response
            forwardFetchResponse(completionsStream, response_generate);
        }
        else {
            const completionsReply = await fetch(url, args);

            if (completionsReply.ok) {
                const data = await completionsReply.json();
                console.log('Endpoint response:', data);

                // Wrap legacy response to OAI completions format
                if (request.body.legacy_api) {
                    // Fix: optional-chain the index too — a malformed reply
                    // without 'results' used to throw here.
                    const text = data?.results?.[0]?.text;
                    data['choices'] = [{ text }];
                }

                return response_generate.send(data);
            } else {
                const text = await completionsReply.text();
                const errorBody = { error: true, status: completionsReply.status, response: text };

                if (!response_generate.headersSent) {
                    return response_generate.send(errorBody);
                }

                return response_generate.end();
            }
        }
    } catch (error) {
        let value = { error: true, status: error?.status, response: error?.statusText };
        console.log('Endpoint error:', error);

        if (!response_generate.headersSent) {
            return response_generate.send(value);
        }

        return response_generate.end();
    }
});
module.exports = { router };

View File

@ -2,7 +2,6 @@ const fs = require('fs');
const path = require('path'); const path = require('path');
const express = require('express'); const express = require('express');
const sanitize = require('sanitize-filename'); const sanitize = require('sanitize-filename');
const writeFileAtomicSync = require('write-file-atomic').sync;
const { jsonParser, urlencodedParser } = require('../express-common'); const { jsonParser, urlencodedParser } = require('../express-common');
const { DIRECTORIES, UPLOADS_PATH } = require('../constants'); const { DIRECTORIES, UPLOADS_PATH } = require('../constants');
@ -17,17 +16,6 @@ router.post('/all', jsonParser, function (request, response) {
}); });
router.post('/set', jsonParser, function (request, response) {
try {
const bg = `#bg1 {background-image: url('../backgrounds/${request.body.bg}');}`;
writeFileAtomicSync('public/css/bg_load.css', bg, 'utf8');
response.send({ result: 'ok' });
} catch (err) {
console.log(err);
response.send(err);
}
});
router.post('/delete', jsonParser, function (request, response) { router.post('/delete', jsonParser, function (request, response) {
if (!request.body) return response.sendStatus(400); if (!request.body) return response.sendStatus(400);

View File

@ -22,8 +22,25 @@ const { importRisuSprites } = require('./sprites');
let characters = {}; let characters = {};
// KV-store for parsed character data
const characterDataCache = new Map();
/**
* Reads the character card from the specified image file.
* @param {string} img_url - Path to the image file
* @param {string} input_format - 'png'
* @returns {Promise<string | undefined>} - Character card data
*/
async function charaRead(img_url, input_format) { async function charaRead(img_url, input_format) {
return characterCardParser.parse(img_url, input_format); const stat = fs.statSync(img_url);
const cacheKey = `${img_url}-${stat.mtimeMs}`;
if (characterDataCache.has(cacheKey)) {
return characterDataCache.get(cacheKey);
}
const result = characterCardParser.parse(img_url, input_format);
characterDataCache.set(cacheKey, result);
return result;
} }
/** /**
@ -32,6 +49,13 @@ async function charaRead(img_url, input_format) {
*/ */
async function charaWrite(img_url, data, target_img, response = undefined, mes = 'ok', crop = undefined) { async function charaWrite(img_url, data, target_img, response = undefined, mes = 'ok', crop = undefined) {
try { try {
// Reset the cache
for (const key of characterDataCache.keys()) {
if (key.startsWith(img_url)) {
characterDataCache.delete(key);
break;
}
}
// Read the image, resize, and save it as a PNG into the buffer // Read the image, resize, and save it as a PNG into the buffer
const image = await tryReadImage(img_url, crop); const image = await tryReadImage(img_url, crop);

View File

@ -10,9 +10,9 @@ const { DIRECTORIES, UPLOADS_PATH } = require('../constants');
const { getConfigValue, humanizedISO8601DateTime, tryParse, generateTimestamp, removeOldBackups } = require('../util'); const { getConfigValue, humanizedISO8601DateTime, tryParse, generateTimestamp, removeOldBackups } = require('../util');
/** /**
* * Saves a chat to the backups directory.
* @param {string} name * @param {string} name The name of the chat.
* @param {string} chat * @param {string} chat The serialized chat to save.
*/ */
function backupChat(name, chat) { function backupChat(name, chat) {
try { try {
@ -65,7 +65,6 @@ router.post('/get', jsonParser, function (request, response) {
return response.send({}); return response.send({});
} }
if (!request.body.file_name) { if (!request.body.file_name) {
return response.send({}); return response.send({});
} }
@ -140,7 +139,6 @@ router.post('/delete', jsonParser, function (request, response) {
} }
return response.send('ok'); return response.send('ok');
}); });
@ -190,6 +188,10 @@ router.post('/export', jsonParser, async function (request, response) {
let buffer = ''; let buffer = '';
rl.on('line', (line) => { rl.on('line', (line) => {
const data = JSON.parse(line); const data = JSON.parse(line);
// Skip non-printable/prompt-hidden messages
if (data.is_system) {
return;
}
if (data.mes) { if (data.mes) {
const name = data.name; const name = data.name;
const message = (data?.extra?.display_text || data?.mes || '').replace(/\r?\n/g, '\n'); const message = (data?.extra?.display_text || data?.mes || '').replace(/\r?\n/g, '\n');

66
src/endpoints/google.js Normal file
View File

@ -0,0 +1,66 @@
const { readSecret, SECRET_KEYS } = require('./secrets');
const fetch = require('node-fetch').default;
const express = require('express');
const { jsonParser } = require('../express-common');
const { GEMINI_SAFETY } = require('../constants');
const router = express.Router();
// Captions an image using Google's gemini-pro-vision model via the
// MakerSuite generateContent endpoint.
router.post('/caption-image', jsonParser, async (request, response) => {
    try {
        // request.body.image is a data URL: "data:<mime>;base64,<payload>".
        const mimeType = request.body.image.split(';')[0].split(':')[1];
        const base64Data = request.body.image.split(',')[1];
        const url = `https://generativelanguage.googleapis.com/v1beta/models/gemini-pro-vision:generateContent?key=${readSecret(SECRET_KEYS.MAKERSUITE)}`;
        const body = {
            contents: [{
                parts: [
                    { text: request.body.prompt },
                    {
                        inlineData: {
                            mimeType: 'image/png', // It needs to specify a MIME type in data if it's not a PNG
                            // NOTE(review): for non-PNG inputs this forwards the
                            // whole data URL as the payload — verify upstream
                            // accepts that format.
                            data: mimeType === 'image/png' ? base64Data : request.body.image,
                        },
                    }],
            }],
            safetySettings: GEMINI_SAFETY,
            generationConfig: { maxOutputTokens: 1000 },
        };

        console.log('Multimodal captioning request', body);

        const result = await fetch(url, {
            body: JSON.stringify(body),
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
            },
            timeout: 0,
        });

        if (!result.ok) {
            const error = await result.json();
            console.log(`MakerSuite API returned error: ${result.status} ${result.statusText}`, error);
            return response.status(result.status).send({ error: true });
        }

        const data = await result.json();
        console.log('Multimodal captioning response', data);

        const candidates = data?.candidates;
        if (!candidates) {
            return response.status(500).send('No candidates found, image was most likely filtered.');
        }

        // Fix: safety-blocked candidates may lack 'content'; optional chaining
        // routes that case to the 'No caption found' response instead of throwing.
        const caption = candidates[0]?.content?.parts?.[0]?.text;
        if (!caption) {
            return response.status(500).send('No caption found');
        }

        return response.json({ caption });
    } catch (error) {
        console.error(error);
        response.status(500).send('Internal server error');
    }
});
module.exports = { router };

View File

@ -73,6 +73,7 @@ router.post('/create', jsonParser, (request, response) => {
fav: request.body.fav, fav: request.body.fav,
chat_id: request.body.chat_id ?? id, chat_id: request.body.chat_id ?? id,
chats: request.body.chats ?? [id], chats: request.body.chats ?? [id],
auto_mode_delay: request.body.auto_mode_delay ?? 5,
}; };
const pathToFile = path.join(DIRECTORIES.groups, `${id}.json`); const pathToFile = path.join(DIRECTORIES.groups, `${id}.json`);
const fileData = JSON.stringify(groupMetadata); const fileData = JSON.stringify(groupMetadata);

View File

@ -1,9 +1,8 @@
const fetch = require('node-fetch').default; const fetch = require('node-fetch').default;
const express = require('express'); const express = require('express');
const util = require('util'); const util = require('util');
const { Readable } = require('stream');
const { readSecret, SECRET_KEYS } = require('./secrets'); const { readSecret, SECRET_KEYS } = require('./secrets');
const { readAllChunks, extractFileFromZipBuffer } = require('../util'); const { readAllChunks, extractFileFromZipBuffer, forwardFetchResponse } = require('../util');
const { jsonParser } = require('../express-common'); const { jsonParser } = require('../express-common');
const API_NOVELAI = 'https://api.novelai.net'; const API_NOVELAI = 'https://api.novelai.net';
@ -190,17 +189,7 @@ router.post('/generate', jsonParser, async function (req, res) {
if (req.body.streaming) { if (req.body.streaming) {
// Pipe remote SSE stream to Express response // Pipe remote SSE stream to Express response
response.body.pipe(res); forwardFetchResponse(response, res);
req.socket.on('close', function () {
if (response.body instanceof Readable) response.body.destroy(); // Close the remote stream
res.end(); // End the Express response
});
response.body.on('end', function () {
console.log('Streaming request finished');
res.end();
});
} else { } else {
if (!response.ok) { if (!response.ok) {
const text = await response.text(); const text = await response.text();

View File

@ -0,0 +1,166 @@
/**
 * Convert a prompt from the ChatML objects to the format used by Claude.
 * Note: mutates the input array (name folding and system-message removal).
 * @param {object[]} messages Array of messages
 * @param {boolean} addHumanPrefix Add Human prefix
 * @param {boolean} addAssistantPostfix Add Assistant postfix
 * @param {boolean} withSystemPrompt Build system prompt before "\n\nHuman: "
 * @returns {string} Prompt for Claude
 * @copyright Prompt Conversion script taken from RisuAI by kwaroran (GPLv3).
 */
function convertClaudePrompt(messages, addHumanPrefix, addAssistantPostfix, withSystemPrompt) {
    // Claude has no concept of message names: fold them into the content.
    for (const msg of messages) {
        if (msg.name && msg.role !== 'system') {
            msg.content = msg.name + ': ' + msg.content;
            delete msg.name;
        }
    }

    // Collect the leading run of plain (unnamed) system messages into a
    // separate system prompt and drop them from the message list.
    let systemPrompt = '';
    if (withSystemPrompt) {
        let trailingSystemIdx = -1;
        for (let i = 0; i < messages.length - 1; i++) {
            const msg = messages[i];
            const isPlainSystem = msg.role === 'system' && !msg.name;
            if (!isPlainSystem) {
                trailingSystemIdx = i - 1;
                break;
            }
            systemPrompt += msg.content + '\n\n';
        }
        if (trailingSystemIdx >= 0) {
            messages.splice(0, trailingSystemIdx + 1);
        }
    }

    const segments = [];
    for (const msg of messages) {
        let prefix = '';
        if (msg.role === 'assistant') {
            prefix = '\n\nAssistant: ';
        } else if (msg.role === 'user') {
            prefix = '\n\nHuman: ';
        } else if (msg.role === 'system') {
            // According to the Claude docs, H: and A: should be used for example conversations.
            if (msg.name === 'example_assistant') {
                prefix = '\n\nA: ';
            } else if (msg.name === 'example_user') {
                prefix = '\n\nH: ';
            } else {
                prefix = '\n\n';
            }
        }
        segments.push(prefix + msg.content);
    }
    let requestPrompt = segments.join('');

    if (addHumanPrefix) {
        requestPrompt = '\n\nHuman: ' + requestPrompt;
    }
    if (addAssistantPostfix) {
        requestPrompt = requestPrompt + '\n\nAssistant: ';
    }
    if (withSystemPrompt) {
        requestPrompt = systemPrompt + requestPrompt;
    }

    return requestPrompt;
}
/**
 * Convert a prompt from the ChatML objects to the format used by Google MakerSuite models.
 * @param {object[]} messages Array of messages
 * @param {string} model Model name
 * @returns {object[]} Prompt for Google MakerSuite models
 */
function convertGooglePrompt(messages, model) {
    // This is a 1x1 transparent PNG
    const PNG_PIXEL = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII=';

    const isMultimodal = model === 'gemini-pro-vision';

    if (isMultimodal) {
        // The vision model takes a single user turn: all messages flattened
        // into one transcript plus exactly one inline image.
        const transcript = messages
            .map((message) => (message.role === 'assistant' ? 'MODEL: ' : 'USER: ') + message.content)
            .join('\n\n')
            .trim();

        const imageEntry = messages.find((message) => message.content?.[1]?.image_url);
        const imageData = imageEntry?.content?.[1]?.image_url?.data ?? PNG_PIXEL;

        return [{
            parts: [
                { text: transcript },
                {
                    inlineData: {
                        mimeType: 'image/png',
                        data: imageData,
                    },
                },
            ],
            role: 'user',
        }];
    }

    // Text models require strictly alternating roles, so consecutive
    // messages from the same side are merged into one turn.
    const contents = [];
    let pendingRole = '';
    let pendingText = '';

    messages.forEach((message, index) => {
        const role = message.role === 'assistant' ? 'model' : 'user';
        if (pendingRole === role) {
            pendingText += '\n\n' + message.content;
        } else {
            if (pendingText !== '') {
                contents.push({
                    parts: [{ text: pendingText.trim() }],
                    role: pendingRole,
                });
            }
            pendingText = message.content;
            pendingRole = role;
        }
        // Flush the final accumulated turn.
        if (index === messages.length - 1) {
            contents.push({
                parts: [{ text: pendingText.trim() }],
                role: pendingRole,
            });
        }
    });

    return contents;
}
/**
 * Convert a prompt from the ChatML objects to the format used by Text Completion API.
 * @param {object[]} messages Array of messages
 * @returns {string} Prompt for Text Completion API
 */
function convertTextCompletionPrompt(messages) {
    // Pre-formatted prompts pass straight through.
    if (typeof messages === 'string') {
        return messages;
    }

    const lines = messages.map((message) => {
        // Unnamed system messages get the 'System' speaker label; named ones
        // keep their name; everything else is labeled with its role.
        if (message.role === 'system') {
            const speaker = message.name === undefined ? 'System' : message.name;
            return speaker + ': ' + message.content;
        }
        return message.role + ': ' + message.content;
    });

    return lines.join('\n') + '\nassistant:';
}
module.exports = {
convertClaudePrompt,
convertGooglePrompt,
convertTextCompletionPrompt,
};

View File

@ -23,9 +23,10 @@ const SECRET_KEYS = {
SCALE_COOKIE: 'scale_cookie', SCALE_COOKIE: 'scale_cookie',
ONERING_URL: 'oneringtranslator_url', ONERING_URL: 'oneringtranslator_url',
DEEPLX_URL: 'deeplx_url', DEEPLX_URL: 'deeplx_url',
PALM: 'api_key_palm', MAKERSUITE: 'api_key_makersuite',
SERPAPI: 'api_key_serpapi', SERPAPI: 'api_key_serpapi',
TOGETHERAI: 'api_key_togetherai', TOGETHERAI: 'api_key_togetherai',
MISTRALAI: 'api_key_mistralai',
}; };
/** /**
@ -45,6 +46,17 @@ function writeSecret(key, value) {
writeFileAtomicSync(SECRETS_FILE, JSON.stringify(secrets, null, 4), 'utf-8'); writeFileAtomicSync(SECRETS_FILE, JSON.stringify(secrets, null, 4), 'utf-8');
} }
/**
 * Removes a secret from the secrets file, if the file exists.
 * No-op when the file is absent; deleting a missing key still rewrites the file.
 * @param {string} key Secret key to delete
 */
function deleteSecret(key) {
    if (!fs.existsSync(SECRETS_FILE)) {
        return;
    }

    const fileContents = fs.readFileSync(SECRETS_FILE, 'utf-8');
    const secrets = JSON.parse(fileContents);
    delete secrets[key];
    writeFileAtomicSync(SECRETS_FILE, JSON.stringify(secrets, null, 4), 'utf-8');
}
/** /**
* Reads a secret from the secrets file * Reads a secret from the secrets file
* @param {string} key Secret key * @param {string} key Secret key
@ -86,6 +98,13 @@ function readSecretState() {
* @returns {void} * @returns {void}
*/ */
function migrateSecrets(settingsFile) { function migrateSecrets(settingsFile) {
const palmKey = readSecret('api_key_palm');
if (palmKey) {
console.log('Migrating Palm key...');
writeSecret(SECRET_KEYS.MAKERSUITE, palmKey);
deleteSecret('api_key_palm');
}
if (!fs.existsSync(settingsFile)) { if (!fs.existsSync(settingsFile)) {
console.log('Settings file does not exist'); console.log('Settings file does not exist');
return; return;

View File

@ -5,6 +5,23 @@ const { jsonParser } = require('../express-common');
const router = express.Router(); const router = express.Router();
// Cosplay as Firefox
const visitHeaders = {
'Accept': 'text/html',
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:120.0) Gecko/20100101 Firefox/120.0',
'Accept-Language': 'en-US,en;q=0.5',
'Accept-Encoding': 'gzip, deflate, br',
'Connection': 'keep-alive',
'Cache-Control': 'no-cache',
'Pragma': 'no-cache',
'TE': 'trailers',
'DNT': '1',
'Sec-Fetch-Dest': 'document',
'Sec-Fetch-Mode': 'navigate',
'Sec-Fetch-Site': 'none',
'Sec-Fetch-User': '?1',
};
router.post('/search', jsonParser, async (request, response) => { router.post('/search', jsonParser, async (request, response) => {
try { try {
const key = readSecret(SECRET_KEYS.SERPAPI); const key = readSecret(SECRET_KEYS.SERPAPI);
@ -31,4 +48,61 @@ router.post('/search', jsonParser, async (request, response) => {
} }
}); });
// Fetches a web page on behalf of the client and returns its HTML.
// Only plain http(s) URLs on standard ports are accepted, and obvious
// local/IP-literal targets are rejected to reduce SSRF exposure.
router.post('/visit', jsonParser, async (request, response) => {
    try {
        const url = request.body.url;

        if (!url) {
            console.log('No url provided for /visit');
            return response.sendStatus(400);
        }

        try {
            // new URL() throws on relative/invalid URLs, covering those cases.
            const urlObj = new URL(url);

            // Reject non-HTTP URLs
            if (urlObj.protocol !== 'http:' && urlObj.protocol !== 'https:') {
                throw new Error('Invalid protocol');
            }

            // Reject URLs with a non-standard port
            if (urlObj.port !== '') {
                throw new Error('Invalid port');
            }

            // Reject IP literals: dotted IPv4 and IPv6 (IPv6 hostnames keep
            // their colons after parsing, e.g. [::1] -> "[::1]").
            if (urlObj.hostname.match(/^\d+\.\d+\.\d+\.\d+$/) || urlObj.hostname.includes(':')) {
                throw new Error('Invalid hostname');
            }

            // Reject loopback hostnames
            if (urlObj.hostname === 'localhost' || urlObj.hostname.endsWith('.localhost')) {
                throw new Error('Invalid hostname');
            }
        } catch (error) {
            console.log('Invalid url provided for /visit', url);
            return response.sendStatus(400);
        }

        const result = await fetch(url, { headers: visitHeaders });

        if (!result.ok) {
            console.log(`Visit failed ${result.status} ${result.statusText}`);
            return response.sendStatus(500);
        }

        // Only forward HTML documents; anything else is treated as a failure.
        const contentType = String(result.headers.get('content-type'));

        if (!contentType.includes('text/html')) {
            console.log(`Visit failed, content-type is ${contentType}, expected text/html`);
            return response.sendStatus(500);
        }

        const text = await result.text();
        return response.send(text);
    } catch (error) {
        console.log(error);
        return response.sendStatus(500);
    }
});
module.exports = { router }; module.exports = { router };

164
src/endpoints/settings.js Normal file
View File

@ -0,0 +1,164 @@
const fs = require('fs');
const path = require('path');
const express = require('express');
const writeFileAtomicSync = require('write-file-atomic').sync;
const { DIRECTORIES } = require('../constants');
const { getConfigValue, generateTimestamp, removeOldBackups } = require('../util');
const { jsonParser } = require('../express-common');
const { migrateSecrets } = require('./secrets');
const enableExtensions = getConfigValue('enableExtensions', true);
const SETTINGS_FILE = './public/settings.json';
/**
 * Reads every file with the given extension from a directory, in sorted order.
 * JSON files are parsed into objects; other extensions are returned as raw text.
 * Files that cannot be read or parsed are silently skipped.
 * @param {string} directoryPath Directory to read from
 * @param {string} fileExtension Extension filter (default '.json')
 * @returns {any[]} Parsed (or raw) file contents
 */
function readAndParseFromDirectory(directoryPath, fileExtension = '.json') {
    const matchingFiles = fs.readdirSync(directoryPath)
        .filter(name => path.parse(name).ext == fileExtension)
        .sort();

    const results = [];

    for (const name of matchingFiles) {
        try {
            const contents = fs.readFileSync(path.join(directoryPath, name), 'utf-8');
            results.push(fileExtension == '.json' ? JSON.parse(contents) : contents);
        }
        catch {
            // Unreadable or malformed file: skip it
        }
    }

    return results;
}
/**
 * Produces a locale-aware string comparator for Array#sort.
 * The argument is ignored and kept only for call-site compatibility.
 * @param {any} _ Unused
 * @returns {(a: string, b: string) => number} Comparator
 */
function sortByName(_) {
    return (left, right) => left.localeCompare(right);
}
/**
 * Reads preset files from a directory, keeping only those that contain valid JSON.
 * @param {string} directoryPath Directory to scan
 * @param {object} options Read options
 * @param {(a: string, b: string) => number} [options.sortFunction] File name sort order
 * @param {boolean} [options.removeFileExtension] Strip extensions from returned names
 * @param {string} [options.fileExtension] Extension filter (default '.json')
 * @returns {{fileContents: string[], fileNames: string[]}} Raw file texts and their names
 */
function readPresetsFromDirectory(directoryPath, options = {}) {
    const {
        sortFunction,
        removeFileExtension = false,
        fileExtension = '.json',
    } = options;

    const presetFiles = fs.readdirSync(directoryPath)
        .sort(sortFunction)
        .filter(name => path.parse(name).ext == fileExtension);

    const fileContents = [];
    const fileNames = [];

    for (const name of presetFiles) {
        try {
            const raw = fs.readFileSync(path.join(directoryPath, name), 'utf8');
            JSON.parse(raw); // Validate only; the raw text is what gets returned
            fileContents.push(raw);
            fileNames.push(removeFileExtension ? name.replace(/\.[^/.]+$/, '') : name);
        } catch {
            // skip
            console.log(`${name} is not a valid JSON`);
        }
    }

    return { fileContents, fileNames };
}
/**
 * Copies the current settings file into the backups directory under a
 * timestamped name, then prunes old settings backups.
 * Failures are logged but never thrown, so startup continues regardless.
 */
function backupSettings() {
    try {
        if (!fs.existsSync(DIRECTORIES.backups)) {
            fs.mkdirSync(DIRECTORIES.backups);
        }

        const backupFile = path.join(DIRECTORIES.backups, `settings_${generateTimestamp()}.json`);
        fs.copyFileSync(SETTINGS_FILE, backupFile);

        removeOldBackups('settings_');
    } catch (err) {
        console.log('Could not backup settings file', err);
    }
}
const router = express.Router();
// Persists the client-submitted settings object to public/settings.json.
// NOTE(review): on write failure this replies with the raw error object and a
// 200 status — confirm whether clients expect an error status code here.
router.post('/save', jsonParser, function (request, response) {
    try {
        writeFileAtomicSync('public/settings.json', JSON.stringify(request.body, null, 4), 'utf8');
        response.send({ result: 'ok' });
    } catch (err) {
        console.log(err);
        response.send(err);
    }
});
// Wintermute's code
// Returns the full client bootstrap payload: raw settings JSON plus all
// preset collections (NovelAI/OpenAI/TextGen/Kobold), world names, UI themes,
// MovingUI and Quick Reply presets, instruct/context templates, and the
// extensions-enabled flag.
router.post('/get', jsonParser, (request, response) => {
    let settings;
    try {
        // Raw file text; the client parses it itself.
        settings = fs.readFileSync('public/settings.json', 'utf8');
    } catch (e) {
        return response.sendStatus(500);
    }

    // NovelAI Settings
    const { fileContents: novelai_settings, fileNames: novelai_setting_names }
        = readPresetsFromDirectory(DIRECTORIES.novelAI_Settings, {
            sortFunction: sortByName(DIRECTORIES.novelAI_Settings),
            removeFileExtension: true,
        });

    // OpenAI Settings
    const { fileContents: openai_settings, fileNames: openai_setting_names }
        = readPresetsFromDirectory(DIRECTORIES.openAI_Settings, {
            sortFunction: sortByName(DIRECTORIES.openAI_Settings), removeFileExtension: true,
        });

    // TextGenerationWebUI Settings
    const { fileContents: textgenerationwebui_presets, fileNames: textgenerationwebui_preset_names }
        = readPresetsFromDirectory(DIRECTORIES.textGen_Settings, {
            sortFunction: sortByName(DIRECTORIES.textGen_Settings), removeFileExtension: true,
        });

    //Kobold
    const { fileContents: koboldai_settings, fileNames: koboldai_setting_names }
        = readPresetsFromDirectory(DIRECTORIES.koboldAI_Settings, {
            sortFunction: sortByName(DIRECTORIES.koboldAI_Settings), removeFileExtension: true,
        });

    // World Info: names only; content is fetched on demand elsewhere.
    const worldFiles = fs
        .readdirSync(DIRECTORIES.worlds)
        .filter(file => path.extname(file).toLowerCase() === '.json')
        .sort((a, b) => a.localeCompare(b));
    const world_names = worldFiles.map(item => path.parse(item).name);

    const themes = readAndParseFromDirectory(DIRECTORIES.themes);
    const movingUIPresets = readAndParseFromDirectory(DIRECTORIES.movingUI);
    const quickReplyPresets = readAndParseFromDirectory(DIRECTORIES.quickreplies);
    const instruct = readAndParseFromDirectory(DIRECTORIES.instruct);
    const context = readAndParseFromDirectory(DIRECTORIES.context);

    response.send({
        settings,
        koboldai_settings,
        koboldai_setting_names,
        world_names,
        novelai_settings,
        novelai_setting_names,
        openai_settings,
        openai_setting_names,
        textgenerationwebui_presets,
        textgenerationwebui_preset_names,
        themes,
        movingUIPresets,
        quickReplyPresets,
        instruct,
        context,
        enable_extensions: enableExtensions,
    });
});
// Sync for now, but should probably be migrated to async file APIs
/**
 * Module startup: backs up the current settings file and migrates any
 * legacy secrets out of it.
 */
async function init() {
    backupSettings();
    migrateSecrets(SETTINGS_FILE);
}
module.exports = { router, init };

View File

@ -4,9 +4,11 @@ const express = require('express');
const { SentencePieceProcessor } = require('@agnai/sentencepiece-js'); const { SentencePieceProcessor } = require('@agnai/sentencepiece-js');
const tiktoken = require('@dqbd/tiktoken'); const tiktoken = require('@dqbd/tiktoken');
const { Tokenizer } = require('@agnai/web-tokenizers'); const { Tokenizer } = require('@agnai/web-tokenizers');
const { convertClaudePrompt } = require('../chat-completion'); const { convertClaudePrompt, convertGooglePrompt } = require('./prompt-converters');
const { readSecret, SECRET_KEYS } = require('./secrets'); const { readSecret, SECRET_KEYS } = require('./secrets');
const { TEXTGEN_TYPES } = require('../constants');
const { jsonParser } = require('../express-common'); const { jsonParser } = require('../express-common');
const { setAdditionalHeaders } = require('../additional-headers');
/** /**
* @type {{[key: string]: import("@dqbd/tiktoken").Tiktoken}} Tokenizers cache * @type {{[key: string]: import("@dqbd/tiktoken").Tiktoken}} Tokenizers cache
@ -385,6 +387,26 @@ router.post('/ai21/count', jsonParser, async function (req, res) {
} }
}); });
// Counts tokens for a chat prompt via Google's countTokens endpoint
// (generativelanguage.googleapis.com, v1beta). On any failure the endpoint
// degrades to a token count of 0 rather than returning an error status.
router.post('/google/count', jsonParser, async function (req, res) {
    if (!req.body) return res.sendStatus(400);

    const options = {
        method: 'POST',
        headers: {
            accept: 'application/json',
            'content-type': 'application/json',
        },
        // Convert chat-completion messages into Google's `contents` format
        body: JSON.stringify({ contents: convertGooglePrompt(req.body) }),
    };

    try {
        // NOTE(review): req.query.model is interpolated unescaped — presumably
        // always a known model id; confirm callers cannot pass arbitrary values.
        const response = await fetch(`https://generativelanguage.googleapis.com/v1beta/models/${req.query.model}:countTokens?key=${readSecret(SECRET_KEYS.MAKERSUITE)}`, options);
        const data = await response.json();
        return res.send({ 'token_count': data?.totalTokens || 0 });
    } catch (err) {
        console.error(err);
        return res.send({ 'token_count': 0 });
    }
});
router.post('/llama/encode', jsonParser, createSentencepieceEncodingHandler(spp_llama)); router.post('/llama/encode', jsonParser, createSentencepieceEncodingHandler(spp_llama));
router.post('/nerdstash/encode', jsonParser, createSentencepieceEncodingHandler(spp_nerd)); router.post('/nerdstash/encode', jsonParser, createSentencepieceEncodingHandler(spp_nerd));
router.post('/nerdstash_v2/encode', jsonParser, createSentencepieceEncodingHandler(spp_nerd_v2)); router.post('/nerdstash_v2/encode', jsonParser, createSentencepieceEncodingHandler(spp_nerd_v2));
@ -534,6 +556,97 @@ router.post('/openai/count', jsonParser, async function (req, res) {
} }
}); });
// Counts tokens using a remote KoboldCpp server's tokenizer endpoint.
// Responds with { count, ids } on success or { error: true } on any failure.
router.post('/remote/kobold/count', jsonParser, async function (request, response) {
    if (!request.body) {
        return response.sendStatus(400);
    }

    // Coalesce before coercing: String(undefined) is the string 'undefined',
    // which is truthy and defeated the previous `String(x) || ''` fallback.
    const text = String(request.body.text ?? '');
    const baseUrl = String(request.body.url);

    try {
        const args = {
            method: 'POST',
            body: JSON.stringify({ 'prompt': text }),
            headers: { 'Content-Type': 'application/json' },
        };

        // Strip trailing slash before appending the endpoint path
        let url = String(baseUrl).replace(/\/$/, '');
        url += '/extra/tokencount';

        const result = await fetch(url, args);

        if (!result.ok) {
            console.log(`API returned error: ${result.status} ${result.statusText}`);
            return response.send({ error: true });
        }

        const data = await result.json();
        const count = data['value'];
        const ids = data['ids'] ?? [];
        return response.send({ count, ids });
    } catch (error) {
        console.log(error);
        return response.send({ error: true });
    }
});
// Tokenizes text using a remote text-generation backend (ooba, TabbyAPI,
// KoboldCpp, or the legacy API), choosing the endpoint by api_type.
// Responds with { count, ids } or { error: true } on failure.
router.post('/remote/textgenerationwebui/encode', jsonParser, async function (request, response) {
    if (!request.body) {
        return response.sendStatus(400);
    }

    // Coalesce before coercing: String(undefined) is the string 'undefined',
    // which is truthy and defeated the previous `String(x) || ''` fallback.
    const text = String(request.body.text ?? '');
    const baseUrl = String(request.body.url);
    const legacyApi = Boolean(request.body.legacy_api);

    try {
        const args = {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
        };

        setAdditionalHeaders(request, args, null);

        // Convert to string + remove trailing slash + /v1 suffix
        let url = String(baseUrl).replace(/\/$/, '').replace(/\/v1$/, '');

        if (legacyApi) {
            url += '/v1/token-count';
            args.body = JSON.stringify({ 'prompt': text });
        } else {
            switch (request.body.api_type) {
                case TEXTGEN_TYPES.TABBY:
                    url += '/v1/token/encode';
                    args.body = JSON.stringify({ 'text': text });
                    break;
                case TEXTGEN_TYPES.KOBOLDCPP:
                    url += '/api/extra/tokencount';
                    args.body = JSON.stringify({ 'prompt': text });
                    break;
                default:
                    url += '/v1/internal/encode';
                    args.body = JSON.stringify({ 'text': text });
                    break;
            }
        }

        const result = await fetch(url, args);

        if (!result.ok) {
            console.log(`API returned error: ${result.status} ${result.statusText}`);
            return response.send({ error: true });
        }

        const data = await result.json();
        // Guard the whole chain: data?.results[0] would still throw when
        // `results` is missing from a legacy response.
        const count = legacyApi ? data?.results?.[0]?.tokens : (data?.length ?? data?.value);
        const ids = legacyApi ? [] : (data?.tokens ?? data?.ids ?? []);

        return response.send({ count, ids });
    } catch (error) {
        console.log(error);
        return response.send({ error: true });
    }
});
module.exports = { module.exports = {
TEXT_COMPLETION_MODELS, TEXT_COMPLETION_MODELS,
getTokenizerModel, getTokenizerModel,

View File

@ -13,12 +13,13 @@ const { jsonParser } = require('../express-common');
async function getVector(source, text) { async function getVector(source, text) {
switch (source) { switch (source) {
case 'togetherai': case 'togetherai':
case 'mistral':
case 'openai': case 'openai':
return require('../openai-vectors').getOpenAIVector(text, source); return require('../openai-vectors').getOpenAIVector(text, source);
case 'transformers': case 'transformers':
return require('../embedding').getTransformersVector(text); return require('../embedding').getTransformersVector(text);
case 'palm': case 'palm':
return require('../palm-vectors').getPaLMVector(text); return require('../makersuite-vectors').getMakerSuiteVector(text);
} }
throw new Error(`Unknown vector source ${source}`); throw new Error(`Unknown vector source ${source}`);
@ -197,7 +198,7 @@ router.post('/purge', jsonParser, async (req, res) => {
const collectionId = String(req.body.collectionId); const collectionId = String(req.body.collectionId);
const sources = ['transformers', 'openai']; const sources = ['transformers', 'openai', 'palm'];
for (const source of sources) { for (const source of sources) {
const index = await getIndex(collectionId, source, false); const index = await getIndex(collectionId, source, false);

View File

@ -6,15 +6,15 @@ const { SECRET_KEYS, readSecret } = require('./endpoints/secrets');
* @param {string} text - The text to get the vector for * @param {string} text - The text to get the vector for
* @returns {Promise<number[]>} - The vector for the text * @returns {Promise<number[]>} - The vector for the text
*/ */
async function getPaLMVector(text) { async function getMakerSuiteVector(text) {
const key = readSecret(SECRET_KEYS.PALM); const key = readSecret(SECRET_KEYS.MAKERSUITE);
if (!key) { if (!key) {
console.log('No PaLM key found'); console.log('No MakerSuite key found');
throw new Error('No PaLM key found'); throw new Error('No MakerSuite key found');
} }
const response = await fetch(`https://generativelanguage.googleapis.com/v1beta2/models/embedding-gecko-001:embedText?key=${key}`, { const response = await fetch(`https://generativelanguage.googleapis.com/v1beta/models/embedding-gecko-001:embedText?key=${key}`, {
method: 'POST', method: 'POST',
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
@ -26,8 +26,8 @@ async function getPaLMVector(text) {
if (!response.ok) { if (!response.ok) {
const text = await response.text(); const text = await response.text();
console.log('PaLM request failed', response.statusText, text); console.log('MakerSuite request failed', response.statusText, text);
throw new Error('PaLM request failed'); throw new Error('MakerSuite request failed');
} }
const data = await response.json(); const data = await response.json();
@ -39,5 +39,5 @@ async function getPaLMVector(text) {
} }
module.exports = { module.exports = {
getPaLMVector, getMakerSuiteVector,
}; };

View File

@ -2,7 +2,7 @@
* When applied, this middleware will ensure the request contains the required header for basic authentication and only * When applied, this middleware will ensure the request contains the required header for basic authentication and only
* allow access to the endpoint after successful authentication. * allow access to the endpoint after successful authentication.
*/ */
const { getConfig } = require('./../util.js'); const { getConfig } = require('../util.js');
const unauthorizedResponse = (res) => { const unauthorizedResponse = (res) => {
res.set('WWW-Authenticate', 'Basic realm="SillyTavern", charset="UTF-8"'); res.set('WWW-Authenticate', 'Basic realm="SillyTavern", charset="UTF-8"');

View File

@ -0,0 +1,63 @@
const path = require('path');
const fs = require('fs');
const ipaddr = require('ipaddr.js');
const ipMatching = require('ip-matching');
const { color, getConfigValue } = require('../util');
// Optional newline-delimited whitelist override file in the working directory
const whitelistPath = path.join(process.cwd(), './whitelist.txt');
// Allowed IPs/patterns from config; replaced wholesale by whitelist.txt if present
let whitelist = getConfigValue('whitelist', []);
// IPs that have already been greeted/logged during this process lifetime
let knownIPs = new Set();

const listen = getConfigValue('listen', false);
const whitelistMode = getConfigValue('whitelistMode', true);

if (fs.existsSync(whitelistPath)) {
    try {
        let whitelistTxt = fs.readFileSync(whitelistPath, 'utf-8');
        whitelist = whitelistTxt.split('\n').filter(ip => ip).map(ip => ip.trim());
    } catch (e) {
        // Ignore errors that may occur when reading the whitelist (e.g. permissions)
    }
}
/**
 * Extracts the client IP address from an incoming request.
 * IPv4-mapped IPv6 addresses (::ffff:a.b.c.d) are unwrapped to plain IPv4.
 * @param {import('express').Request} req Incoming request
 * @returns {string} Normalized client IP address string
 */
function getIpFromRequest(req) {
    const parsed = ipaddr.parse(req.connection.remoteAddress);

    // Check if the IP address is IPv4-mapped IPv6 address
    if (parsed.kind() === 'ipv6' && parsed instanceof ipaddr.IPv6 && parsed.isIPv4MappedAddress()) {
        return parsed.toIPv4Address().toString();
    }

    return parsed.toString();
}
/**
 * Express middleware enforcing the IP whitelist.
 * When listening on the network, first-time client IPs are logged to the
 * console and appended to access.log. When whitelist mode is enabled, any
 * client whose IP matches no whitelist pattern is rejected with a 403.
 */
const whitelistMiddleware = function (req, res, next) {
    const clientIp = getIpFromRequest(req);

    if (listen && !knownIPs.has(clientIp)) {
        const userAgent = req.headers['user-agent'];
        console.log(color.yellow(`New connection from ${clientIp}; User Agent: ${userAgent}\n`));
        knownIPs.add(clientIp);

        // Write access log
        const timestamp = new Date().toISOString();
        const log = `${timestamp} ${clientIp} ${userAgent}\n`;
        // Fire-and-forget append; a failed log line must not block the request.
        fs.appendFile('access.log', log, (err) => {
            if (err) {
                console.error('Failed to write access log:', err);
            }
        });
    }

    //clientIp = req.connection.remoteAddress.split(':').pop();
    if (whitelistMode === true && !whitelist.some(x => ipMatching.matches(clientIp, ipMatching.getMatch(x)))) {
        console.log(color.red('Forbidden: Connection attempt from ' + clientIp + '. If you are attempting to connect, please add your IP address in whitelist or disable whitelist mode in config.yaml in root of SillyTavern folder.\n'));
        return res.status(403).send('<b>Forbidden</b>: Connection attempt from <b>' + clientIp + '</b>. If you are attempting to connect, please add your IP address in whitelist or disable whitelist mode in config.yaml in root of SillyTavern folder.');
    }

    next();
};
module.exports = whitelistMiddleware;

View File

@ -1,6 +1,24 @@
const fetch = require('node-fetch').default; const fetch = require('node-fetch').default;
const { SECRET_KEYS, readSecret } = require('./endpoints/secrets'); const { SECRET_KEYS, readSecret } = require('./endpoints/secrets');
// Supported embedding providers, keyed by the `source` name callers pass in.
// Each entry maps to the API host, the secret key slot, and the embedding model.
const SOURCES = {
    'mistral': {
        secretKey: SECRET_KEYS.MISTRAL,
        url: 'api.mistral.ai',
        model: 'mistral-embed',
    },
    'openai': {
        secretKey: SECRET_KEYS.OPENAI,
        url: 'api.openai.com',
        model: 'text-embedding-ada-002',
    },
    'togetherai': {
        // NOTE(review): verify this host — Together's API is typically served
        // from api.together.xyz, not api.togetherai.xyz. TODO confirm.
        secretKey: SECRET_KEYS.TOGETHERAI,
        url: 'api.togetherai.xyz',
        model: 'togethercomputer/GPT-NeoXT-Chat-Base-20B',
    },
};
/** /**
* Gets the vector for the given text from an OpenAI compatible endpoint. * Gets the vector for the given text from an OpenAI compatible endpoint.
* @param {string} text - The text to get the vector for * @param {string} text - The text to get the vector for
@ -8,34 +26,23 @@ const { SECRET_KEYS, readSecret } = require('./endpoints/secrets');
* @returns {Promise<number[]>} - The vector for the text * @returns {Promise<number[]>} - The vector for the text
*/ */
async function getOpenAIVector(text, source) { async function getOpenAIVector(text, source) {
const config = SOURCES[source];
// dictionary of sources to endpoints with source as key and endpoint, model and secret key as value if (!config) {
const endpoints = { console.log('Unknown source', source);
'togetherai': { throw new Error('Unknown source');
endpoint: 'https://api.togetherai.xyz/v1/embeddings', // is this correct?
model: 'togethercomputer/GPT-NeoXT-Chat-Base-20B',
secret: SECRET_KEYS.TOGETHERAI,
},
'openai': {
endpoint: 'https://api.openai.com/v1/embeddings',
model: 'text-embedding-ada-002',
secret: SECRET_KEYS.OPENAI,
},
'mistral': {
endpoint: 'https://api.mistral.ai/v1/embeddings',
model: 'mistral-embed',
secret: SECRET_KEYS.MISTRAL,
},
};
const key = readSecret(endpoints[source].secret);
if (!key) {
console.log('No %s key found.', source);
throw new Error('No ${source} key found.');
} }
const response = await fetch(endpoints[source].endpoint, { const key = readSecret(config.secretKey);
if (!key) {
console.log('No API key found');
throw new Error('No API key found');
}
const url = config.url;
const response = await fetch(`https://${url}/v1/embeddings`, {
method: 'POST', method: 'POST',
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
@ -43,22 +50,22 @@ async function getOpenAIVector(text, source) {
}, },
body: JSON.stringify({ body: JSON.stringify({
input: text, input: text,
model: endpoints[source].model, model: config.model,
}), }),
}); });
if (!response.ok) { if (!response.ok) {
const text = await response.text(); const text = await response.text();
console.log('${source} request failed', response.statusText, text); console.log('API request failed', response.statusText, text);
throw new Error('${source} request failed'); throw new Error('API request failed');
} }
const data = await response.json(); const data = await response.json();
const vector = data?.data[0]?.embedding; const vector = data?.data[0]?.embedding;
if (!Array.isArray(vector)) { if (!Array.isArray(vector)) {
console.log('${source} response was not an array'); console.log('API response was not an array');
throw new Error('${source} response was not an array'); throw new Error('API response was not an array');
} }
return vector; return vector;

162
src/plugin-loader.js Normal file
View File

@ -0,0 +1,162 @@
const fs = require('fs');
const path = require('path');
const { getConfigValue } = require('./util');
const enableServerPlugins = getConfigValue('enableServerPlugins', false);
// Shared extension check for the module-type predicates below.
const hasExtension = (file, ext) => path.extname(file) === ext;

/**
 * Determine if a file is a CommonJS module.
 * @param {string} file Path to file
 * @returns {boolean} True if file is a CommonJS module
 */
const isCommonJS = (file) => hasExtension(file, '.js');

/**
 * Determine if a file is an ECMAScript module.
 * @param {string} file Path to file
 * @returns {boolean} True if file is an ECMAScript module
 */
const isESModule = (file) => hasExtension(file, '.mjs');
/**
 * Load and initialize server plugins from a directory if they are enabled.
 * Subdirectories are treated as packaged plugins; loose .js/.mjs files are
 * loaded directly; everything else is ignored.
 * @param {import('express').Express} app Express app
 * @param {string} pluginsPath Path to plugins directory
 * @returns {Promise<any>} Promise that resolves when all plugins are loaded
 */
async function loadPlugins(app, pluginsPath) {
    // Server plugins are disabled.
    if (!enableServerPlugins) {
        return;
    }

    // Plugins directory does not exist.
    if (!fs.existsSync(pluginsPath)) {
        return;
    }

    for (const entry of fs.readdirSync(pluginsPath)) {
        const entryPath = path.join(pluginsPath, entry);

        if (fs.statSync(entryPath).isDirectory()) {
            await loadFromDirectory(app, entryPath);
        } else if (isCommonJS(entry) || isESModule(entry)) {
            // Only JavaScript files are loadable as plugins.
            await loadFromFile(app, entryPath);
        }
    }
}
}
/**
 * Loads and initializes a plugin from a directory, trying in order:
 * an npm package (package.json "main"), index.js (CommonJS), index.mjs (ESM).
 * Stops at the first candidate that initializes successfully.
 * @param {import('express').Express} app Express app
 * @param {string} pluginDirectoryPath Path to plugin directory
 * @returns {Promise<void>} Resolves when the directory has been handled
 */
async function loadFromDirectory(app, pluginDirectoryPath) {
    const files = fs.readdirSync(pluginDirectoryPath);

    // No plugins to load.
    if (files.length === 0) {
        return;
    }

    // Plugin is an npm package.
    const packageJsonFilePath = path.join(pluginDirectoryPath, 'package.json');
    if (fs.existsSync(packageJsonFilePath)) {
        if (await loadFromPackage(app, packageJsonFilePath)) {
            return;
        }
    }

    // Plugin is a CommonJS module.
    const cjsFilePath = path.join(pluginDirectoryPath, 'index.js');
    if (fs.existsSync(cjsFilePath)) {
        if (await loadFromFile(app, cjsFilePath)) {
            return;
        }
    }

    // Plugin is an ECMAScript module.
    const esmFilePath = path.join(pluginDirectoryPath, 'index.mjs');
    if (fs.existsSync(esmFilePath)) {
        if (await loadFromFile(app, esmFilePath)) {
            return;
        }
    }
}
/**
 * Loads and initializes a plugin from an npm package.
 * @param {import('express').Express} app Express app
 * @param {string} packageJsonPath Path to package.json file
 * @returns {Promise<boolean>} Promise that resolves to true if plugin was loaded successfully
 */
async function loadFromPackage(app, packageJsonPath) {
    try {
        const manifest = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));

        // Only packages declaring a "main" entry point are loadable.
        if (manifest.main) {
            const entryPoint = path.join(path.dirname(packageJsonPath), manifest.main);
            return await loadFromFile(app, entryPoint);
        }
    } catch (error) {
        console.error(`Failed to load plugin from ${packageJsonPath}: ${error}`);
    }

    return false;
}
/**
 * Loads and initializes a plugin from a file.
 * @param {import('express').Express} app Express app
 * @param {string} pluginFilePath Path to plugin file
 * @returns {Promise<boolean>} Promise that resolves to true if plugin was loaded successfully
 */
async function loadFromFile(app, pluginFilePath) {
    try {
        const pluginModule = await getPluginModule(pluginFilePath);
        console.log(`Initializing plugin from ${pluginFilePath}`);
        const initialized = await initPlugin(app, pluginModule);
        return initialized;
    } catch (error) {
        // Any failure (unreadable file, bad module, throwing init) is non-fatal.
        console.error(`Failed to load plugin from ${pluginFilePath}: ${error}`);
        return false;
    }
}
/**
 * Initializes a plugin module.
 * @param {import('express').Express} app Express app
 * @param {any} plugin Plugin module
 * @returns {Promise<boolean>} Promise that resolves to true if plugin was initialized successfully
 */
async function initPlugin(app, plugin) {
    // A plugin without an init function is not considered initialized.
    if (typeof plugin.init !== 'function') {
        return false;
    }

    await plugin.init(app);
    return true;
}
/**
 * Loads a module from a file depending on the module type.
 * @param {string} pluginFilePath Path to plugin file
 * @returns {Promise<any>} Promise that resolves to plugin module
 * @throws {Error} If the file is neither a .js nor a .mjs module
 */
async function getPluginModule(pluginFilePath) {
    // The two extension checks are mutually exclusive, so order is irrelevant.
    if (isESModule(pluginFilePath)) {
        return await import(pluginFilePath);
    }

    if (isCommonJS(pluginFilePath)) {
        return require(pluginFilePath);
    }

    throw new Error(`Unsupported module type in ${pluginFilePath}`);
}
module.exports = {
loadPlugins,
};

View File

@ -6,6 +6,7 @@ const yauzl = require('yauzl');
const mime = require('mime-types'); const mime = require('mime-types');
const yaml = require('yaml'); const yaml = require('yaml');
const { default: simpleGit } = require('simple-git'); const { default: simpleGit } = require('simple-git');
const { Readable } = require('stream');
const { DIRECTORIES } = require('./constants'); const { DIRECTORIES } = require('./constants');
@ -346,6 +347,43 @@ function getImages(path) {
.sort(Intl.Collator().compare); .sort(Intl.Collator().compare);
} }
/**
 * Pipe a fetch() response to an Express.js Response, including status code.
 * @param {import('node-fetch').Response} from The Fetch API response to pipe from.
 * @param {Express.Response} to The Express response to pipe to.
 */
function forwardFetchResponse(from, to) {
    let statusCode = from.status;
    let statusText = from.statusText;

    if (!from.ok) {
        console.log(`Streaming request failed with status ${statusCode} ${statusText}`);
    }

    // Avoid sending 401 responses as they reset the client Basic auth.
    // This can produce an interesting artifact as "400 Unauthorized", but it's not out of spec.
    // https://www.rfc-editor.org/rfc/rfc9110.html#name-overview-of-status-codes
    // "The reason phrases listed here are only recommendations -- they can be replaced by local
    // equivalents or left out altogether without affecting the protocol."
    if (statusCode === 401) {
        statusCode = 400;
    }

    to.statusCode = statusCode;
    to.statusMessage = statusText;
    from.body.pipe(to);

    // If the client disconnects, tear down the upstream stream too so the
    // remote request does not keep running.
    to.socket.on('close', function () {
        if (from.body instanceof Readable) from.body.destroy(); // Close the remote stream
        to.end(); // End the Express response
    });

    from.body.on('end', function () {
        console.log('Streaming request finished');
        to.end();
    });
}
module.exports = { module.exports = {
getConfig, getConfig,
getConfigValue, getConfigValue,
@ -365,4 +403,5 @@ module.exports = {
generateTimestamp, generateTimestamp,
removeOldBackups, removeOldBackups,
getImages, getImages,
forwardFetchResponse,
}; };