Merge branch 'staging' into pr/Cohee1207/2711

Commit 64d3ed4680 by RossAscends, 2024-08-26 06:48:00 +09:00
99 changed files with 2658 additions and 781 deletions

View File

@ -55,6 +55,7 @@ module.exports = {
isProbablyReaderable: 'readonly',
ePub: 'readonly',
diff_match_patch: 'readonly',
SillyTavern: 'readonly',
},
},
],

3
.github/readme.md vendored
View File

@ -246,7 +246,6 @@ You will need two mandatory directory mappings and a port mapping to allow Silly
##### Additional Settings
- [TimeZone] - The timezone your instance should use. This is useful for making logs match your local time for easier troubleshooting. Use your [TZ identifier](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones).
- [DockerNet] - The docker network that the container should be created with a connection to. If you don't know what it is, see the [official Docker documentation](https://docs.docker.com/reference/cli/docker/network/).
- [version] - On the right-hand side of this GitHub page, you'll see "Packages". Select the "sillytavern" package and you'll see the image versions. The image tag "latest" will keep you up to date with the current release. You can also use the "staging" and "release" tags, which point to the nightly images of the respective branches, but this may not be appropriate if you use extensions that could break and need time to update.
@ -255,7 +254,7 @@ You will need two mandatory directory mappings and a port mapping to allow Silly
1. Open your Command Line
2. Run the following command
`docker create --name='sillytavern' --net='[DockerNet]' -e TZ="[TimeZone]" -p '8000:8000/tcp' -v '[plugins]':'/home/node/app/plugins':'rw' -v '[config]':'/home/node/app/config':'rw' -v '[data]':'/home/node/app/data':'rw' 'ghcr.io/sillytavern/sillytavern:[version]'`
`docker create --name='sillytavern' --net='[DockerNet]' -p '8000:8000/tcp' -v '[plugins]':'/home/node/app/plugins':'rw' -v '[config]':'/home/node/app/config':'rw' -v '[data]':'/home/node/app/data':'rw' 'ghcr.io/sillytavern/sillytavern:[version]'`
> Note that 8000 is the default listening port. Don't forget to use the appropriate port if you change it in the config.
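For illustration only, the same command with hypothetical values filled in (the default `bridge` network, example host paths, and the `latest` tag) could look like:
`docker create --name='sillytavern' --net='bridge' -p '8000:8000/tcp' -v '/srv/sillytavern/plugins':'/home/node/app/plugins':'rw' -v '/srv/sillytavern/config':'/home/node/app/config':'rw' -v '/srv/sillytavern/data':'/home/node/app/data':'rw' 'ghcr.io/sillytavern/sillytavern:latest'`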

View File

@ -34,3 +34,8 @@
- What did you do to achieve this?
- How would a reviewer test the change?
6. Mind the license. Your contributions will be licensed under the GNU Affero General Public License. If you don't know what that implies, consult your lawyer.
## Further reading
1. [How to write UI extensions](https://docs.sillytavern.app/for-contributors/writing-extensions/)
2. [How to write server plugins](https://docs.sillytavern.app/for-contributors/server-plugins)

View File

@ -1,4 +1,4 @@
FROM node:lts-alpine3.18
FROM node:lts-alpine3.19
# Arguments
ARG APP_HOME=/home/node/app

View File

@ -4,8 +4,22 @@ dataRoot: ./data
# -- SERVER CONFIGURATION --
# Listen for incoming connections
listen: false
# Enables IPv4 and/or IPv6 protocols. At least one must be enabled!
protocol:
ipv4: true
ipv6: false
# Prefers IPv6 for DNS. Enable this on ISPs that don't have issues with IPv6
dnsPreferIPv6: false
# The hostname that autorun opens.
# - Use "auto" to let the server decide
# - Use options like 'localhost', 'st.example.com'
autorunHostname: "auto"
# Server port
port: 8000
# Overrides the port for autorun in browser.
# - Use -1 to use the server port.
# - Specify a port to override the default.
autorunPortOverride: -1
# -- SECURITY CONFIGURATION --
# Toggle whitelist mode
whitelistMode: true
@ -13,6 +27,7 @@ whitelistMode: true
enableForwardedWhitelist: true
# Whitelist of allowed IP addresses
whitelist:
- ::1
- 127.0.0.1
# Toggle basic authentication for endpoints
basicAuthMode: false
@ -26,6 +41,11 @@ enableCorsProxy: false
enableUserAccounts: false
# Enable discreet login mode: hides user list on the login screen
enableDiscreetLogin: false
# User session timeout *in seconds* (defaults to 24 hours).
## Set to a positive number to expire session after a certain time of inactivity
## Set to 0 to expire session when the browser is closed
## Set to a negative number to disable session expiration
sessionTimeout: 86400
# Used to sign session cookies. Will be auto-generated if not set
cookieSecret: ''
# Disable CSRF protection - NOT RECOMMENDED
@ -35,6 +55,9 @@ securityOverride: false
# -- ADVANCED CONFIGURATION --
# Open the browser automatically
autorun: true
# Avoids using 'localhost' for autorun in auto mode.
# use if you don't have 'localhost' in your hosts file
avoidLocalhost: false
# Disable thumbnail generation
disableThumbnails: false
# Thumbnail quality (0-100)
@ -67,9 +90,11 @@ whitelistImportDomains:
## headers:
## User-Agent: "Googlebot/2.1 (+http://www.google.com/bot.html)"
requestOverrides: []
# -- PLUGIN CONFIGURATION --
# -- EXTENSIONS CONFIGURATION --
# Enable UI extensions
enableExtensions: true
# Automatically update extensions when a release version changes
enableExtensionsAutoUpdate: true
# Extension settings
extras:
# Disables automatic model download from HuggingFace
@ -98,10 +123,21 @@ mistral:
# Enables prefilling of the reply with the last assistant message in the prompt
# CAUTION: The prefix is echoed into the completion. You may want to use regex to trim it out.
enablePrefix: false
# -- OLLAMA API CONFIGURATION --
ollama:
# Controls how long the model will stay loaded into memory following the request
# * -1: Keep the model loaded indefinitely
# * 0: Unload the model immediately after the request
# * N (any positive number): Keep the model loaded for N seconds after the request.
keepAlive: -1
# -- ANTHROPIC CLAUDE API CONFIGURATION --
claude:
# Enables caching of the system prompt (if supported).
# https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching
# -- IMPORTANT! --
# Use only when the prompt before the chat history is static and doesn't change between requests
# (e.g. it contains no {{random}} macro and no lorebook entries outside of in-chat injections).
# Otherwise, you'll just waste money on cache misses.
enableSystemPromptCache: false
# -- SERVER PLUGIN CONFIGURATION --
enableServerPlugins: false
# User session timeout *in seconds* (defaults to 24 hours).
## Set to a positive number to expire session after a certain time of inactivity
## Set to 0 to expire session when the browser is closed
## Set to a negative number to disable session expiration
sessionTimeout: 86400
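
For context on the `ollama.keepAlive` option added above: Ollama exposes this as a `keep_alive` field on generation requests. The sketch below shows how such a value maps onto a raw Ollama API call; the endpoint and field names follow Ollama's public API, while the base URL and model are placeholders, and SillyTavern's actual backend wiring is not shown in this diff.

```js
// Sketch only: forwarding a keepAlive value like the one configured above.
const response = await fetch('http://localhost:11434/api/generate', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
        model: 'llama3',  // placeholder model name
        prompt: 'Hello!',
        stream: false,
        keep_alive: -1,   // -1 = keep loaded indefinitely, 0 = unload now, N = seconds
    }),
});
console.log((await response.json()).response);
```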

View File

@ -22,7 +22,7 @@
"count_penalty": 0,
"top_p": 1,
"top_k": 0,
"top_a": 1,
"top_a": 0,
"min_p": 0,
"repetition_penalty": 1,
"openai_max_context": 4095,

View File

@ -142,6 +142,7 @@
"timestamps_enabled": true,
"timestamp_model_icon": true,
"mesIDDisplay_enabled": false,
"hideChatAvatars_enabled": false,
"max_context_unlocked": false,
"prefer_character_prompt": true,
"prefer_character_jailbreak": true,

View File

@ -23,6 +23,7 @@
"timestamps_enabled": true,
"timestamp_model_icon": false,
"mesIDDisplay_enabled": true,
"hideChatAvatars_enabled": false,
"message_token_count_enabled": false,
"expand_message_actions": false,
"enableZenSliders": false,

View File

@ -23,6 +23,7 @@
"timestamps_enabled": true,
"timestamp_model_icon": true,
"mesIDDisplay_enabled": true,
"hideChatAvatars_enabled": false,
"message_token_count_enabled": false,
"expand_message_actions": false,
"enableZenSliders": false,

View File

@ -23,6 +23,7 @@
"timestamps_enabled": true,
"timestamp_model_icon": true,
"mesIDDisplay_enabled": false,
"hideChatAvatars_enabled": false,
"message_token_count_enabled": false,
"expand_message_actions": false,
"enableZenSliders": "",

View File

@ -34,4 +34,4 @@
"zoomed_avatar_magnification": true,
"reduced_motion": true,
"compact_input_area": false
}
}

Binary file not shown. (Before: 4.0 KiB, After: 51 KiB)

5
index.d.ts vendored
View File

@ -9,6 +9,11 @@ declare global {
};
}
}
/**
* The root directory for user data.
*/
var DATA_ROOT: string;
}
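
As a rough illustration of the new `DATA_ROOT` global declared above, a server-side consumer might resolve paths against it like the sketch below (the function and usage are hypothetical and assume the server assigns `DATA_ROOT` as a runtime global, which the declaration alone does not guarantee):

```js
// Hypothetical server-side snippet: list entries under the user data root.
const fs = require('node:fs');
const path = require('node:path');

function listUserDataEntries() {
    // DATA_ROOT is assumed to be set as a global string by the server.
    return fs.readdirSync(path.resolve(globalThis.DATA_ROOT));
}
```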
declare module 'express-session' {

90
package-lock.json generated
View File

@ -1,12 +1,12 @@
{
"name": "sillytavern",
"version": "1.12.4",
"version": "1.12.5",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "sillytavern",
"version": "1.12.4",
"version": "1.12.5",
"hasInstallScript": true,
"license": "AGPL-3.0",
"dependencies": {
@ -27,6 +27,7 @@
"google-translate-api-browser": "^3.0.1",
"he": "^1.2.0",
"helmet": "^7.1.0",
"iconv-lite": "^0.6.3",
"ip-matching": "^2.1.2",
"ipaddr.js": "^2.0.1",
"jimp": "^0.22.10",
@ -42,7 +43,7 @@
"rate-limiter-flexible": "^5.0.0",
"response-time": "^2.3.2",
"sanitize-filename": "^1.6.3",
"sillytavern-transformers": "^2.14.6",
"sillytavern-transformers": "2.14.6",
"simple-git": "^3.19.1",
"tiktoken": "^1.0.15",
"vectra": "^0.2.2",
@ -58,7 +59,7 @@
},
"devDependencies": {
"@types/jquery": "^3.5.29",
"eslint": "^8.55.0",
"eslint": "^8.57.0",
"jquery": "^3.6.4"
},
"engines": {
@ -166,9 +167,9 @@
"license": "MIT"
},
"node_modules/@eslint/js": {
"version": "8.55.0",
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.55.0.tgz",
"integrity": "sha512-qQfo2mxH5yVom1kacMtZZJFVdW+E70mqHMJvVg6WTLo+VBuQJ4TojZlfWBjK0ve5BdEeNAVxOsl/nvNMpJOaJA==",
"version": "8.57.0",
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz",
"integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==",
"dev": true,
"license": "MIT",
"engines": {
@ -185,14 +186,15 @@
}
},
"node_modules/@humanwhocodes/config-array": {
"version": "0.11.13",
"resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.13.tgz",
"integrity": "sha512-JSBDMiDKSzQVngfRjOdFXgFfklaXI4K9nLF49Auh21lmBWRLIK3+xTErTWD4KU54pb6coM6ESE7Awz/FNU3zgQ==",
"version": "0.11.14",
"resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz",
"integrity": "sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==",
"deprecated": "Use @eslint/config-array instead",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"@humanwhocodes/object-schema": "^2.0.1",
"debug": "^4.1.1",
"@humanwhocodes/object-schema": "^2.0.2",
"debug": "^4.3.1",
"minimatch": "^3.0.5"
},
"engines": {
@ -200,9 +202,9 @@
}
},
"node_modules/@humanwhocodes/config-array/node_modules/debug": {
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
"integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
"version": "4.3.6",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz",
"integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==",
"dev": true,
"license": "MIT",
"dependencies": {
@ -239,9 +241,10 @@
}
},
"node_modules/@humanwhocodes/object-schema": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.1.tgz",
"integrity": "sha512-dvuCeX5fC9dXgJn9t+X5atfmgQAzUOWqS1254Gh0m6i8wKd10ebXkfNKiRK+1GWi/yTvvLDHpoxLr0xxxeslWw==",
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz",
"integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==",
"deprecated": "Use @eslint/object-schema instead",
"dev": true,
"license": "BSD-3-Clause"
},
@ -1391,12 +1394,11 @@
"license": "MIT"
},
"node_modules/axios": {
"version": "1.6.1",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.6.1.tgz",
"integrity": "sha512-vfBmhDpKafglh0EldBEbVuoe7DyAavGSLWhuSm5ZSEKQnHhBf0xAAwybbNH1IkrJNGnS/VG4I5yxig1pCEXE4g==",
"license": "MIT",
"version": "1.7.4",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz",
"integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==",
"dependencies": {
"follow-redirects": "^1.15.0",
"follow-redirects": "^1.15.6",
"form-data": "^4.0.0",
"proxy-from-env": "^1.1.0"
}
@ -1491,6 +1493,18 @@
"node": ">= 0.8"
}
},
"node_modules/body-parser/node_modules/iconv-lite": {
"version": "0.4.24",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
"integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
"license": "MIT",
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/boolbase": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz",
@ -2456,17 +2470,17 @@
}
},
"node_modules/eslint": {
"version": "8.55.0",
"resolved": "https://registry.npmjs.org/eslint/-/eslint-8.55.0.tgz",
"integrity": "sha512-iyUUAM0PCKj5QpwGfmCAG9XXbZCWsqP/eWAWrG/W0umvjuLRBECwSFdt+rCntju0xEH7teIABPwXpahftIaTdA==",
"version": "8.57.0",
"resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz",
"integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.2.0",
"@eslint-community/regexpp": "^4.6.1",
"@eslint/eslintrc": "^2.1.4",
"@eslint/js": "8.55.0",
"@humanwhocodes/config-array": "^0.11.13",
"@eslint/js": "8.57.0",
"@humanwhocodes/config-array": "^0.11.14",
"@humanwhocodes/module-importer": "^1.0.1",
"@nodelib/fs.walk": "^1.2.8",
"@ungap/structured-clone": "^1.2.0",
@ -3281,12 +3295,12 @@
}
},
"node_modules/iconv-lite": {
"version": "0.4.24",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
"integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
"version": "0.6.3",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz",
"integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
"license": "MIT",
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3"
"safer-buffer": ">= 2.1.2 < 3.0.0"
},
"engines": {
"node": ">=0.10.0"
@ -4617,6 +4631,18 @@
"node": ">= 0.8"
}
},
"node_modules/raw-body/node_modules/iconv-lite": {
"version": "0.4.24",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
"integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
"license": "MIT",
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/readable-stream": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz",

View File

@ -17,6 +17,7 @@
"google-translate-api-browser": "^3.0.1",
"he": "^1.2.0",
"helmet": "^7.1.0",
"iconv-lite": "^0.6.3",
"ip-matching": "^2.1.2",
"ipaddr.js": "^2.0.1",
"jimp": "^0.22.10",
@ -32,7 +33,7 @@
"rate-limiter-flexible": "^5.0.0",
"response-time": "^2.3.2",
"sanitize-filename": "^1.6.3",
"sillytavern-transformers": "^2.14.6",
"sillytavern-transformers": "2.14.6",
"simple-git": "^3.19.1",
"tiktoken": "^1.0.15",
"vectra": "^0.2.2",
@ -70,7 +71,7 @@
"type": "git",
"url": "https://github.com/SillyTavern/SillyTavern.git"
},
"version": "1.12.4",
"version": "1.12.5",
"scripts": {
"start": "node server.js",
"start:no-csrf": "node server.js --disableCsrf",
@ -90,7 +91,7 @@
"main": "server.js",
"devDependencies": {
"@types/jquery": "^3.5.29",
"eslint": "^8.55.0",
"eslint": "^8.57.0",
"jquery": "^3.6.4"
}
}

View File

@ -99,6 +99,6 @@
}
#bulk_tag_shadow_popup #bulk_tag_popup #dialogue_popup_controls .menu_button {
width: 100px;
width: unset;
padding: 0.25em;
}

View File

@ -120,6 +120,14 @@
flex-wrap: wrap;
}
.world_entry .inline-drawer-header {
cursor: initial;
}
.world_entry .killSwitch {
cursor: pointer;
}
.world_entry_form_control input[type=button] {
cursor: pointer;
}
@ -173,6 +181,10 @@
width: 7em;
}
.world_entry .killSwitch.fa-toggle-on {
color: var(--SmartThemeQuoteColor);
}
.wi-card-entry {
border: 1px solid;
border-color: var(--SmartThemeBorderColor);

5
public/global.d.ts vendored
View File

@ -14,6 +14,11 @@ declare var isProbablyReaderable;
declare var ePub;
declare var ai;
declare var SillyTavern: {
getContext(): any;
llm: any;
};
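
For reference, the `SillyTavern` global declared here is the usual entry point for UI extensions. A minimal sketch (the field read from the context is an assumption for illustration, since this declaration types it as `any`):

```js
// Minimal UI-extension sketch using the SillyTavern global declared above.
// getContext() returns the shared app context object; the "chat" array read
// here is an assumption for illustration.
const context = SillyTavern.getContext();
console.log(`Current chat has ${context.chat.length} messages.`);
```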
// Jquery plugins
interface JQuery {
nanogallery2(options?: any): JQuery;

Binary file not shown. (Before: 5.4 KiB, After: 52 KiB)

View File

@ -0,0 +1,3 @@
<svg id="Layer_2" data-name="Layer 2" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 236.38 282.41">
<path d="M126.55,0v54.44l-79.87,33.76v93.95l27.53-12.94,43.08,31.09.04-.05v.09l55.21-31.44.13-.08v-80.06l-55.34,24.92v80.2l-42.55-30.7h-.02s0-81.16,0-81.16l57.02-24.11V9.23l93.54,56.12v22.51l-24.34,11.53,1.84,90.56-88.45,51.47-.13.08v34.46L5.23,198.97v-65.56H0v66.92c0,.85.41,1.64,1.11,2.14l113.13,79.91v.05l.04-.02h0s0,0,0,0l121.97-73.54.13-.08v-126.13l-5.84,2.76v-22.94h-.3l.11-.18L126.55,0Z" />
</svg>

After: 509 B (new file)

View File

@ -383,7 +383,7 @@
Max Response Length (tokens)
</div>
<div class="wide100p">
<input type="number" id="openai_max_tokens" name="openai_max_tokens" class="text_pole" min="50" max="8000">
<input type="number" id="openai_max_tokens" name="openai_max_tokens" class="text_pole" min="1" max="16384">
</div>
</div>
<div class="range-block" data-source="openai,custom">
@ -1823,10 +1823,16 @@
</div>
<div data-newbie-hidden class="range-block" data-source="claude">
<div class="wide100p">
<span id="claude_assistant_prefill_text" data-i18n="Assistant Prefill">Assistant Prefill</span>
<textarea id="claude_assistant_prefill" class="text_pole textarea_compact autoSetHeight" name="assistant_prefill" rows="3" maxlength="10000" data-i18n="[placeholder]Start Claude's answer with..." placeholder="Start Claude's answer with..."></textarea>
<span id="claude_assistant_impersonation_text" data-i18n="Assistant Impersonation Prefill">Assistant Impersonation Prefill</span>
<textarea id="claude_assistant_impersonation" class="text_pole textarea_compact autoSetHeight" name="assistant_impersonation" rows="3" maxlength="10000" data-i18n="[placeholder]Start Claude's answer with..." placeholder="Start Claude's answer with..."></textarea>
<div class="flex-container alignItemsCenter">
<span id="claude_assistant_prefill_text" data-i18n="Assistant Prefill">Assistant Prefill</span>
<i class="editor_maximize fa-solid fa-maximize right_menu_button" data-for="claude_assistant_prefill" title="Expand the editor" data-i18n="[title]Expand the editor"></i>
</div>
<textarea id="claude_assistant_prefill" class="text_pole textarea_compact" name="assistant_prefill" rows="6" maxlength="100000" data-i18n="[placeholder]Start Claude's answer with..." placeholder="Start Claude's answer with..."></textarea>
<div class="flex-container alignItemsCenter">
<span id="claude_assistant_impersonation_text" data-i18n="Assistant Impersonation Prefill">Assistant Impersonation Prefill</span>
<i class="editor_maximize fa-solid fa-maximize right_menu_button" data-for="claude_assistant_impersonation" title="Expand the editor" data-i18n="[title]Expand the editor"></i>
</div>
<textarea id="claude_assistant_impersonation" class="text_pole textarea_compact" name="assistant_impersonation" rows="6" maxlength="100000" data-i18n="[placeholder]Start Claude's answer with..." placeholder="Start Claude's answer with..."></textarea>
</div>
<label for="claude_use_sysprompt" class="checkbox_label widthFreeExpand">
<input id="claude_use_sysprompt" type="checkbox" />
@ -2427,10 +2433,11 @@
<optgroup>
<option value="01ai">01.AI (Yi)</option>
<option value="ai21">AI21</option>
<option value="blockentropy">Block Entropy</option>
<option value="claude">Claude</option>
<option value="cohere">Cohere</option>
<option value="groq">Groq</option>
<option value="makersuite">Google MakerSuite</option>
<option value="makersuite">Google AI Studio</option>
<option value="mistralai">MistralAI</option>
<option value="openrouter">OpenRouter</option>
<option value="perplexity">Perplexity</option>
@ -2570,7 +2577,9 @@
</optgroup>
<optgroup label="GPT-4o">
<option value="gpt-4o">gpt-4o</option>
<option value="gpt-4o-2024-08-06">gpt-4o-2024-08-06</option>
<option value="gpt-4o-2024-05-13">gpt-4o-2024-05-13</option>
<option value="chatgpt-4o-latest">chatgpt-4o-latest</option>
</optgroup>
<optgroup label="gpt-4o-mini">
<option value="gpt-4o-mini">gpt-4o-mini</option>
@ -2791,7 +2800,7 @@
</div>
</form>
<form id="makersuite_form" data-source="makersuite" action="javascript:void(null);" method="post" enctype="multipart/form-data">
<h4 data-i18n="MakerSuite API Key">MakerSuite API Key</h4>
<h4 data-i18n="Google AI Studio API Key">Google AI Studio API Key</h4>
<div class="flex-container">
<input id="api_key_makersuite" name="api_key_makersuite" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
<div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_makersuite"></div>
@ -2899,10 +2908,13 @@
</div>
<h4 data-i18n="Perplexity Model">Perplexity Model</h4>
<select id="model_perplexity_select">
<optgroup label="Perplexity Models">
<optgroup label="Perplexity Sonar Models">
<option value="llama-3.1-sonar-small-128k-online">llama-3.1-sonar-small-128k-online</option>
<option value="llama-3.1-sonar-small-128k-chat">llama-3.1-sonar-small-128k-chat</option>
<option value="llama-3.1-sonar-large-128k-online">llama-3.1-sonar-large-128k-online</option>
<option value="llama-3.1-sonar-huge-128k-online">llama-3.1-sonar-huge-128k-online</option>
</optgroup>
<optgroup label="Perplexity Chat Models">
<option value="llama-3.1-sonar-small-128k-chat">llama-3.1-sonar-small-128k-chat</option>
<option value="llama-3.1-sonar-large-128k-chat">llama-3.1-sonar-large-128k-chat</option>
</optgroup>
<optgroup label="Open-Source Models">
@ -2951,6 +2963,20 @@
</select>
</div>
</form>
<form id="blockentropy_form" data-source="blockentropy">
<h4 data-i18n="Block Entropy API Key">Block Entropy API Key</h4>
<div class="flex-container">
<input id="api_key_blockentropy" name="api_key_blockentropy" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
<div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_blockentropy"></div>
</div>
<div data-for="api_key_blockentropy" class="neutral_warning" data-i18n="For privacy reasons, your API key will be hidden after you reload the page.">
For privacy reasons, your API key will be hidden after you reload the page.
</div>
<h4 data-i18n="Select a Model">Select a Model</h4>
<div class="flex-container">
<select id="model_blockentropy_select" class="text_pole"></select>
</div>
</form>
<form id="custom_form" data-source="custom">
<h4 data-i18n="Custom Endpoint (Base URL)">Custom Endpoint (Base URL)</h4>
<div class="flex-container">
@ -3485,6 +3511,7 @@
<div class="alignitemscenter flex-container flexFlowColumn flexGrow flexShrink gap0 flexBasis48p" title="Scan chronologically until reached min entries or token budget." data-i18n="[title]Scan chronologically until reached min entries or token budget.">
<small>
<span data-i18n="Min Activations">Min Activations</span>
<div class="fa-solid fa-triangle-exclamation opacity50p" data-i18n="[title](disabled when max recursion steps are used)" title="(disabled when max recursion steps are used)"></div>
</small>
<input class="neo-range-slider" type="range" id="world_info_min_activations" name="world_info_min_activations" min="0" max="100" step="1">
<input class="neo-range-input" type="number" min="0" max="100" step="1" data-for="world_info_min_activations" id="world_info_min_activations_counter">
@ -3498,6 +3525,14 @@
<input class="neo-range-slider" type="range" id="world_info_min_activations_depth_max" name="volume" min="0" max="100" step="1">
<input class="neo-range-input" type="number" min="0" max="100" step="1" data-for="world_info_min_activations_depth_max" id="world_info_min_activations_depth_max_counter">
</div>
<div class="alignitemscenter flex-container flexFlowColumn flexGrow flexShrink gap0 flexBasis48p" title="Cap the number of entry activation recursions" data-i18n="[title]Cap the number of entry activation recursions">
<small>
<span data-i18n="Max Recursion Steps">Max Recursion Steps</span>
<div class="fa-solid fa-triangle-exclamation opacity50p" data-i18n="[title]0 = unlimited, 1 = scans once and doesn't recurse, 2 = scans once and recurses once, etc\n(disabled when min activations are used)" title="0 = unlimited, 1 = scans once and doesn't recurse, 2 = scans once and recurses once, etc&#10;(disabled when min activations are used)"></div>
</small>
<input class="neo-range-slider" type="range" id="world_info_max_recursion_steps" name="world_info_max_recursion_steps" min="0" max="10" step="1">
<input class="neo-range-input" type="number" min="0" max="10" step="1" data-for="world_info_max_recursion_steps" id="world_info_max_recursion_steps_counter">
</div>
<div class="alignitemscenter flex-container flexFlowColumn flexGrow flexShrink flexBasis48p">
<small data-i18n="Insertion Strategy">
@ -3761,8 +3796,8 @@
<span data-i18n="Font Scale">Font Scale</span>
<div class="fa-solid fa-circle-info opacity50p" data-i18n="[title]Font size" title="Font size"></div>
</small>
<input class="neo-range-slider" type="range" id="font_scale" name="font_scale" min="0.8" max="1.2" step="0.01">
<input class="neo-range-input" type="number" min="0.8" max="1.2" step="0.01" data-for="font_scale" id="font_scale_counter">
<input class="neo-range-slider" type="range" id="font_scale" name="font_scale" min="0.5" max="1.5" step="0.01">
<input class="neo-range-input" type="number" min="0.5" max="1.5" step="0.01" data-for="font_scale" id="font_scale_counter">
</div>
<div class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
@ -3961,6 +3996,10 @@
<input id="world_import_dialog" type="checkbox" />
<small data-i18n="Lorebook Import Dialog">Lorebook Import Dialog</small>
</label>
<label data-newbie-hidden class="checkbox_label" for="enable_auto_select_input" title="Enable auto-select of input text in some text fields when clicking/selecting them. Applies to popup input textboxes, and possible other custom input fields." data-i18n="[title]Enable auto-select of input text in some text fields when clicking/selecting them. Applies to popup input textboxes, and possible other custom input fields.">
<input id="enable_auto_select_input" type="checkbox" />
<small data-i18n="Auto-select Input Text">Auto-select Input Text</small>
</label>
<label class="checkbox_label" for="restore_user_input" title="Restore unsaved user input on page refresh." data-i18n="[title]Restore unsaved user input on page refresh">
<input id="restore_user_input" type="checkbox" />
<small data-i18n="Restore User Input">Restore User Input</small>
@ -4565,7 +4604,7 @@
<div id="favorite_button" class="menu_button fa-solid fa-star" title="Add to Favorites" data-i18n="[title]Add to Favorites"></div>
<input type="hidden" id="fav_checkbox" name="fav" />
<div id="advanced_div" class="menu_button fa-solid fa-book " title="Advanced Definitions" data-i18n="[title]Advanced Definition"></div>
<div id="world_button" class="menu_button fa-solid fa-globe" title="Character Lore" data-i18n="[title]Character Lore"></div>
<div id="world_button" class="menu_button fa-solid fa-globe" title="Character Lore&#10;&#10;Click to load&#10;Shift-click to open 'Link to World Info' popup" data-i18n="[title]world_button_title"></div>
<div class="chat_lorebook_button menu_button fa-solid fa-passport" title="Chat Lore" data-i18n="[title]Chat Lore"></div>
<div id="export_button" class="menu_button fa-solid fa-file-export " title="Export and Download" data-i18n="[title]Export and Download"></div>
<!-- <div id="set_chat_scenario" class="menu_button fa-solid fa-scroll" title="Set a chat scenario override"></div> -->

View File

@ -390,7 +390,7 @@
"Alt Method": "طريقة بديلة",
"AI21 API Key": "مفتاح API لـ AI21",
"AI21 Model": "نموذج AI21",
"MakerSuite API Key": "مفتاح واجهة برمجة تطبيقات MakerSuite",
"Google AI Studio API Key": "مفتاح واجهة برمجة تطبيقات Google AI Studio",
"Google Model": "نموذج جوجل",
"MistralAI API Key": "مفتاح واجهة برمجة التطبيقات MistralAI",
"MistralAI Model": "نموذج ميسترال آي آي",

View File

@ -390,7 +390,7 @@
"Alt Method": "Alternative Methode",
"AI21 API Key": "AI21 API-Schlüssel",
"AI21 Model": "AI21-Modell",
"MakerSuite API Key": "MakerSuite API-Schlüssel",
"Google AI Studio API Key": "Google AI Studio API-Schlüssel",
"Google Model": "Google-Modell",
"MistralAI API Key": "MistralAI API-Schlüssel",
"MistralAI Model": "MistralAI-Modell",

View File

@ -390,7 +390,7 @@
"Alt Method": "Método alternativo",
"AI21 API Key": "Clave API de AI21",
"AI21 Model": "Modelo de AI21",
"MakerSuite API Key": "Clave API de MakerSuite",
"Google AI Studio API Key": "Clave API de Google AI Studio",
"Google Model": "Modelo de Google",
"MistralAI API Key": "Clave API de MistralAI",
"MistralAI Model": "Modelo MistralAI",

View File

@ -390,7 +390,7 @@
"Alt Method": "Méthode alternative",
"AI21 API Key": "Clé API AI21",
"AI21 Model": "Modèle AI21",
"MakerSuite API Key": "Clé API MakerSuite",
"Google AI Studio API Key": "Clé API Google AI Studio",
"Google Model": "Modèle Google",
"MistralAI API Key": "Clé API MistralAI",
"MistralAI Model": "Modèle MistralAI",

View File

@ -390,7 +390,7 @@
"Alt Method": "Aðferð Bakmenn",
"AI21 API Key": "Lykill API fyrir AI21",
"AI21 Model": "AI21 Módel",
"MakerSuite API Key": "MakerSuite API lykill",
"Google AI Studio API Key": "Google AI Studio API lykill",
"Google Model": "Google líkan",
"MistralAI API Key": "MistralAI API lykill",
"MistralAI Model": "MistralAI líkan",

View File

@ -390,7 +390,7 @@
"Alt Method": "Metodo alternativo",
"AI21 API Key": "Chiave API di AI21",
"AI21 Model": "Modello AI21",
"MakerSuite API Key": "Chiave API MakerSuite",
"Google AI Studio API Key": "Chiave API Google AI Studio",
"Google Model": "Modello Google",
"MistralAI API Key": "Chiave API MistralAI",
"MistralAI Model": "Modello MistralAI",

View File

@ -390,7 +390,7 @@
"Alt Method": "代替手法",
"AI21 API Key": "AI21のAPIキー",
"AI21 Model": "AI21モデル",
"MakerSuite API Key": "MakerSuite APIキー",
"Google AI Studio API Key": "Google AI Studio APIキー",
"Google Model": "Google モデル",
"MistralAI API Key": "MistralAI API キー",
"MistralAI Model": "MistralAI モデル",

View File

@ -390,7 +390,7 @@
"Alt Method": "대체 방법",
"AI21 API Key": "AI21 API 키",
"AI21 Model": "AI21 모델",
"MakerSuite API Key": "MakerSuite API 키",
"Google AI Studio API Key": "Google AI Studio API 키",
"Google Model": "구글 모델",
"MistralAI API Key": "MistralAI API 키",
"MistralAI Model": "MistralAI 모델",

View File

@ -390,7 +390,7 @@
"Alt Method": "Método Alternativo",
"AI21 API Key": "Chave da API AI21",
"AI21 Model": "Modelo AI21",
"MakerSuite API Key": "Chave API MakerSuite",
"Google AI Studio API Key": "Chave API Google AI Studio",
"Google Model": "Modelo Google",
"MistralAI API Key": "Chave de API MistralAI",
"MistralAI Model": "Modelo MistralAI",

View File

@ -722,7 +722,7 @@
"Proxy Server URL": "Адрес прокси-сервера",
"MistralAI Model": "Модель MistralAI",
"MistralAI API Key": "Ключ от API MistralAI",
"MakerSuite API Key": "Ключ от API MakerSuite",
"Google AI Studio API Key": "Ключ от API Google AI Studio",
"Google Model": "Модель Google",
"Cohere API Key": "Ключ от API Cohere",
"Cohere Model": "Модель Cohere",

View File

@ -390,7 +390,7 @@
"Alt Method": "Альтернативний метод",
"AI21 API Key": "Ключ API для AI21",
"AI21 Model": "Модель AI21",
"MakerSuite API Key": "Ключ API MakerSuite",
"Google AI Studio API Key": "Ключ API Google AI Studio",
"Google Model": "Модель Google",
"MistralAI API Key": "Ключ API MistralAI",
"MistralAI Model": "Модель MistralAI",

View File

@ -390,7 +390,7 @@
"Alt Method": "Phương pháp thay thế",
"AI21 API Key": "Khóa API của AI21",
"AI21 Model": "Mô hình AI21",
"MakerSuite API Key": "Khóa API MakerSuite",
"Google AI Studio API Key": "Khóa API Google AI Studio",
"Google Model": "Mô hình Google",
"MistralAI API Key": "Khóa API MistralAI",
"MistralAI Model": "Mô hình MistralAI",

View File

@ -406,7 +406,7 @@
"Alt Method": "备用方法",
"AI21 API Key": "AI21 API 密钥",
"AI21 Model": "AI21 模型",
"MakerSuite API Key": "MakerSuite API 密钥",
"Google AI Studio API Key": "Google AI Studio API 密钥",
"Google Model": "Google 模型",
"MistralAI API Key": "MistralAI API 密钥",
"MistralAI Model": "MistralAI 模型",
@ -707,10 +707,10 @@
"Restore User Input": "恢复用户输入",
"Allow repositioning certain UI elements by dragging them. PC only, no effect on mobile": "允许通过拖动重新定位某些UI元素。仅适用于PC对移动设备无影响",
"Movable UI Panels": "可移动 UI 面板",
"Reset MovingUI panel sizes/locations.": "重置 MovingUI 面板大小/位置。",
"MovingUI preset. Predefined/saved draggable positions": "可移动UI预设。预定义/保存的可拖动位置",
"MUI Preset": "可移动 UI 预设",
"Save movingUI changes to a new file": "将可移动UI更改保存到新文件中",
"Reset MovingUI panel sizes/locations.": "重置 MovingUI 面板大小/位置。",
"Apply a custom CSS style to all of the ST GUI": "将自定义CSS样式应用于所有ST GUI",
"Custom CSS": "自定义 CSS",
"Expand the editor": "展开编辑器",
@ -730,6 +730,8 @@
"Press Send to continue": "按发送键以继续",
"Show a button in the input area to ask the AI to continue (extend) its last message": "在输入区域中显示一个按钮要求AI继续延长其上一条消息",
"Quick 'Continue' button": "快速“继续”按钮",
"Show a button in the input area to ask the AI to impersonate your character for a single message": "在输入区域中显示一个按钮,让 AI 模仿你的角色发送一条消息。",
"Quick 'Impersonate' button": "快速“模仿”按钮",
"Show arrow buttons on the last in-chat message to generate alternative AI responses. Both PC and mobile": "在聊天窗口的最后一条信息上显示箭头按钮以生成AI的其他回复选项。适用于电脑和手机端。",
"Swipes": "刷新回复按钮",
"Allow using swiping gestures on the last in-chat message to trigger swipe generation. Mobile only, no effect on PC": "允许在最后一条聊天消息上使用滑动手势触发滑动生成。仅适用于移动设备对PC无影响",
@ -1183,6 +1185,7 @@
"Pause script execution": "暂停执行脚本",
"Abort script execution": "中止执行脚本",
"Abort request": "中止请求",
"Ask AI to write your message for you": "让AI为您撰写消息",
"Continue the last message": "继续上一条消息",
"Send a message": "发送消息",
"Close chat": "关闭聊天",
@ -1194,7 +1197,6 @@
"Manage chat files": "管理聊天文件",
"Delete messages": "删除消息",
"Regenerate": "重新生成",
"Ask AI to write your message for you": "请求AI为您撰写消息",
"Impersonate": "AI 帮答",
"Continue": "继续",
"Bind user name to that avatar": "将用户名称绑定到该头像",
@ -1429,6 +1431,7 @@
"ext_regex_export_script": "导出脚本",
"ext_regex_delete_script": "删除脚本",
"Trigger Stable Diffusion": "触发Stable Diffusion",
"Abort current image generation task": "中止当前图像生成",
"sd_Yourself": "你自己",
"sd_Your_Face": "你的脸",
"sd_Me": "我",
@ -1582,6 +1585,10 @@
"Only used when Main API is selected.": "仅在选择主 API 时使用。",
"Old messages are vectorized gradually as you chat. To process all previous messages, click the button below.": "随着您聊天,旧消息会逐渐矢量化。\n要处理所有以前的消息请单击下面的按钮。",
"View Stats": "查看统计数据",
"Title/Memo": "标题/备忘录",
"Status": "状态",
"Position": "位置",
"Trigger %": "触发率 ",
"Manager Users": "管理用户",
"New User": "新用户",
"Status:": "地位:",

View File

@ -391,7 +391,7 @@
"Alt Method": "替代方法",
"AI21 API Key": "AI21 API 金鑰",
"AI21 Model": "AI21 模型",
"MakerSuite API Key": "MakerSuite API 金鑰",
"Google AI Studio API Key": "Google AI Studio API 金鑰",
"Google Model": "Google 模型",
"MistralAI API Key": "MistralAI API 金鑰",
"MistralAI Model": "MistralAI 模型",

View File

@ -84,6 +84,7 @@ import {
context_presets,
resetMovableStyles,
forceCharacterEditorTokenize,
applyPowerUserSettings,
} from './scripts/power-user.js';
import {
@ -156,6 +157,7 @@ import {
ensureImageFormatSupported,
flashHighlight,
isTrueBoolean,
toggleDrawer,
} from './scripts/utils.js';
import { debounce_timeout } from './scripts/constants.js';
@ -224,7 +226,7 @@ import {
import { getBackgrounds, initBackgrounds, loadBackgroundSettings, background_settings } from './scripts/backgrounds.js';
import { hideLoader, showLoader } from './scripts/loader.js';
import { BulkEditOverlay, CharacterContextMenu } from './scripts/BulkEditOverlay.js';
import { loadFeatherlessModels, loadMancerModels, loadOllamaModels, loadTogetherAIModels, loadInfermaticAIModels, loadOpenRouterModels, loadVllmModels, loadAphroditeModels, loadDreamGenModels } from './scripts/textgen-models.js';
import { loadFeatherlessModels, loadMancerModels, loadOllamaModels, loadTogetherAIModels, loadInfermaticAIModels, loadOpenRouterModels, loadVllmModels, loadAphroditeModels, loadDreamGenModels, initTextGenModels } from './scripts/textgen-models.js';
import { appendFileContent, hasPendingFileAttachment, populateFileAttachment, decodeStyleTags, encodeStyleTags, isExternalMediaAllowed, getCurrentEntityId } from './scripts/chats.js';
import { initPresetManager } from './scripts/preset-manager.js';
import { MacrosParser, evaluateMacros, getLastMessageId } from './scripts/macros.js';
@ -241,7 +243,7 @@ import { DragAndDropHandler } from './scripts/dragdrop.js';
import { INTERACTABLE_CONTROL_CLASS, initKeyboard } from './scripts/keyboard.js';
import { initDynamicStyles } from './scripts/dynamic-styles.js';
import { SlashCommandEnumValue, enumTypes } from './scripts/slash-commands/SlashCommandEnumValue.js';
import { enumIcons } from './scripts/slash-commands/SlashCommandCommonEnumsProvider.js';
import { commonEnumProviders, enumIcons } from './scripts/slash-commands/SlashCommandCommonEnumsProvider.js';
//exporting functions and vars for mods
export {
@ -414,6 +416,7 @@ export const event_types = {
GENERATION_STOPPED: 'generation_stopped',
GENERATION_ENDED: 'generation_ended',
EXTENSIONS_FIRST_LOAD: 'extensions_first_load',
EXTENSION_SETTINGS_LOADED: 'extension_settings_loaded',
SETTINGS_LOADED: 'settings_loaded',
SETTINGS_UPDATED: 'settings_updated',
GROUP_UPDATED: 'group_updated',
@ -424,6 +427,8 @@ export const event_types = {
CHATCOMPLETION_MODEL_CHANGED: 'chatcompletion_model_changed',
OAI_PRESET_CHANGED_BEFORE: 'oai_preset_changed_before',
OAI_PRESET_CHANGED_AFTER: 'oai_preset_changed_after',
OAI_PRESET_EXPORT_READY: 'oai_preset_export_ready',
OAI_PRESET_IMPORT_READY: 'oai_preset_import_ready',
WORLDINFO_SETTINGS_UPDATED: 'worldinfo_settings_updated',
WORLDINFO_UPDATED: 'worldinfo_updated',
CHARACTER_EDITED: 'character_edited',
@ -456,6 +461,7 @@ export const event_types = {
LLM_FUNCTION_TOOL_REGISTER: 'llm_function_tool_register',
LLM_FUNCTION_TOOL_CALL: 'llm_function_tool_call',
ONLINE_STATUS_CHANGED: 'online_status_changed',
IMAGE_SWIPED: 'image_swiped',
};
export const eventSource = new EventEmitter();
@ -911,6 +917,7 @@ async function firstLoadInit() {
await readSecretState();
initLocales();
initDefaultSlashCommands();
initTextGenModels();
await getSystemMessages();
sendSystemMessage(system_message_types.WELCOME);
await getSettings();
@ -1874,7 +1881,12 @@ export function messageFormatting(mes, ch_name, isSystem, isUser, messageId) {
}
if (Number(messageId) === 0 && !isSystem && !isUser) {
const mesBeforeReplace = mes;
const chatMessage = chat[messageId];
mes = substituteParams(mes, undefined, ch_name);
if (chatMessage && chatMessage.mes === mesBeforeReplace && chatMessage.extra?.display_text !== mesBeforeReplace) {
chatMessage.mes = mes;
}
}
mesForShowdownParse = mes;
@ -2108,6 +2120,7 @@ export function updateMessageBlock(messageId, message) {
export function appendMediaToMessage(mes, messageElement, adjustScroll = true) {
// Add image to message
if (mes.extra?.image) {
const container = messageElement.find('.mes_img_container');
const chatHeight = $('#chat').prop('scrollHeight');
const image = messageElement.find('.mes_img');
const text = messageElement.find('.mes_text');
@ -2123,9 +2136,27 @@ export function appendMediaToMessage(mes, messageElement, adjustScroll = true) {
});
image.attr('src', mes.extra?.image);
image.attr('title', mes.extra?.title || mes.title || '');
messageElement.find('.mes_img_container').addClass('img_extra');
container.addClass('img_extra');
image.toggleClass('img_inline', isInline);
text.toggleClass('displayNone', !isInline);
const imageSwipes = mes.extra.image_swipes;
if (Array.isArray(imageSwipes) && imageSwipes.length > 0) {
container.addClass('img_swipes');
const counter = container.find('.mes_img_swipe_counter');
const currentImage = imageSwipes.indexOf(mes.extra.image) + 1;
counter.text(`${currentImage}/${imageSwipes.length}`);
const swipeLeft = container.find('.mes_img_swipe_left');
swipeLeft.off('click').on('click', function () {
eventSource.emit(event_types.IMAGE_SWIPED, { message: mes, element: messageElement, direction: 'left' });
});
const swipeRight = container.find('.mes_img_swipe_right');
swipeRight.off('click').on('click', function () {
eventSource.emit(event_types.IMAGE_SWIPED, { message: mes, element: messageElement, direction: 'right' });
});
}
}
// Add file to message
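
A minimal sketch of how a script could consume the new `IMAGE_SWIPED` event emitted above, assuming access to the exported `eventSource` and `event_types` (the import path depends on where the consumer lives and is illustrative):

```js
// Illustrative only: react to image swipes on a message. The payload shape
// ({ message, element, direction }) mirrors the emit calls shown above.
import { eventSource, event_types } from '../script.js';

eventSource.on(event_types.IMAGE_SWIPED, ({ message, element, direction }) => {
    console.debug(`Image swiped ${direction}`, message, element);
    // e.g. choose the next or previous entry of message.extra.image_swipes here
});
```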
@ -2492,8 +2523,8 @@ export function getStoppingStrings(isImpersonate, isContinue) {
result.push(charString);
}
// Add other group members as the stopping strings
if (selected_group) {
// Add group members as stopping strings if generating for a specific group member or user. (Allow slash commands to work around name stopping string restrictions)
if (selected_group && (name2 || isImpersonate)) {
const group = groups.find(x => x.id === selected_group);
if (group && Array.isArray(group.members)) {
@ -2815,7 +2846,14 @@ function hideStopButton() {
}
class StreamingProcessor {
constructor(type, force_name2, timeStarted, messageAlreadyGenerated) {
/**
* Creates a new streaming processor.
* @param {string} type Generation type
* @param {boolean} forceName2 If true, force the use of name2
* @param {Date} timeStarted Date when generation was started
* @param {string} continueMessage Previous message if the type is 'continue'
*/
constructor(type, forceName2, timeStarted, continueMessage) {
this.result = '';
this.messageId = -1;
this.messageDom = null;
@ -2825,14 +2863,14 @@ class StreamingProcessor {
/** @type {HTMLTextAreaElement} */
this.sendTextarea = document.querySelector('#send_textarea');
this.type = type;
this.force_name2 = force_name2;
this.force_name2 = forceName2;
this.isStopped = false;
this.isFinished = false;
this.generator = this.nullStreamingGeneration;
this.abortController = new AbortController();
this.firstMessageText = '...';
this.timeStarted = timeStarted;
this.messageAlreadyGenerated = messageAlreadyGenerated;
this.continueMessage = type === 'continue' ? continueMessage : '';
this.swipes = [];
/** @type {import('./scripts/logprobs.js').TokenLogprobs[]} */
this.messageLogprobs = [];
@ -2985,8 +3023,7 @@ class StreamingProcessor {
await eventSource.emit(event_types.IMPERSONATE_READY, text);
}
const continueMsg = this.type === 'continue' ? this.messageAlreadyGenerated : undefined;
saveLogprobsForActiveMessage(this.messageLogprobs.filter(Boolean), continueMsg);
saveLogprobsForActiveMessage(this.messageLogprobs.filter(Boolean), this.continueMessage);
await saveChatConditional();
unblockGeneration();
generatedPromptCache = '';
@ -3082,7 +3119,7 @@ class StreamingProcessor {
if (logprobs) {
this.messageLogprobs.push(...(Array.isArray(logprobs) ? logprobs : [logprobs]));
}
await sw.tick(() => this.onProgressStreaming(this.messageId, this.messageAlreadyGenerated + text));
await sw.tick(() => this.onProgressStreaming(this.messageId, this.continueMessage + text));
}
const seconds = (timestamps[timestamps.length - 1] - timestamps[0]) / 1000;
console.warn(`Stream stats: ${timestamps.length} tokens, ${seconds.toFixed(2)} seconds, rate: ${Number(timestamps.length / seconds).toFixed(2)} TPS`);
@ -3275,8 +3312,6 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
const isInstruct = power_user.instruct.enabled && main_api !== 'openai';
const isImpersonate = type == 'impersonate';
let message_already_generated = isImpersonate ? `${name1}: ` : `${name2}: `;
if (!(dryRun || type == 'regenerate' || type == 'swipe' || type == 'quiet')) {
const interruptedByCommand = await processCommands(String($('#send_textarea').val()));
@ -3715,7 +3750,6 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
let oaiMessageExamples = [];
if (main_api === 'openai') {
message_already_generated = '';
oaiMessages = setOpenAIMessages(coreChat);
oaiMessageExamples = setOpenAIMessageExamples(mesExamplesArray);
}
@ -3858,7 +3892,6 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
cyclePrompt += oai_settings.continue_postfix;
continue_mag += oai_settings.continue_postfix;
}
message_already_generated = continue_mag;
}
const originalType = type;
@ -3943,7 +3976,7 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
// Get instruct mode line
if (isInstruct && !isContinue) {
const name = (quiet_prompt && !quietToLoud) ? (quietName ?? 'System') : (isImpersonate ? name1 : name2);
const name = (quiet_prompt && !quietToLoud && !isImpersonate) ? (quietName ?? 'System') : (isImpersonate ? name1 : name2);
const isQuiet = quiet_prompt && type == 'quiet';
lastMesString += formatInstructModePrompt(name, isImpersonate, promptBias, name1, name2, isQuiet, quietToLoud);
}
@ -4285,7 +4318,7 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
console.debug(`pushed prompt bits to itemizedPrompts array. Length is now: ${itemizedPrompts.length}`);
if (isStreamingEnabled() && type !== 'quiet') {
streamingProcessor = new StreamingProcessor(type, force_name2, generation_started, message_already_generated);
streamingProcessor = new StreamingProcessor(type, force_name2, generation_started, continue_mag);
if (isContinue) {
// Save reply does add cycle text to the prompt, so it's not needed here
streamingProcessor.firstMessageText = '';
@ -5313,17 +5346,10 @@ export function cleanUpMessage(getMessage, isImpersonate, isContinue, displayInc
// Regex uses vars, so add before formatting
getMessage = getRegexedString(getMessage, isImpersonate ? regex_placement.USER_INPUT : regex_placement.AI_OUTPUT);
if (!displayIncompleteSentences && power_user.trim_sentences) {
getMessage = trimToEndSentence(getMessage, power_user.include_newline);
}
if (power_user.collapse_newlines) {
getMessage = collapseNewlines(getMessage);
}
if (power_user.trim_spaces) {
getMessage = getMessage.trim();
}
// trailing invisible whitespace before every newline, on a multiline string
// "trailing whitespace on newlines \nevery line of the string \n?sample text" ->
// "trailing whitespace on newlines\nevery line of the string\nsample text"
@ -5402,9 +5428,11 @@ export function cleanUpMessage(getMessage, isImpersonate, isContinue, displayInc
getMessage = fixMarkdown(getMessage, false);
}
const nameToTrim2 = isImpersonate ? name1 : name2;
const nameToTrim2 = isImpersonate
? (!power_user.allow_name1_display ? name1 : '')
: (!power_user.allow_name2_display ? name2 : '');
if (getMessage.startsWith(nameToTrim2 + ':')) {
if (nameToTrim2 && getMessage.startsWith(nameToTrim2 + ':')) {
getMessage = getMessage.replace(nameToTrim2 + ':', '');
getMessage = getMessage.trimStart();
}
@ -5413,6 +5441,14 @@ export function cleanUpMessage(getMessage, isImpersonate, isContinue, displayInc
getMessage = getMessage.trim();
}
if (!displayIncompleteSentences && power_user.trim_sentences) {
getMessage = trimToEndSentence(getMessage, power_user.include_newline);
}
if (power_user.trim_spaces) {
getMessage = getMessage.trim();
}
return getMessage;
}
@ -6426,6 +6462,8 @@ export async function getSettings() {
// Load power user settings
await loadPowerUserSettings(settings, data);
applyPowerUserSettings();
// Load character tags
loadTagsSettings(settings);
@ -6480,9 +6518,10 @@ export async function getSettings() {
selected_button = settings.selected_button;
if (data.enable_extensions) {
const enableAutoUpdate = Boolean(data.enable_extensions_auto_update);
const isVersionChanged = settings.currentVersion !== currentVersion;
await loadExtensionSettings(settings, isVersionChanged);
eventSource.emit(event_types.EXTENSION_SETTINGS_LOADED);
await loadExtensionSettings(settings, isVersionChanged, enableAutoUpdate);
await eventSource.emit(event_types.EXTENSION_SETTINGS_LOADED);
}
firstRun = !!settings.firstRun;
@ -8353,6 +8392,12 @@ const CONNECT_API_MAP = {
button: '#api_button_openai',
source: chat_completion_sources.OPENAI,
},
// Google alias
'google': {
selected: 'openai',
button: '#api_button_openai',
source: chat_completion_sources.MAKERSUITE,
},
// OpenRouter special naming, to differentiate between chat comp and text comp
'openrouter': {
selected: 'openai',
@ -8366,6 +8411,9 @@ const CONNECT_API_MAP = {
},
};
// Collect all unique API names in an array
export const UNIQUE_APIS = [...new Set(Object.values(CONNECT_API_MAP).map(x => x.selected))];
// Fill connections map from textgen_types and chat_completion_sources
for (const textGenType of Object.values(textgen_types)) {
if (CONNECT_API_MAP[textGenType]) continue;
@ -8435,7 +8483,7 @@ async function disableInstructCallback() {
/**
* @param {string} text API name
*/
async function connectAPISlash(_, text) {
async function connectAPISlash(args, text) {
if (!text.trim()) {
for (const [key, config] of Object.entries(CONNECT_API_MAP)) {
if (config.selected !== main_api) continue;
@ -8458,12 +8506,15 @@ async function connectAPISlash(_, text) {
return key;
}
console.error('FIXME: The current API is not in the API map');
return '';
}
const apiConfig = CONNECT_API_MAP[text.toLowerCase()];
if (!apiConfig) {
toastr.error(`Error: ${text} is not a valid API`);
return;
return '';
}
$(`#main_api option[value='${apiConfig.selected || text}']`).prop('selected', true);
@ -8483,14 +8534,18 @@ async function connectAPISlash(_, text) {
$(apiConfig.button).trigger('click');
}
toastr.info(`API set to ${text}, trying to connect..`);
const quiet = isTrueBoolean(args?.quiet);
const toast = quiet ? jQuery() : toastr.info(`API set to ${text}, trying to connect..`);
try {
await waitUntilCondition(() => online_status !== 'no_connection', 10000, 100);
console.log('Connection successful');
} catch {
console.log('Could not connect after 5 seconds, skipping.');
console.log('Could not connect after 10 seconds, skipping.');
}
toastr.clear(toast);
return text;
}
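
With these changes, `/api` returns the name of the current API when called without an argument and accepts a `quiet=true` named argument to suppress the connection toast; for example, `/api quiet=true openrouter` should switch backends silently (example invocation, not taken from this diff).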
/**
@ -8940,9 +8995,6 @@ jQuery(async function () {
return '';
}
// Collect all unique API names in an array
const uniqueAPIs = [...new Set(Object.values(CONNECT_API_MAP).map(x => x.selected))];
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'dupe',
callback: duplicateCharacter,
@ -8951,13 +9003,22 @@ jQuery(async function () {
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'api',
callback: connectAPISlash,
returns: 'the current API',
namedArgumentList: [
SlashCommandNamedArgument.fromProps({
name: 'quiet',
description: 'Suppress the toast message on connection',
typeList: [ARGUMENT_TYPE.BOOLEAN],
defaultValue: 'false',
enumList: commonEnumProviders.boolean('trueFalse')(),
}),
],
unnamedArgumentList: [
SlashCommandArgument.fromProps({
description: 'API to connect to',
typeList: [ARGUMENT_TYPE.STRING],
isRequired: false,
enumList: Object.entries(CONNECT_API_MAP).map(([api, { selected }]) =>
new SlashCommandEnumValue(api, selected, enumTypes.getBasedOnIndex(uniqueAPIs.findIndex(x => x === selected)),
new SlashCommandEnumValue(api, selected, enumTypes.getBasedOnIndex(UNIQUE_APIS.findIndex(x => x === selected)),
selected[0].toUpperCase() ?? enumIcons.default)),
}),
],
@ -10615,15 +10676,31 @@ jQuery(async function () {
}
});
$(document).on('click', '#OpenAllWIEntries', function () {
$('#world_popup_entries_list').children().find('.down').click();
});
$(document).on('click', '#CloseAllWIEntries', function () {
$('#world_popup_entries_list').children().find('.up').click();
document.addEventListener('click', function (e) {
if (!(e.target instanceof HTMLElement)) return;
if (e.target.matches('#OpenAllWIEntries')) {
document.querySelectorAll('#world_popup_entries_list .inline-drawer').forEach((/** @type {HTMLElement} */ drawer) => {
toggleDrawer(drawer, true);
});
} else if (e.target.matches('#CloseAllWIEntries')) {
document.querySelectorAll('#world_popup_entries_list .inline-drawer').forEach((/** @type {HTMLElement} */ drawer) => {
toggleDrawer(drawer, false);
});
}
});
$(document).on('click', '.open_alternate_greetings', openAlternateGreetings);
/* $('#set_character_world').on('click', openCharacterWorldPopup); */
$(document).on('focus', 'input.auto-select, textarea.auto-select', function () {
if (!power_user.enable_auto_select_input) return;
const control = $(this)[0];
if (control instanceof HTMLInputElement || control instanceof HTMLTextAreaElement) {
control.select();
console.debug('Auto-selecting content of input control', control);
}
});
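
A small sketch of how a custom field could opt into this behavior by carrying the `auto-select` class (the element id and target container are hypothetical; selection only happens when the new power-user toggle is enabled):

```js
// Illustrative only: the delegated focus handler above covers dynamically added fields too.
const notesBox = document.createElement('textarea');
notesBox.id = 'my_extension_notes';                 // hypothetical id
notesBox.classList.add('text_pole', 'auto-select'); // opt in to auto-select on focus
document.getElementById('extensions_settings')?.appendChild(notesBox);
```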
$(document).keyup(function (e) {
if (e.key === 'Escape') {
const isEditVisible = $('#curEditTextarea').is(':visible');
@ -10707,7 +10784,7 @@ jQuery(async function () {
}
} break;
case 'import_tags': {
await importTags(characters[this_chid], { forceShow: true });
await importTags(characters[this_chid], { importSetting: tag_import_setting.ASK });
} break;
/*case 'delete_button':
popup_type = "del_ch";
@ -10736,62 +10813,66 @@ jQuery(async function () {
var isManualInput = false;
var valueBeforeManualInput;
$('.range-block-counter input, .neo-range-input').on('click', function () {
$(document).on('input', '.range-block-counter input, .neo-range-input', function () {
valueBeforeManualInput = $(this).val();
console.log(valueBeforeManualInput);
})
.on('change', function (e) {
e.target.focus();
e.target.dispatchEvent(new Event('keyup'));
})
.on('keydown', function (e) {
const masterSelector = '#' + $(this).data('for');
const masterElement = $(masterSelector);
if (e.key === 'Enter') {
let manualInput = Number($(this).val());
if (isManualInput) {
//disallow manual inputs outside acceptable range
if (manualInput >= Number($(this).attr('min')) && manualInput <= Number($(this).attr('max'))) {
//if value is ok, assign to slider and update handle text and position
//newSlider.val(manualInput)
//handleSlideEvent.call(newSlider, null, { value: parseFloat(manualInput) }, 'manual');
valueBeforeManualInput = manualInput;
$(masterElement).val($(this).val()).trigger('input', { forced: true });
} else {
//if value not ok, warn and reset to last known valid value
toastr.warning(`Invalid value. Must be between ${$(this).attr('min')} and ${$(this).attr('max')}`);
console.log(valueBeforeManualInput);
//newSlider.val(valueBeforeManualInput)
$(this).val(valueBeforeManualInput);
}
}
}
})
.on('keyup', function () {
valueBeforeManualInput = $(this).val();
console.log(valueBeforeManualInput);
isManualInput = true;
})
//trigger slider changes when user clicks away
.on('mouseup blur', function () {
const masterSelector = '#' + $(this).data('for');
const masterElement = $(masterSelector);
});
$(document).on('change', '.range-block-counter input, .neo-range-input', function (e) {
e.target.focus();
e.target.dispatchEvent(new KeyboardEvent('keyup', { bubbles: true }));
});
$(document).on('keydown', '.range-block-counter input, .neo-range-input', function (e) {
const masterSelector = '#' + $(this).data('for');
const masterElement = $(masterSelector);
if (e.key === 'Enter') {
let manualInput = Number($(this).val());
if (isManualInput) {
//if value is between correct range for the slider
//disallow manual inputs outside acceptable range
if (manualInput >= Number($(this).attr('min')) && manualInput <= Number($(this).attr('max'))) {
//if value is ok, assign to slider and update handle text and position
//newSlider.val(manualInput)
//handleSlideEvent.call(newSlider, null, { value: parseFloat(manualInput) }, 'manual');
valueBeforeManualInput = manualInput;
//set the slider value to input value
$(masterElement).val($(this).val()).trigger('input', { forced: true });
} else {
//if value not ok, warn and reset to last known valid value
toastr.warning(`Invalid value. Must be between ${$(this).attr('min')} and ${$(this).attr('max')}`);
console.log(valueBeforeManualInput);
//newSlider.val(valueBeforeManualInput)
$(this).val(valueBeforeManualInput);
}
}
isManualInput = false;
});
}
});
$(document).on('keyup', '.range-block-counter input, .neo-range-input', function () {
valueBeforeManualInput = $(this).val();
console.log(valueBeforeManualInput);
isManualInput = true;
});
//trigger slider changes when user clicks away
$(document).on('mouseup blur', '.range-block-counter input, .neo-range-input', function () {
const masterSelector = '#' + $(this).data('for');
const masterElement = $(masterSelector);
let manualInput = Number($(this).val());
if (isManualInput) {
//if value is between correct range for the slider
if (manualInput >= Number($(this).attr('min')) && manualInput <= Number($(this).attr('max'))) {
valueBeforeManualInput = manualInput;
//set the slider value to input value
$(masterElement).val($(this).val()).trigger('input', { forced: true });
} else {
//if value not ok, warn and reset to last known valid value
toastr.warning(`Invalid value. Must be between ${$(this).attr('min')} and ${$(this).attr('max')}`);
console.log(valueBeforeManualInput);
$(this).val(valueBeforeManualInput);
}
}
isManualInput = false;
});
$('.user_stats_button').on('click', function () {
userStatsHandler();

View File

@ -18,7 +18,7 @@ import {
import { favsToHotswap } from './RossAscends-mods.js';
import { hideLoader, showLoader } from './loader.js';
import { convertCharacterToPersona } from './personas.js';
import { createTagInput, getTagKeyForEntity, getTagsList, printTagList, tag_map, compareTagsForSort, removeTagFromMap } from './tags.js';
import { createTagInput, getTagKeyForEntity, getTagsList, printTagList, tag_map, compareTagsForSort, removeTagFromMap, importTags, tag_import_setting } from './tags.js';
/**
* Static object representing the actions of the
@ -197,10 +197,10 @@ class BulkTagPopupHandler {
#getHtml = () => {
const characterData = JSON.stringify({ characterIds: this.characterIds });
return `<div id="bulk_tag_shadow_popup">
<div id="bulk_tag_popup">
<div id="bulk_tag_popup" class="wider_dialogue_popup">
<div id="bulk_tag_popup_holder">
<h3 class="marginBot5">Modify tags of ${this.characterIds.length} characters</h3>
<small class="bulk_tags_desc m-b-1">Add or remove the mutual tags of all selected characters.</small>
<small class="bulk_tags_desc m-b-1">Add or remove the mutual tags of all selected characters. Import all or existing tags for all selected characters.</small>
<div id="bulk_tags_avatars_block" class="avatars_inline avatars_inline_small tags tags_inline"></div>
<br>
<div id="bulk_tags_div" class="marginBot5" data-characters='${characterData}'>
@ -219,6 +219,12 @@ class BulkTagPopupHandler {
<i class="fa-solid fa-trash-can margin-right-10px"></i>
Mutual
</div>
<div id="bulk_tag_popup_import_all_tags" class="menu_button" title="Import all tags from selected characters" data-i18n="[title]Import all tags from selected characters">
Import All
</div>
<div id="bulk_tag_popup_import_existing_tags" class="menu_button" title="Import existing tags from selected characters" data-i18n="[title]Import existing tags from selected characters">
Import Existing
</div>
<div id="bulk_tag_popup_cancel" class="menu_button" data-i18n="Cancel">Close</div>
</div>
</div>
@ -254,6 +260,30 @@ class BulkTagPopupHandler {
document.querySelector('#bulk_tag_popup_reset').addEventListener('click', this.resetTags.bind(this));
document.querySelector('#bulk_tag_popup_remove_mutual').addEventListener('click', this.removeMutual.bind(this));
document.querySelector('#bulk_tag_popup_cancel').addEventListener('click', this.hide.bind(this));
document.querySelector('#bulk_tag_popup_import_all_tags').addEventListener('click', this.importAllTags.bind(this));
document.querySelector('#bulk_tag_popup_import_existing_tags').addEventListener('click', this.importExistingTags.bind(this));
}
/**
* Import existing tags for all selected characters
*/
async importExistingTags() {
for (const characterId of this.characterIds) {
await importTags(characters[characterId], { importSetting: tag_import_setting.ONLY_EXISTING });
}
$('#bulkTagList').empty();
}
/**
* Import all tags for all selected characters
*/
async importAllTags() {
for (const characterId of this.characterIds) {
await importTags(characters[characterId], { importSetting: tag_import_setting.ALL });
}
$('#bulkTagList').empty();
}
/**
@ -570,7 +600,7 @@ class BulkEditOverlay {
this.container.removeEventListener('mouseup', cancelHold);
this.container.removeEventListener('touchend', cancelHold);
},
BulkEditOverlay.longPressDelay);
BulkEditOverlay.longPressDelay);
};
handleLongPressEnd = (event) => {

View File

@ -380,6 +380,7 @@ function RA_autoconnect(PrevApi) {
|| (secret_state[SECRET_KEYS.PERPLEXITY] && oai_settings.chat_completion_source == chat_completion_sources.PERPLEXITY)
|| (secret_state[SECRET_KEYS.GROQ] && oai_settings.chat_completion_source == chat_completion_sources.GROQ)
|| (secret_state[SECRET_KEYS.ZEROONEAI] && oai_settings.chat_completion_source == chat_completion_sources.ZEROONEAI)
|| (secret_state[SECRET_KEYS.BLOCKENTROPY] && oai_settings.chat_completion_source == chat_completion_sources.BLOCKENTROPY)
|| (isValidUrl(oai_settings.custom_url) && oai_settings.chat_completion_source == chat_completion_sources.CUSTOM)
) {
$('#api_button_openai').trigger('click');
@ -953,6 +954,11 @@ export function initRossMods() {
* @param {KeyboardEvent} event
*/
async function processHotkeys(event) {
// Default hotkeys and shortcuts shouldn't work if any popup is currently open
if (Popup.util.isPopupOpen()) {
return;
}
//Enter to send when send_textarea in focus
if (document.activeElement == hotkeyTargets['send_textarea']) {
const sendOnEnter = shouldSendOnEnter();
@ -1106,10 +1112,6 @@ export function initRossMods() {
}
if (event.key == 'Escape') { //closes various panels
// Do not close panels if we are currently inside a popup
if (Popup.util.isPopupOpen())
return;
//don't override Escape hotkey functions from script.js
//"close edit box" and "cancel stream generation".
if ($('#curEditTextarea').is(':visible') || $('#mes_stop').is(':visible')) {

View File

@ -21,6 +21,7 @@ const defaultUrl = 'http://localhost:5100';
let saveMetadataTimeout = null;
let requiresReload = false;
let stateChanged = false;
export function saveMetadataDebounced() {
const context = getContext();
@ -238,6 +239,7 @@ function onEnableExtensionClick() {
async function enableExtension(name, reload = true) {
extension_settings.disabledExtensions = extension_settings.disabledExtensions.filter(x => x !== name);
stateChanged = true;
await saveSettings();
if (reload) {
location.reload();
@ -248,6 +250,7 @@ async function enableExtension(name, reload = true) {
async function disableExtension(name, reload = true) {
extension_settings.disabledExtensions.push(name);
stateChanged = true;
await saveSettings();
if (reload) {
location.reload();
@ -304,7 +307,7 @@ async function activateExtensions() {
if (!isDisabled) {
const promise = Promise.all([addExtensionScript(name, manifest), addExtensionStyle(name, manifest)]);
promise
await promise
.then(() => activeExtensions.add(name))
.catch(err => console.log('Could not activate extension: ' + name, err));
promises.push(promise);
@ -657,7 +660,29 @@ async function showExtensionsDetails() {
await oldPopup.complete(POPUP_RESULT.CANCELLED);
}
const popup = new Popup(html, POPUP_TYPE.TEXT, '', { okButton: 'Close', wide: true, large: true, customButtons: [updateAllButton], allowVerticalScrolling: true });
let waitingForSave = false;
const popup = new Popup(html, POPUP_TYPE.TEXT, '', {
okButton: 'Close',
wide: true,
large: true,
customButtons: [updateAllButton],
allowVerticalScrolling: true,
onClosing: async () => {
if (waitingForSave) {
return false;
}
if (stateChanged) {
waitingForSave = true;
const toast = toastr.info('The page will be reloaded shortly...', 'Extensions state changed');
await saveSettings();
toastr.clear(toast);
waitingForSave = false;
requiresReload = true;
}
return true;
},
});
popupPromise = popup.show();
} catch (error) {
toastr.error('Error loading extensions. See browser console for details.');
@ -797,16 +822,17 @@ export async function installExtension(url) {
const response = await request.json();
toastr.success(`Extension "${response.display_name}" by ${response.author} (version ${response.version}) has been installed successfully!`, 'Extension installation successful');
console.debug(`Extension "${response.display_name}" has been installed successfully at ${response.extensionPath}`);
await loadExtensionSettings({}, false);
eventSource.emit(event_types.EXTENSION_SETTINGS_LOADED);
await loadExtensionSettings({}, false, false);
await eventSource.emit(event_types.EXTENSION_SETTINGS_LOADED);
}
/**
* Loads extension settings from the app settings.
* @param {object} settings App Settings
* @param {boolean} versionChanged Is this a version change?
* @param {boolean} enableAutoUpdate Enable auto-update
*/
async function loadExtensionSettings(settings, versionChanged) {
async function loadExtensionSettings(settings, versionChanged, enableAutoUpdate) {
if (settings.extension_settings) {
Object.assign(extension_settings, settings.extension_settings);
}
@ -817,11 +843,11 @@ async function loadExtensionSettings(settings, versionChanged) {
$('#extensions_notify_updates').prop('checked', extension_settings.notifyUpdates);
// Activate offline extensions
eventSource.emit(event_types.EXTENSIONS_FIRST_LOAD);
await eventSource.emit(event_types.EXTENSIONS_FIRST_LOAD);
extensionNames = await discoverExtensions();
manifests = await getManifests(extensionNames);
if (versionChanged) {
if (versionChanged && enableAutoUpdate) {
await autoUpdateExtensions(false);
}
@ -989,6 +1015,28 @@ export async function writeExtensionField(characterId, key, value) {
}
}
/**
* Prompts the user to enter the Git URL of the extension to import.
* After obtaining the Git URL, makes a POST request to '/api/extensions/install' to import the extension.
* If the extension is imported successfully, a success message is displayed.
* If the extension import fails, an error message is displayed and the error is logged to the console.
* After successfully importing the extension, the extension settings are reloaded and an 'EXTENSION_SETTINGS_LOADED' event is emitted.
* @param {string} [suggestUrl] Suggested URL to install
* @returns {Promise<void>}
*/
export async function openThirdPartyExtensionMenu(suggestUrl = '') {
const html = await renderTemplateAsync('installExtension');
const input = await callGenericPopup(html, POPUP_TYPE.INPUT, suggestUrl ?? '');
if (!input) {
console.debug('Extension install cancelled');
return;
}
const url = String(input).trim();
await installExtension(url);
}
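/**
 * Illustrative sketch (not part of this diff): another module could deep-link the
 * installer with a suggested repository URL, the same way the WebLLM warning toast in
 * extensions/shared.js does later in this commit. The URL here is only an example value.
 */
async function sketchSuggestExtensionInstall() {
    await openThirdPartyExtensionMenu('https://github.com/author/extension-name');
}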
jQuery(async function () {
await addExtensionsButtonAndMenu();
$('#extensionsMenuButton').css('display', 'flex');
@ -1004,28 +1052,8 @@ jQuery(async function () {
/**
* Handles the click event for the third-party extension import button.
* Prompts the user to enter the Git URL of the extension to import.
* After obtaining the Git URL, makes a POST request to '/api/extensions/install' to import the extension.
* If the extension is imported successfully, a success message is displayed.
* If the extension import fails, an error message is displayed and the error is logged to the console.
* After successfully importing the extension, the extension settings are reloaded and a 'EXTENSION_SETTINGS_LOADED' event is emitted.
*
* @listens #third_party_extension_button#click - The click event of the '#third_party_extension_button' element.
*/
$('#third_party_extension_button').on('click', async () => {
const html = `<h3>Enter the Git URL of the extension to install</h3>
<br>
<p><b>Disclaimer:</b> Please be aware that using external extensions can have unintended side effects and may pose security risks. Always make sure you trust the source before importing an extension. We are not responsible for any damage caused by third-party extensions.</p>
<br>
<p>Example: <tt> https://github.com/author/extension-name </tt></p>`;
const input = await callGenericPopup(html, POPUP_TYPE.INPUT, '');
if (!input) {
console.debug('Extension install cancelled');
return;
}
const url = String(input).trim();
await installExtension(url);
});
$('#third_party_extension_button').on('click', () => openThirdPartyExtensionMenu());
});

View File

@ -169,7 +169,11 @@ async function sendCaptionedMessage(caption, image) {
},
};
context.chat.push(message);
const messageId = context.chat.length - 1;
await eventSource.emit(event_types.MESSAGE_SENT, messageId);
context.addOneMessage(message);
await eventSource.emit(event_types.USER_MESSAGE_RENDERED, messageId);
await context.saveChat();
}
/**

View File

@ -20,7 +20,7 @@
<option value="zerooneai">01.AI (Yi)</option>
<option value="anthropic">Anthropic</option>
<option value="custom" data-i18n="Custom (OpenAI-compatible)">Custom (OpenAI-compatible)</option>
<option value="google">Google MakerSuite</option>
<option value="google">Google AI Studio</option>
<option value="koboldcpp">KoboldCpp</option>
<option value="llamacpp">llama.cpp</option>
<option value="ollama">Ollama</option>
@ -38,6 +38,7 @@
<option data-type="openai" value="gpt-4-turbo">gpt-4-turbo</option>
<option data-type="openai" value="gpt-4o">gpt-4o</option>
<option data-type="openai" value="gpt-4o-mini">gpt-4o-mini</option>
<option data-type="openai" value="chatgpt-4o-latest">chatgpt-4o-latest</option>
<option data-type="anthropic" value="claude-3-5-sonnet-20240620">claude-3-5-sonnet-20240620</option>
<option data-type="anthropic" value="claude-3-opus-20240229">claude-3-opus-20240229</option>
<option data-type="anthropic" value="claude-3-sonnet-20240229">claude-3-sonnet-20240229</option>

View File

@ -1,4 +1,4 @@
import { getStringHash, debounce, waitUntilCondition, extractAllWords } from '../../utils.js';
import { getStringHash, debounce, waitUntilCondition, extractAllWords, isTrueBoolean } from '../../utils.js';
import { getContext, getApiUrl, extension_settings, doExtrasFetch, modules, renderExtensionTemplateAsync } from '../../extensions.js';
import {
activateSendButtons,
@ -25,6 +25,8 @@ import { SlashCommandParser } from '../../slash-commands/SlashCommandParser.js';
import { SlashCommand } from '../../slash-commands/SlashCommand.js';
import { ARGUMENT_TYPE, SlashCommandArgument, SlashCommandNamedArgument } from '../../slash-commands/SlashCommandArgument.js';
import { MacrosParser } from '../../macros.js';
import { countWebLlmTokens, generateWebLlmChatPrompt, getWebLlmContextSize, isWebLlmSupported } from '../shared.js';
import { commonEnumProviders } from '../../slash-commands/SlashCommandCommonEnumsProvider.js';
export { MODULE_NAME };
const MODULE_NAME = '1_memory';
@ -36,6 +38,41 @@ let lastMessageHash = null;
let lastMessageId = null;
let inApiCall = false;
/**
* Count the number of tokens in the provided text.
* @param {string} text Text to count tokens for
* @param {number} padding Number of additional tokens to add to the count
* @returns {Promise<number>} Number of tokens in the text
*/
async function countSourceTokens(text, padding = 0) {
if (extension_settings.memory.source === summary_sources.webllm) {
const count = await countWebLlmTokens(text);
return count + padding;
}
if (extension_settings.memory.source === summary_sources.extras) {
const count = getTextTokens(tokenizers.GPT2, text).length;
return count + padding;
}
return await getTokenCountAsync(text, padding);
}
async function getSourceContextSize() {
const overrideLength = extension_settings.memory.overrideResponseLength;
if (extension_settings.memory.source === summary_sources.webllm) {
const maxContext = await getWebLlmContextSize();
return overrideLength > 0 ? (maxContext - overrideLength) : Math.round(maxContext * 0.75);
}
if (extension_settings.memory.source === summary_sources.extras) {
return 1024 - 64;
}
return getMaxContextSize(overrideLength);
}
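/**
 * Illustrative sketch (not part of this diff): how a caller might combine the two
 * source-aware helpers above to check whether a chat buffer still fits the
 * summarization budget. The 64-token padding is an assumed safety margin.
 */
async function sketchFitsSummaryBudget(text) {
    const budget = await getSourceContextSize();
    const tokens = await countSourceTokens(text, 64);
    return tokens <= budget;
}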
const formatMemoryValue = function (value) {
if (!value) {
return '';
@ -55,6 +92,7 @@ const saveChatDebounced = debounce(() => getContext().saveChat(), debounce_timeo
const summary_sources = {
'extras': 'extras',
'main': 'main',
'webllm': 'webllm',
};
const prompt_builders = {
@ -130,12 +168,12 @@ function loadSettings() {
async function onPromptForceWordsAutoClick() {
const context = getContext();
const maxPromptLength = getMaxContextSize(extension_settings.memory.overrideResponseLength);
const maxPromptLength = await getSourceContextSize();
const chat = context.chat;
const allMessages = chat.filter(m => !m.is_system && m.mes).map(m => m.mes);
const messagesWordCount = allMessages.map(m => extractAllWords(m)).flat().length;
const averageMessageWordCount = messagesWordCount / allMessages.length;
const tokensPerWord = await getTokenCountAsync(allMessages.join('\n')) / messagesWordCount;
const tokensPerWord = await countSourceTokens(allMessages.join('\n')) / messagesWordCount;
const wordsPerToken = 1 / tokensPerWord;
const maxPromptLengthWords = Math.round(maxPromptLength * wordsPerToken);
// How many words should pass so that messages will start be dropped out of context;
@ -168,15 +206,15 @@ async function onPromptForceWordsAutoClick() {
async function onPromptIntervalAutoClick() {
const context = getContext();
const maxPromptLength = getMaxContextSize(extension_settings.memory.overrideResponseLength);
const maxPromptLength = await getSourceContextSize();
const chat = context.chat;
const allMessages = chat.filter(m => !m.is_system && m.mes).map(m => m.mes);
const messagesWordCount = allMessages.map(m => extractAllWords(m)).flat().length;
const messagesTokenCount = await getTokenCountAsync(allMessages.join('\n'));
const messagesTokenCount = await countSourceTokens(allMessages.join('\n'));
const tokensPerWord = messagesTokenCount / messagesWordCount;
const averageMessageTokenCount = messagesTokenCount / allMessages.length;
const targetSummaryTokens = Math.round(extension_settings.memory.promptWords * tokensPerWord);
const promptTokens = await getTokenCountAsync(extension_settings.memory.prompt);
const promptTokens = await countSourceTokens(extension_settings.memory.prompt);
const promptAllowance = maxPromptLength - promptTokens - targetSummaryTokens;
const maxMessagesPerSummary = extension_settings.memory.maxMessagesPerRequest || 0;
const averageMessagesPerPrompt = Math.floor(promptAllowance / averageMessageTokenCount);
@ -213,8 +251,8 @@ function onSummarySourceChange(event) {
function switchSourceControls(value) {
$('#memory_settings [data-summary-source]').each((_, element) => {
const source = $(element).data('summary-source');
$(element).toggle(source === value);
const source = element.dataset.summarySource.split(',').map(s => s.trim());
$(element).toggle(source.includes(value));
});
}
@ -353,10 +391,13 @@ function getIndexOfLatestChatSummary(chat) {
async function onChatEvent() {
// Module not enabled
if (extension_settings.memory.source === summary_sources.extras) {
if (!modules.includes('summarize')) {
return;
}
if (extension_settings.memory.source === summary_sources.extras && !modules.includes('summarize')) {
return;
}
// WebLLM is not supported
if (extension_settings.memory.source === summary_sources.webllm && !isWebLlmSupported()) {
return;
}
const context = getContext();
@ -416,7 +457,12 @@ async function onChatEvent() {
}
}
async function forceSummarizeChat() {
/**
* Forces a summary generation for the current chat.
* @param {boolean} quiet Suppress the informational toast message
* @returns {Promise<string>} Summarized text
*/
async function forceSummarizeChat(quiet) {
if (extension_settings.memory.source === summary_sources.extras) {
toastr.warning('Force summarization is not supported for Extras API');
return;
@ -431,8 +477,12 @@ async function forceSummarizeChat() {
return '';
}
toastr.info('Summarizing chat...', 'Please wait');
const value = await summarizeChatMain(context, true, skipWIAN);
const toast = quiet ? jQuery() : toastr.info('Summarizing chat...', 'Please wait', { timeOut: 0, extendedTimeOut: 0 });
const value = extension_settings.memory.source === summary_sources.main
? await summarizeChatMain(context, true, skipWIAN)
: await summarizeChatWebLLM(context, true);
toastr.clear(toast);
if (!value) {
toastr.warning('Failed to summarize chat');
@ -450,9 +500,10 @@ async function forceSummarizeChat() {
async function summarizeCallback(args, text) {
text = text.trim();
// Using forceSummarizeChat to summarize the current chat
// Summarize the current chat if no text provided
if (!text) {
return await forceSummarizeChat();
const quiet = isTrueBoolean(args.quiet);
return await forceSummarizeChat(quiet);
}
const source = args.source || extension_settings.memory.source;
@ -464,6 +515,11 @@ async function summarizeCallback(args, text) {
return await callExtrasSummarizeAPI(text);
case summary_sources.main:
return await generateRaw(text, '', false, false, prompt, extension_settings.memory.overrideResponseLength);
case summary_sources.webllm: {
const messages = [{ role: 'system', content: prompt }, { role: 'user', content: text }].filter(m => m.content);
const params = extension_settings.memory.overrideResponseLength > 0 ? { max_tokens: extension_settings.memory.overrideResponseLength } : {};
return await generateWebLlmChatPrompt(messages, params);
}
default:
toastr.warning('Invalid summarization source specified');
return '';
@ -484,16 +540,25 @@ async function summarizeChat(context) {
case summary_sources.main:
await summarizeChatMain(context, false, skipWIAN);
break;
case summary_sources.webllm:
await summarizeChatWebLLM(context, false);
break;
default:
break;
}
}
async function summarizeChatMain(context, force, skipWIAN) {
/**
* Check if the chat should be summarized based on the current conditions.
* Return summary prompt if it should be summarized.
* @param {any} context ST context
* @param {boolean} force Summarize the chat regardless of the conditions
* @returns {Promise<string>} Summary prompt or empty string
*/
async function getSummaryPromptForNow(context, force) {
if (extension_settings.memory.promptInterval === 0 && !force) {
console.debug('Prompt interval is set to 0, skipping summarization');
return;
return '';
}
try {
@ -505,17 +570,17 @@ async function summarizeChatMain(context, force, skipWIAN) {
waitUntilCondition(() => is_send_press === false, 30000, 100);
} catch {
console.debug('Timeout waiting for is_send_press');
return;
return '';
}
if (!context.chat.length) {
console.debug('No messages in chat to summarize');
return;
return '';
}
if (context.chat.length < extension_settings.memory.promptInterval && !force) {
console.debug(`Not enough messages in chat to summarize (chat: ${context.chat.length}, interval: ${extension_settings.memory.promptInterval})`);
return;
return '';
}
let messagesSinceLastSummary = 0;
@ -539,7 +604,7 @@ async function summarizeChatMain(context, force, skipWIAN) {
if (!conditionSatisfied && !force) {
console.debug(`Summary conditions not satisfied (messages: ${messagesSinceLastSummary}, interval: ${extension_settings.memory.promptInterval}, words: ${wordsSinceLastSummary}, force words: ${extension_settings.memory.promptForceWords})`);
return;
return '';
}
console.log('Summarizing chat, messages since last summary: ' + messagesSinceLastSummary, 'words since last summary: ' + wordsSinceLastSummary);
@ -547,6 +612,63 @@ async function summarizeChatMain(context, force, skipWIAN) {
if (!prompt) {
console.debug('Summarization prompt is empty. Skipping summarization.');
return '';
}
return prompt;
}
async function summarizeChatWebLLM(context, force) {
if (!isWebLlmSupported()) {
return;
}
const prompt = await getSummaryPromptForNow(context, force);
if (!prompt) {
return;
}
const { rawPrompt, lastUsedIndex } = await getRawSummaryPrompt(context, prompt);
if (lastUsedIndex === null || lastUsedIndex === -1) {
if (force) {
toastr.info('To try again, remove the latest summary.', 'No messages found to summarize');
}
return null;
}
const messages = [
{ role: 'system', content: prompt },
{ role: 'user', content: rawPrompt },
];
const params = {};
if (extension_settings.memory.overrideResponseLength > 0) {
params.max_tokens = extension_settings.memory.overrideResponseLength;
}
const summary = await generateWebLlmChatPrompt(messages, params);
const newContext = getContext();
// something changed during summarization request
if (newContext.groupId !== context.groupId ||
newContext.chatId !== context.chatId ||
(!newContext.groupId && (newContext.characterId !== context.characterId))) {
console.log('Context changed, summary discarded');
return;
}
setMemoryContext(summary, true, lastUsedIndex);
return summary;
}
async function summarizeChatMain(context, force, skipWIAN) {
const prompt = await getSummaryPromptForNow(context, force);
if (!prompt) {
return;
}
@ -634,7 +756,7 @@ async function getRawSummaryPrompt(context, prompt) {
chat.pop(); // We always exclude the last message from the buffer
const chatBuffer = [];
const PADDING = 64;
const PROMPT_SIZE = getMaxContextSize(extension_settings.memory.overrideResponseLength);
const PROMPT_SIZE = await getSourceContextSize();
let latestUsedMessage = null;
for (let index = latestSummaryIndex + 1; index < chat.length; index++) {
@ -651,7 +773,7 @@ async function getRawSummaryPrompt(context, prompt) {
const entry = `${message.name}:\n${message.mes}`;
chatBuffer.push(entry);
const tokens = await getTokenCountAsync(getMemoryString(true), PADDING);
const tokens = await countSourceTokens(getMemoryString(true), PADDING);
if (tokens > PROMPT_SIZE) {
chatBuffer.pop();
@ -680,7 +802,7 @@ async function summarizeChatExtras(context) {
const reversedChat = chat.slice().reverse();
reversedChat.shift();
const memoryBuffer = [];
const CONTEXT_SIZE = 1024 - 64;
const CONTEXT_SIZE = await getSourceContextSize();
for (const message of reversedChat) {
// we reached the point of latest memory
@ -698,14 +820,14 @@ async function summarizeChatExtras(context) {
memoryBuffer.push(entry);
// check if token limit was reached
const tokens = getTextTokens(tokenizers.GPT2, getMemoryString()).length;
const tokens = await countSourceTokens(getMemoryString());
if (tokens >= CONTEXT_SIZE) {
break;
}
}
const resultingString = getMemoryString();
const resultingTokens = getTextTokens(tokenizers.GPT2, resultingString).length;
const resultingTokens = await countSourceTokens(resultingString);
if (!resultingString || resultingTokens < CONTEXT_SIZE) {
console.debug('Not enough context to summarize');
@ -890,7 +1012,7 @@ function setupListeners() {
$('#memory_prompt_words').off('click').on('input', onMemoryPromptWordsInput);
$('#memory_prompt_interval').off('click').on('input', onMemoryPromptIntervalInput);
$('#memory_prompt').off('click').on('input', onMemoryPromptInput);
$('#memory_force_summarize').off('click').on('click', forceSummarizeChat);
$('#memory_force_summarize').off('click').on('click', () => forceSummarizeChat(false));
$('#memory_template').off('click').on('input', onMemoryTemplateInput);
$('#memory_depth').off('click').on('input', onMemoryDepthInput);
$('#memory_role').off('click').on('input', onMemoryRoleInput);
@ -933,13 +1055,20 @@ jQuery(async function () {
name: 'summarize',
callback: summarizeCallback,
namedArgumentList: [
new SlashCommandNamedArgument('source', 'API to use for summarization', [ARGUMENT_TYPE.STRING], false, false, '', ['main', 'extras']),
new SlashCommandNamedArgument('source', 'API to use for summarization', [ARGUMENT_TYPE.STRING], false, false, '', Object.values(summary_sources)),
SlashCommandNamedArgument.fromProps({
name: 'prompt',
description: 'prompt to use for summarization',
typeList: [ARGUMENT_TYPE.STRING],
defaultValue: '',
}),
SlashCommandNamedArgument.fromProps({
name: 'quiet',
description: 'suppress the toast message when summarizing the chat',
typeList: [ARGUMENT_TYPE.BOOLEAN],
defaultValue: 'false',
enumList: commonEnumProviders.boolean('trueFalse')(),
}),
],
unnamedArgumentList: [
new SlashCommandArgument('text to summarize', [ARGUMENT_TYPE.STRING], false, false, ''),

View File

@ -13,6 +13,7 @@
<select id="summary_source">
<option value="main" data-i18n="ext_sum_main_api">Main API</option>
<option value="extras">Extras API</option>
<option value="webllm" data-i18n="ext_sum_webllm">WebLLM Extension</option>
</select><br>
<div class="flex-container justifyspacebetween alignitemscenter">
@ -24,7 +25,7 @@
<textarea id="memory_contents" class="text_pole textarea_compact" rows="6" data-i18n="[placeholder]ext_sum_memory_placeholder" placeholder="Summary will be generated here..."></textarea>
<div class="memory_contents_controls">
<div id="memory_force_summarize" data-summary-source="main" class="menu_button menu_button_icon" title="Trigger a summary update right now." data-i18n="[title]ext_sum_force_tip">
<div id="memory_force_summarize" data-summary-source="main,webllm" class="menu_button menu_button_icon" title="Trigger a summary update right now." data-i18n="[title]ext_sum_force_tip">
<i class="fa-solid fa-database"></i>
<span data-i18n="ext_sum_force_text">Summarize now</span>
</div>
@ -58,7 +59,7 @@
<span data-i18n="ext_sum_prompt_builder_3">Classic, blocking</span>
</label>
</div>
<div data-summary-source="main">
<div data-summary-source="main,webllm">
<label for="memory_prompt" class="title_restorable">
<span data-i18n="Summary Prompt">Summary Prompt</span>
<div id="memory_prompt_restore" data-i18n="[title]ext_sum_restore_default_prompt_tip" title="Restore default prompt" class="right_menu_button">
@ -74,7 +75,7 @@
</label>
<input id="memory_override_response_length" type="range" value="{{defaultSettings.overrideResponseLength}}" min="{{defaultSettings.overrideResponseLengthMin}}" max="{{defaultSettings.overrideResponseLengthMax}}" step="{{defaultSettings.overrideResponseLengthStep}}" />
<label for="memory_max_messages_per_request">
<span data-i18n="ext_sum_raw_max_msg">[Raw] Max messages per request</span> (<span id="memory_max_messages_per_request_value"></span>)
<span data-i18n="ext_sum_raw_max_msg">[Raw/WebLLM] Max messages per request</span> (<span id="memory_max_messages_per_request_value"></span>)
<small class="memory_disabled_hint" data-i18n="ext_sum_0_unlimited">0 = unlimited</small>
</label>
<input id="memory_max_messages_per_request" type="range" value="{{defaultSettings.maxMessagesPerRequest}}" min="{{defaultSettings.maxMessagesPerRequestMin}}" max="{{defaultSettings.maxMessagesPerRequestMax}}" step="{{defaultSettings.maxMessagesPerRequestStep}}" />

View File

@ -204,6 +204,7 @@ export class QuickReplyApi {
* @param {boolean} [props.executeOnAi] whether to execute the quick reply after the AI has sent a message
* @param {boolean} [props.executeOnChatChange] whether to execute the quick reply when a new chat is loaded
* @param {boolean} [props.executeOnGroupMemberDraft] whether to execute the quick reply when a group member is selected
* @param {boolean} [props.executeOnNewChat] whether to execute the quick reply when a new chat is created
* @param {string} [props.automationId] when not empty, the quick reply will be executed when the WI with the given automation ID is activated
* @returns {QuickReply} the new quick reply
*/
@ -218,6 +219,7 @@ export class QuickReplyApi {
executeOnAi,
executeOnChatChange,
executeOnGroupMemberDraft,
executeOnNewChat,
automationId,
} = {}) {
const set = this.getSetByName(setName);
@ -236,6 +238,7 @@ export class QuickReplyApi {
qr.executeOnAi = executeOnAi ?? false;
qr.executeOnChatChange = executeOnChatChange ?? false;
qr.executeOnGroupMemberDraft = executeOnGroupMemberDraft ?? false;
qr.executeOnNewChat = executeOnNewChat ?? false;
qr.automationId = automationId ?? '';
qr.onUpdate();
return qr;
@ -258,6 +261,7 @@ export class QuickReplyApi {
* @param {boolean} [props.executeOnAi] whether to execute the quick reply after the AI has sent a message
* @param {boolean} [props.executeOnChatChange] whether to execute the quick reply when a new chat is loaded
* @param {boolean} [props.executeOnGroupMemberDraft] whether to execute the quick reply when a group member is selected
* @param {boolean} [props.executeOnNewChat] whether to execute the quick reply when a new chat is created
* @param {string} [props.automationId] when not empty, the quick reply will be executed when the WI with the given automation ID is activated
* @returns {QuickReply} the altered quick reply
*/
@ -273,6 +277,7 @@ export class QuickReplyApi {
executeOnAi,
executeOnChatChange,
executeOnGroupMemberDraft,
executeOnNewChat,
automationId,
} = {}) {
const qr = this.getQrByLabel(setName, label);
@ -290,6 +295,7 @@ export class QuickReplyApi {
qr.executeOnAi = executeOnAi ?? qr.executeOnAi;
qr.executeOnChatChange = executeOnChatChange ?? qr.executeOnChatChange;
qr.executeOnGroupMemberDraft = executeOnGroupMemberDraft ?? qr.executeOnGroupMemberDraft;
qr.executeOnNewChat = executeOnNewChat ?? qr.executeOnNewChat;
qr.automationId = automationId ?? qr.automationId;
qr.onUpdate();
return qr;

View File

@ -108,6 +108,10 @@
<input type="checkbox" id="qr--executeOnChatChange" >
<span><i class="fa-solid fa-fw fa-message"></i><span data-i18n="Execute on chat change">Execute on chat change</span></span>
</label>
<label class="checkbox_label">
<input type="checkbox" id="qr--executeOnNewChat">
<span><i class="fa-solid fa-fw fa-comments"></i><span data-i18n="Execute on new chat">Execute on new chat</span></span>
</label>
<label class="checkbox_label">
<input type="checkbox" id="qr--executeOnGroupMemberDraft">
<span><i class="fa-solid fa-fw fa-people-group"></i><span data-i18n="Execute on group member draft">Execute on group member draft</span></span>

View File

@ -11,6 +11,9 @@
<label class="flex-container">
<input type="checkbox" id="qr--isCombined"><span data-i18n="Combine Quick Replies">Combine Quick Replies</span>
</label>
<label class="flex-container">
<input type="checkbox" id="qr--showPopoutButton"><span data-i18n="Show Popout Button">Show Popout Button</span>
</label>
<hr>

View File

@ -105,6 +105,7 @@ const loadSets = async () => {
qr.executeOnAi = slot.autoExecute_botMessage ?? false;
qr.executeOnChatChange = slot.autoExecute_chatLoad ?? false;
qr.executeOnGroupMemberDraft = slot.autoExecute_groupMemberDraft ?? false;
qr.executeOnNewChat = slot.autoExecute_newChat ?? false;
qr.automationId = slot.automationId ?? '';
qr.contextList = (slot.contextMenu ?? []).map(it=>({
set: it.preset,
@ -260,3 +261,8 @@ const onWIActivation = async (entries) => {
await autoExec.handleWIActivation(entries);
};
eventSource.on(event_types.WORLD_INFO_ACTIVATED, (...args) => executeIfReadyElseQueue(onWIActivation, args));
const onNewChat = async () => {
await autoExec.handleNewChat();
};
eventSource.on(event_types.CHAT_CREATED, (...args) => executeIfReadyElseQueue(onNewChat, args));

View File

@ -83,6 +83,15 @@ export class AutoExecuteHandler {
await this.performAutoExecute(qrList);
}
async handleNewChat() {
if (!this.checkExecute()) return;
const qrList = [
...this.settings.config.setList.map(link=>link.set.qrList.filter(qr=>qr.executeOnNewChat)).flat(),
...(this.settings.chatConfig?.setList?.map(link=>link.set.qrList.filter(qr=>qr.executeOnNewChat))?.flat() ?? []),
];
await this.performAutoExecute(qrList);
}
/**
* @param {any[]} entries Set of activated entries
*/

View File

@ -44,6 +44,7 @@ export class QuickReply {
/**@type {boolean}*/ executeOnAi = false;
/**@type {boolean}*/ executeOnChatChange = false;
/**@type {boolean}*/ executeOnGroupMemberDraft = false;
/**@type {boolean}*/ executeOnNewChat = false;
/**@type {string}*/ automationId = '';
/**@type {function}*/ onExecute;
@ -1061,6 +1062,13 @@ export class QuickReply {
this.updateContext();
});
/**@type {HTMLInputElement}*/
const executeOnNewChat = dom.querySelector('#qr--executeOnNewChat');
executeOnNewChat.checked = this.executeOnNewChat;
executeOnNewChat.addEventListener('click', ()=>{
this.executeOnNewChat = executeOnNewChat.checked;
this.updateContext();
});
/**@type {HTMLInputElement}*/
const automationId = dom.querySelector('#qr--automationId');
automationId.value = this.automationId;
automationId.addEventListener('input', () => {

View File

@ -16,6 +16,7 @@ export class QuickReplySettings {
/**@type {Boolean}*/ isEnabled = false;
/**@type {Boolean}*/ isCombined = false;
/**@type {Boolean}*/ isPopout = false;
/**@type {Boolean}*/ showPopoutButton = true;
/**@type {QuickReplyConfig}*/ config;
/**@type {QuickReplyConfig}*/ _chatConfig;
get chatConfig() {
@ -79,6 +80,7 @@ export class QuickReplySettings {
isEnabled: this.isEnabled,
isCombined: this.isCombined,
isPopout: this.isPopout,
showPopoutButton: this.showPopoutButton,
config: this.config,
};
}

View File

@ -69,17 +69,20 @@ export class ButtonUi {
root.id = 'qr--bar';
root.classList.add('flex-container');
root.classList.add('flexGap5');
const popout = document.createElement('div'); {
popout.id = 'qr--popoutTrigger';
popout.classList.add('menu_button');
popout.classList.add('fa-solid');
popout.classList.add('fa-window-restore');
popout.addEventListener('click', ()=>{
this.settings.isPopout = true;
this.refresh();
this.settings.save();
});
root.append(popout);
if (this.settings.showPopoutButton) {
root.classList.add('popoutVisible');
const popout = document.createElement('div'); {
popout.id = 'qr--popoutTrigger';
popout.classList.add('menu_button');
popout.classList.add('fa-solid');
popout.classList.add('fa-window-restore');
popout.addEventListener('click', ()=>{
this.settings.isPopout = true;
this.refresh();
this.settings.save();
});
root.append(popout);
}
}
if (this.settings.isCombined) {
const buttons = document.createElement('div'); {

View File

@ -14,6 +14,7 @@ export class SettingsUi {
/**@type {HTMLInputElement}*/ isEnabled;
/**@type {HTMLInputElement}*/ isCombined;
/**@type {HTMLInputElement}*/ showPopoutButton;
/**@type {HTMLElement}*/ globalSetList;
@ -79,6 +80,10 @@ export class SettingsUi {
this.isCombined = this.dom.querySelector('#qr--isCombined');
this.isCombined.checked = this.settings.isCombined;
this.isCombined.addEventListener('click', ()=>this.onIsCombined());
this.showPopoutButton = this.dom.querySelector('#qr--showPopoutButton');
this.showPopoutButton.checked = this.settings.showPopoutButton;
this.showPopoutButton.addEventListener('click', ()=>this.onShowPopoutButton());
}
prepareGlobalSetList() {
@ -235,6 +240,11 @@ export class SettingsUi {
this.settings.save();
}
async onShowPopoutButton() {
this.settings.showPopoutButton = this.showPopoutButton.checked;
this.settings.save();
}
async onGlobalSetListSort() {
this.settings.config.setList = Array.from(this.globalSetList.children).map((it,idx)=>{
const set = this.settings.config.setList[Number(it.getAttribute('data-order'))];

View File

@ -27,7 +27,6 @@
max-width: 100%;
overflow-x: auto;
order: 1;
padding-right: 2.5em;
position: relative;
}
#qr--bar > #qr--popoutTrigger {
@ -35,6 +34,9 @@
right: 0.25em;
top: 0;
}
#qr--bar.popoutVisible {
padding-right: 2.5em;
}
#qr--popout {
display: flex;
flex-direction: column;

View File

@ -25,7 +25,6 @@
max-width: 100%;
overflow-x: auto;
order: 1;
padding-right: 2.5em;
position: relative;
>#qr--popoutTrigger {
@ -34,6 +33,9 @@
top: 0;
}
}
#qr--bar.popoutVisible {
padding-right: 2.5em;
}
#qr--popout {
display: flex;

View File

@ -54,12 +54,7 @@
<small data-i18n="Replace With">Replace With</small>
</label>
<div>
<textarea
class="regex_replace_string text_pole wide100p textarea_compact"
data-i18n="[placeholder]ext_regex_replace_string_placeholder"
placeholder="Use {{match}} to include the matched text from the Find Regex or $1, $2, etc. for capture groups."
rows="2"
></textarea>
<textarea class="regex_replace_string text_pole wide100p textarea_compact" data-i18n="[placeholder]ext_regex_replace_string_placeholder" placeholder="Use {{match}} to include the matched text from the Find Regex or $1, $2, etc. for capture groups." rows="2"></textarea>
</div>
</div>
<div class="flex1">
@ -67,11 +62,7 @@
<small data-i18n="Trim Out">Trim Out</small>
</label>
<div>
<textarea
class="regex_trim_strings text_pole wide100p textarea_compact" data-i18n="[placeholder]ext_regex_trim_placeholder"
placeholder="Globally trims any unwanted parts from a regex match before replacement. Separate each element by an enter."
rows="3"
></textarea>
<textarea class="regex_trim_strings text_pole wide100p textarea_compact" data-i18n="[placeholder]ext_regex_trim_placeholder" placeholder="Globally trims any unwanted parts from a regex match before replacement. Separate each element by an enter." rows="3"></textarea>
</div>
</div>
</div>
@ -126,17 +117,6 @@
<input type="checkbox" name="disabled" />
<span data-i18n="Disabled">Disabled</span>
</label>
<label class="checkbox flex-container" title="Chat history won't change, only the message rendered in the UI.">
<input type="checkbox" name="only_format_display" />
<span data-i18n="Only Format Display">Only Format Display</span>
</label>
<label class="checkbox flex-container" data-i18n="[title]ext_regex_only_format_prompt_desc" title="Chat history won't change, only the prompt as the request is sent (on generation).">
<input type="checkbox" name="only_format_prompt"/>
<span>
<span data-i18n="Only Format Prompt (?)">Only Format Prompt</span>
<span class="fa-solid fa-circle-question note-link-span"></span>
</span>
</label>
<label class="checkbox flex-container">
<input type="checkbox" name="run_on_edit" />
<span data-i18n="Run On Edit">Run On Edit</span>
@ -148,6 +128,19 @@
<span class="fa-solid fa-circle-question note-link-span"></span>
</span>
</label>
<span>
<small data-i18n="ext_regex_other_options">Ephemerality</small>
<span class="fa-solid fa-circle-question note-link-span" title="By default, regex scripts alter the chat file directly and irreversibly.&#13;Enabling either (or both) of the options below will prevent chat file alteration, while still altering the specified item(s)."></span>
</span>
<label class="checkbox flex-container" title="Chat history file contents won't change, but regex will be applied to the messages displayed in the Chat UI.">
<input type="checkbox" name="only_format_display" />
<span data-i18n="Only Format Display">Alter Chat Display</span>
</label>
<label class="checkbox flex-container" data-i18n="[title]ext_regex_only_format_prompt_desc" title="Chat history file contents won't change, but regex will be applied to the outgoing prompt before it is sent to the LLM.">
<input type="checkbox" name="only_format_prompt" />
<span data-i18n="Only Format Prompt (?)">Alter Outgoing Prompt</span>
</label>
</div>
</div>
</div>

View File

@ -1,5 +1,5 @@
import { getRequestHeaders } from '../../script.js';
import { extension_settings } from '../extensions.js';
import { extension_settings, openThirdPartyExtensionMenu } from '../extensions.js';
import { oai_settings } from '../openai.js';
import { SECRET_KEYS, secret_state } from '../secrets.js';
import { textgen_types, textgenerationwebui_settings } from '../textgen-settings.js';
@ -141,7 +141,7 @@ function throwIfInvalidModel(useReverseProxy) {
}
if (extension_settings.caption.multimodal_api === 'google' && !secret_state[SECRET_KEYS.MAKERSUITE] && !useReverseProxy) {
throw new Error('MakerSuite API key is not set.');
throw new Error('Google AI Studio API key is not set.');
}
if (extension_settings.caption.multimodal_api === 'ollama' && !textgenerationwebui_settings.server_urls[textgen_types.OLLAMA]) {
@ -176,3 +176,86 @@ function throwIfInvalidModel(useReverseProxy) {
throw new Error('Custom API URL is not set.');
}
}
/**
* Check if the WebLLM extension is installed and supported.
* @returns {boolean} Whether the extension is installed and supported
*/
export function isWebLlmSupported() {
if (!('gpu' in navigator)) {
const warningKey = 'webllm_browser_warning_shown';
if (!sessionStorage.getItem(warningKey)) {
toastr.error('Your browser does not support the WebGPU API. Please use a different browser.', 'WebLLM', {
preventDuplicates: true,
timeOut: 0,
extendedTimeOut: 0,
});
sessionStorage.setItem(warningKey, '1');
}
return false;
}
if (!('llm' in SillyTavern)) {
const warningKey = 'webllm_extension_warning_shown';
if (!sessionStorage.getItem(warningKey)) {
toastr.error('WebLLM extension is not installed. Click here to install it.', 'WebLLM', {
timeOut: 0,
extendedTimeOut: 0,
preventDuplicates: true,
onclick: () => openThirdPartyExtensionMenu('https://github.com/SillyTavern/Extension-WebLLM'),
});
sessionStorage.setItem(warningKey, '1');
}
return false;
}
return true;
}
/**
* Generates text in response to a chat prompt using WebLLM.
* @param {any[]} messages Messages to use for generating
* @param {object} params Additional parameters
* @returns {Promise<string>} Generated response
*/
export async function generateWebLlmChatPrompt(messages, params = {}) {
if (!isWebLlmSupported()) {
throw new Error('WebLLM extension is not installed.');
}
console.debug('WebLLM chat completion request:', messages, params);
const engine = SillyTavern.llm;
const response = await engine.generateChatPrompt(messages, params);
console.debug('WebLLM chat completion response:', response);
return response;
}
/**
* Counts the number of tokens in the provided text using WebLLM's default model.
* @param {string} text Text to count tokens in
* @returns {Promise<number>} Number of tokens in the text
*/
export async function countWebLlmTokens(text) {
if (!isWebLlmSupported()) {
throw new Error('WebLLM extension is not installed.');
}
const engine = SillyTavern.llm;
const response = await engine.countTokens(text);
return response;
}
/**
* Gets the context size of WebLLM's default model.
* @returns {Promise<number>} Size of the context in the WebLLM model
*/
export async function getWebLlmContextSize() {
if (!isWebLlmSupported()) {
throw new Error('WebLLM extension is not installed.');
}
const engine = SillyTavern.llm;
await engine.loadModel();
const model = await engine.getCurrentModelInfo();
return model?.context_size;
}
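/**
 * Illustrative sketch (not part of this diff): a minimal end-to-end use of the
 * WebLLM helpers above. The system prompt, the 250-token cap, and the 0.75 headroom
 * ratio are assumptions for the example only.
 */
async function sketchWebLlmSummary(text) {
    if (!isWebLlmSupported()) {
        return '';
    }
    const contextSize = await getWebLlmContextSize();
    const tokens = await countWebLlmTokens(text);
    if (tokens > contextSize * 0.75) {
        console.debug('Input may be too long for a single WebLLM pass', { tokens, contextSize });
    }
    const messages = [
        { role: 'system', content: 'Summarize the text sent by the user.' },
        { role: 'user', content: text },
    ];
    return await generateWebLlmChatPrompt(messages, { max_tokens: 250 });
}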

View File

@ -30,7 +30,7 @@ import { SlashCommand } from '../../slash-commands/SlashCommand.js';
import { ARGUMENT_TYPE, SlashCommandArgument, SlashCommandNamedArgument } from '../../slash-commands/SlashCommandArgument.js';
import { debounce_timeout } from '../../constants.js';
import { SlashCommandEnumValue } from '../../slash-commands/SlashCommandEnumValue.js';
import { POPUP_TYPE, Popup, callGenericPopup } from '../../popup.js';
import { POPUP_RESULT, POPUP_TYPE, Popup, callGenericPopup } from '../../popup.js';
export { MODULE_NAME };
const MODULE_NAME = 'sd';
@ -51,6 +51,8 @@ const sources = {
drawthings: 'drawthings',
pollinations: 'pollinations',
stability: 'stability',
blockentropy: 'blockentropy',
huggingface: 'huggingface',
};
const initiators = {
@ -58,6 +60,7 @@ const initiators = {
action: 'action',
interactive: 'interactive',
wand: 'wand',
swipe: 'swipe',
};
const generationMode = {
@ -452,6 +455,7 @@ async function loadSettings() {
$('#sd_command_visible').prop('checked', extension_settings.sd.command_visible);
$('#sd_interactive_visible').prop('checked', extension_settings.sd.interactive_visible);
$('#sd_stability_style_preset').val(extension_settings.sd.stability_style_preset);
$('#sd_huggingface_model_id').val(extension_settings.sd.huggingface_model_id);
for (const style of extension_settings.sd.styles) {
const option = document.createElement('option');
@ -1089,6 +1093,11 @@ function onComfyUrlInput() {
saveSettingsDebounced();
}
function onHFModelInput() {
extension_settings.sd.huggingface_model_id = $('#sd_huggingface_model_id').val();
saveSettingsDebounced();
}
function onComfyWorkflowChange() {
extension_settings.sd.comfy_workflow = $('#sd_comfy_workflow').find(':selected').val();
saveSettingsDebounced();
@ -1096,7 +1105,18 @@ function onComfyWorkflowChange() {
async function onStabilityKeyClick() {
const popupText = 'Stability AI API Key:';
const key = await callGenericPopup(popupText, POPUP_TYPE.INPUT);
const key = await callGenericPopup(popupText, POPUP_TYPE.INPUT, '', {
customButtons: [{
text: 'Remove Key',
appendAtEnd: true,
result: POPUP_RESULT.NEGATIVE,
action: async () => {
await writeSecret(SECRET_KEYS.STABILITY, '');
toastr.success('API Key removed');
await loadSettingOptions();
},
}],
});
if (!key) {
return;
@ -1222,7 +1242,16 @@ async function onModelChange() {
extension_settings.sd.model = $('#sd_model').find(':selected').val();
saveSettingsDebounced();
const cloudSources = [sources.horde, sources.novel, sources.openai, sources.togetherai, sources.pollinations, sources.stability];
const cloudSources = [
sources.horde,
sources.novel,
sources.openai,
sources.togetherai,
sources.pollinations,
sources.stability,
sources.blockentropy,
sources.huggingface,
];
if (cloudSources.includes(extension_settings.sd.source)) {
return;
@ -1434,6 +1463,12 @@ async function loadSamplers() {
case sources.stability:
samplers = ['N/A'];
break;
case sources.blockentropy:
samplers = ['N/A'];
break;
case sources.huggingface:
samplers = ['N/A'];
break;
}
for (const sampler of samplers) {
@ -1620,6 +1655,12 @@ async function loadModels() {
case sources.stability:
models = await loadStabilityModels();
break;
case sources.blockentropy:
models = await loadBlockEntropyModels();
break;
case sources.huggingface:
models = [{ value: '', text: '<Enter Model ID above>' }];
break;
}
for (const model of models) {
@ -1649,49 +1690,13 @@ async function loadStabilityModels() {
async function loadPollinationsModels() {
return [
{
value: 'pixart',
text: 'PixArt-αlpha',
},
{
value: 'playground',
text: 'Playground v2',
},
{
value: 'dalle3xl',
text: 'DALL•E 3 XL',
},
{
value: 'formulaxl',
text: 'FormulaXL',
},
{
value: 'dreamshaper',
text: 'DreamShaper',
},
{
value: 'deliberate',
text: 'Deliberate',
},
{
value: 'dpo',
text: 'SDXL-DPO',
},
{
value: 'swizz8',
text: 'Swizz8',
},
{
value: 'juggernaut',
text: 'Juggernaut',
value: 'flux',
text: 'FLUX.1 [schnell]',
},
{
value: 'turbo',
text: 'SDXL Turbo',
},
{
value: 'realvis',
text: 'Realistic Vision',
},
];
}
@ -1714,6 +1719,26 @@ async function loadTogetherAIModels() {
return [];
}
async function loadBlockEntropyModels() {
if (!secret_state[SECRET_KEYS.BLOCKENTROPY]) {
console.debug('Block Entropy API key is not set.');
return [];
}
const result = await fetch('/api/sd/blockentropy/models', {
method: 'POST',
headers: getRequestHeaders(),
});
console.log(result);
if (result.ok) {
const data = await result.json();
console.log(data);
return data;
}
return [];
}
async function loadHordeModels() {
const result = await fetch('/api/horde/sd-models', {
method: 'POST',
@ -1980,6 +2005,12 @@ async function loadSchedulers() {
case sources.stability:
schedulers = ['N/A'];
break;
case sources.blockentropy:
schedulers = ['N/A'];
break;
case sources.huggingface:
schedulers = ['N/A'];
break;
}
for (const scheduler of schedulers) {
@ -2056,6 +2087,12 @@ async function loadVaes() {
case sources.stability:
vaes = ['N/A'];
break;
case sources.blockentropy:
vaes = ['N/A'];
break;
case sources.huggingface:
vaes = ['N/A'];
break;
}
for (const vae of vaes) {
@ -2267,9 +2304,9 @@ async function generatePicture(initiator, args, trigger, message, callback) {
const quietPrompt = getQuietPrompt(generationType, trigger);
const context = getContext();
// if context.characterId is not null, then we get context.characters[context.characterId].avatar, else we get groupId and context.groups[groupId].id
// sadly, groups is not an array, but is a dict with keys being index numbers, so we have to filter it
const characterName = context.characterId ? context.characters[context.characterId].name : context.groups[Object.keys(context.groups).filter(x => context.groups[x].id === context.groupId)[0]]?.id?.toString();
const characterName = context.groupId
? context.groups[Object.keys(context.groups).filter(x => context.groups[x].id === context.groupId)[0]]?.id?.toString()
: context.characters[context.characterId]?.name;
if (generationType == generationMode.BACKGROUND) {
const callbackOriginal = callback;
@ -2584,6 +2621,12 @@ async function sendGenerationRequest(generationType, prompt, additionalNegativeP
case sources.stability:
result = await generateStabilityImage(prefixedPrompt, negativePrompt, signal);
break;
case sources.blockentropy:
result = await generateBlockEntropyImage(prefixedPrompt, negativePrompt, signal);
break;
case sources.huggingface:
result = await generateHuggingFaceImage(prefixedPrompt, signal);
break;
}
if (!result.data) {
@ -2639,6 +2682,40 @@ async function generateTogetherAIImage(prompt, negativePrompt, signal) {
}
}
async function generateBlockEntropyImage(prompt, negativePrompt, signal) {
const result = await fetch('/api/sd/blockentropy/generate', {
method: 'POST',
headers: getRequestHeaders(),
signal: signal,
body: JSON.stringify({
prompt: prompt,
negative_prompt: negativePrompt,
model: extension_settings.sd.model,
steps: extension_settings.sd.steps,
width: extension_settings.sd.width,
height: extension_settings.sd.height,
seed: extension_settings.sd.seed >= 0 ? extension_settings.sd.seed : undefined,
}),
});
if (result.ok) {
const data = await result.json();
// Default format is 'jpg'
let format = 'jpg';
// Check if a format is specified in the result
if (data.format) {
format = data.format.toLowerCase();
}
return { format: format, data: data.images[0] };
} else {
const text = await result.text();
throw new Error(text);
}
}
/**
* Generates an image using the Pollinations API.
* @param {string} prompt - The main instruction used to guide the image generation.
@ -3183,6 +3260,34 @@ async function generateComfyImage(prompt, negativePrompt, signal) {
return { format: 'png', data: await promptResult.text() };
}
/**
* Generates an image using the Hugging Face Inference API with the provided prompt and the selected model.
* @param {string} prompt - The main instruction used to guide the image generation.
* @param {AbortSignal} signal - An AbortSignal object that can be used to cancel the request.
* @returns {Promise<{format: string, data: string}>} - A promise that resolves when the image generation and processing are complete.
*/
async function generateHuggingFaceImage(prompt, signal) {
const result = await fetch('/api/sd/huggingface/generate', {
method: 'POST',
headers: getRequestHeaders(),
signal: signal,
body: JSON.stringify({
model: extension_settings.sd.huggingface_model_id,
prompt: prompt,
}),
});
if (result.ok) {
const data = await result.json();
return { format: 'jpg', data: data.image };
} else {
const text = await result.text();
throw new Error(text);
}
}
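/**
 * Illustrative sketch (not part of this diff): calling the helper above directly,
 * assuming a model ID is set in extension_settings.sd.huggingface_model_id and the
 * HUGGINGFACE secret exists. The prompt string is only an example value.
 */
async function sketchHuggingFaceCall() {
    const controller = new AbortController();
    const { format, data } = await generateHuggingFaceImage('a watercolor fox at dusk', controller.signal);
    console.debug(`Received ${format} image (${data.length} base64 characters)`);
    return data;
}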
async function onComfyOpenWorkflowEditorClick() {
let workflow = await (await fetch('/api/sd/comfy/workflow', {
method: 'POST',
@ -3348,11 +3453,15 @@ async function sendMessage(prompt, image, generationType, additionalNegativePref
generationType: generationType,
negative: additionalNegativePrefix,
inline_image: false,
image_swipes: [image],
},
};
context.chat.push(message);
const messageId = context.chat.length - 1;
await eventSource.emit(event_types.MESSAGE_RECEIVED, messageId);
context.addOneMessage(message);
context.saveChat();
await eventSource.emit(event_types.CHARACTER_MESSAGE_RENDERED, messageId);
await context.saveChat();
}
/**
@ -3396,7 +3505,7 @@ async function addSDGenButtons() {
$(document).on('click touchend', function (e) {
const target = $(e.target);
if (target.is(dropdown) || target.closest(dropdown).length) return;
if ((target.is(button) || target.closest(button).length) && !dropdown.is(':visible') && $('#send_but').is(':visible')) {
if ((target.is(button) || target.closest(button).length) && !dropdown.is(':visible')) {
e.preventDefault();
dropdown.fadeIn(animation_duration);
@ -3456,6 +3565,10 @@ function isValidState() {
return true;
case sources.stability:
return secret_state[SECRET_KEYS.STABILITY];
case sources.blockentropy:
return secret_state[SECRET_KEYS.BLOCKENTROPY];
case sources.huggingface:
return secret_state[SECRET_KEYS.HUGGINGFACE];
}
}
@ -3485,7 +3598,9 @@ async function sdMessageButton(e) {
const $mes = $icon.closest('.mes');
const message_id = $mes.attr('mesid');
const message = context.chat[message_id];
const characterFileName = context.characterId ? context.characters[context.characterId].name : context.groups[Object.keys(context.groups).filter(x => context.groups[x].id === context.groupId)[0]]?.id?.toString();
const characterFileName = context.groupId
? context.groups[Object.keys(context.groups).filter(x => context.groups[x].id === context.groupId)[0]]?.id?.toString()
: context.characters[context.characterId]?.name;
const messageText = message?.mes;
const hasSavedImage = message?.extra?.image && message?.extra?.title;
const hasSavedNegative = message?.extra?.negative;
@ -3529,10 +3644,23 @@ async function sdMessageButton(e) {
function saveGeneratedImage(prompt, image, generationType, negative) {
// Some message sources may not create the extra object
if (typeof message.extra !== 'object') {
if (typeof message.extra !== 'object' || message.extra === null) {
message.extra = {};
}
// Add image to the swipe list if it's not already there
if (!Array.isArray(message.extra.image_swipes)) {
message.extra.image_swipes = [];
}
const swipes = message.extra.image_swipes;
if (message.extra.image && !swipes.includes(message.extra.image)) {
swipes.push(message.extra.image);
}
swipes.push(image);
// If the message already contains an image and it's not inline - leave it as is
message.extra.inline_image = message.extra.image && !message.extra.inline_image ? false : true;
message.extra.image = image;
@ -3571,6 +3699,99 @@ async function writePromptFields(characterId) {
await writeExtensionField(characterId, 'sd_character_prompt', promptObject);
}
/**
* Switches an image to the next or previous one in the swipe list.
* @param {object} args Event arguments
* @param {any} args.message Message object
* @param {JQuery<HTMLElement>} args.element Message element
* @param {string} args.direction Swipe direction
* @returns {Promise<void>}
*/
async function onImageSwiped({ message, element, direction }) {
const context = getContext();
const animationClass = 'fa-fade';
const messageImg = element.find('.mes_img');
// Current image is already animating
if (messageImg.hasClass(animationClass)) {
return;
}
const swipes = message?.extra?.image_swipes;
if (!Array.isArray(swipes)) {
console.warn('No image swipes found in the message');
return;
}
const currentIndex = swipes.indexOf(message.extra.image);
if (currentIndex === -1) {
console.warn('Current image not found in the swipes');
return;
}
// Switch to previous image or wrap around if at the beginning
if (direction === 'left') {
const newIndex = currentIndex === 0 ? swipes.length - 1 : currentIndex - 1;
message.extra.image = swipes[newIndex];
// Update the image in the message
appendMediaToMessage(message, element, false);
}
// Switch to next image or generate a new one if at the end
if (direction === 'right') {
const newIndex = currentIndex === swipes.length - 1 ? swipes.length : currentIndex + 1;
if (newIndex === swipes.length) {
const abortController = new AbortController();
const swipeControls = element.find('.mes_img_swipes');
const stopButton = document.getElementById('sd_stop_gen');
const stopListener = () => abortController.abort('Aborted by user');
const generationType = message?.extra?.generationType ?? generationMode.FREE;
const dimensions = setTypeSpecificDimensions(generationType);
const originalSeed = extension_settings.sd.seed;
extension_settings.sd.seed = Math.round(Math.random() * Number.MAX_SAFE_INTEGER);
let imagePath = '';
try {
$(stopButton).show();
eventSource.once(CUSTOM_STOP_EVENT, stopListener);
const callback = () => { };
const hasNegative = message.extra.negative;
const prompt = await refinePrompt(message.extra.title, false, false);
const negativePromptPrefix = hasNegative ? await refinePrompt(message.extra.negative, false, true) : '';
const characterName = context.groupId
? context.groups[Object.keys(context.groups).filter(x => context.groups[x].id === context.groupId)[0]]?.id?.toString()
: context.characters[context.characterId]?.name;
messageImg.addClass(animationClass);
swipeControls.hide();
imagePath = await sendGenerationRequest(generationType, prompt, negativePromptPrefix, characterName, callback, initiators.swipe, abortController.signal);
} finally {
$(stopButton).hide();
messageImg.removeClass(animationClass);
swipeControls.show();
eventSource.removeListener(CUSTOM_STOP_EVENT, stopListener);
restoreOriginalDimensions(dimensions);
extension_settings.sd.seed = originalSeed;
}
if (!imagePath) {
return;
}
swipes.push(imagePath);
}
message.extra.image = swipes[newIndex];
appendMediaToMessage(message, element, false);
}
await context.saveChat();
}
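A minimal sketch of the swipe state this function operates on, stored under message.extra (field names come from the code above; the paths and prompt text are illustrative):

const exampleMessage = {
    mes: 'Generated image',
    extra: {
        image: '/user/images/Seraphina/img_0002.png',   // image currently shown
        image_swipes: [                                  // full swipe history, in order
            '/user/images/Seraphina/img_0001.png',
            '/user/images/Seraphina/img_0002.png',
        ],
        title: 'a castle on a hill at dusk',             // prompt, reused when swiping past the end
        negative: 'blurry, low quality',                 // optional negative prompt prefix
        inline_image: false,
        generationType: generationMode.FREE,
    },
};
// Swiping left/right moves extra.image through extra.image_swipes; swiping right
// past the last entry generates a new image with the saved prompt and appends it.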
jQuery(async () => {
await addSDGenButtons();
@ -3688,6 +3909,7 @@ jQuery(async () => {
$('#sd_swap_dimensions').on('click', onSwapDimensionsClick);
$('#sd_stability_key').on('click', onStabilityKeyClick);
$('#sd_stability_style_preset').on('change', onStabilityStylePresetChange);
$('#sd_huggingface_model_id').on('input', onHFModelInput);
$('.sd_settings .inline-drawer-toggle').on('click', function () {
initScrollHeight($('#sd_prompt_prefix'));
@ -3709,6 +3931,8 @@ jQuery(async () => {
}
});
eventSource.on(event_types.IMAGE_SWIPED, onImageSwiped);
eventSource.on(event_types.CHAT_CHANGED, onChatChanged);
await loadSettings();

View File

@ -29,7 +29,8 @@
</label>
<label for="sd_expand" class="checkbox_label" data-i18n="[title]sd_expand" title="Automatically extend prompts using text generation model">
<input id="sd_expand" type="checkbox" />
<span data-i18n="sd_expand_txt">Auto-enhance prompts</span>
<span data-i18n="sd_expand_txt">Auto-extend prompts</span>
<span class="right_menu_button fa-solid fa-triangle-exclamation" data-i18n="[title]sd_expand_warning" title="May produce unexpected results. Manual prompt editing is recommended."></span>
</label>
<label for="sd_snap" class="checkbox_label" data-i18n="[title]sd_snap" title="Snap generation requests with a forced aspect ratio (portraits, backgrounds) to the nearest known resolution, while trying to preserve the absolute pixel counts (recommended for SDXL).">
<input id="sd_snap" type="checkbox" />
@ -37,9 +38,11 @@
</label>
<label for="sd_source" data-i18n="Source">Source</label>
<select id="sd_source">
<option value="blockentropy">Block Entropy</option>
<option value="comfy">ComfyUI</option>
<option value="drawthings">DrawThings HTTP API</option>
<option value="extras">Extras API (local / remote)</option>
<option value="huggingface">HuggingFace Inference API (serverless)</option>
<option value="novel">NovelAI Diffusion</option>
<option value="openai">OpenAI (DALL-E)</option>
<option value="pollinations">Pollinations</option>
@ -81,6 +84,11 @@
<!-- (Original Text)<b>Important:</b> run DrawThings app with HTTP API switch enabled in the UI! The server must be accessible from the SillyTavern host machine. -->
<i><b data-i18n="Important:">Important:</b></i><i data-i18n="sd_drawthings_auth_txt"> run DrawThings app with HTTP API switch enabled in the UI! The server must be accessible from the SillyTavern host machine.</i>
</div>
<div data-sd-source="huggingface">
<i>Hint: Save an API key in the Hugging Face (Text Completion) API settings to use it here.</i>
<label for="sd_huggingface_model_id" data-i18n="Model ID">Model ID</label>
<input id="sd_huggingface_model_id" type="text" class="text_pole" data-i18n="[placeholder]e.g. black-forest-labs/FLUX.1-dev" placeholder="e.g. black-forest-labs/FLUX.1-dev" value="" />
</div>
<div data-sd-source="vlad">
<label for="sd_vlad_url">SD.Next API URL</label>
<div class="flex-container flexnowrap">
@ -378,7 +386,7 @@
</label>
</div>
<div data-sd-source="novel,togetherai,pollinations,comfy,drawthings,vlad,auto,horde,extras,stability" class="marginTop5">
<div data-sd-source="novel,togetherai,pollinations,comfy,drawthings,vlad,auto,horde,extras,stability,blockentropy" class="marginTop5">
<label for="sd_seed">
<span data-i18n="Seed">Seed</span>
<small data-i18n="(-1 for random)">(-1 for random)</small>

View File

@ -10,7 +10,7 @@ import {
updateMessageBlock,
} from '../../../script.js';
import { extension_settings, getContext, renderExtensionTemplateAsync } from '../../extensions.js';
import { POPUP_TYPE, callGenericPopup } from '../../popup.js';
import { POPUP_RESULT, POPUP_TYPE, callGenericPopup } from '../../popup.js';
import { findSecret, secret_state, writeSecret } from '../../secrets.js';
import { SlashCommand } from '../../slash-commands/SlashCommand.js';
import { ARGUMENT_TYPE, SlashCommandArgument, SlashCommandNamedArgument } from '../../slash-commands/SlashCommandArgument.js';
@ -621,7 +621,18 @@ jQuery(async () => {
const secretKey = extension_settings.translate.provider + '_url';
const savedUrl = secret_state[secretKey] ? await findSecret(secretKey) : '';
const url = await callGenericPopup(popupText, POPUP_TYPE.INPUT, savedUrl);
const url = await callGenericPopup(popupText, POPUP_TYPE.INPUT, savedUrl, {
customButtons: [{
text: 'Remove URL',
appendAtEnd: true,
result: POPUP_RESULT.NEGATIVE,
action: async () => {
await writeSecret(secretKey, '');
toastr.success('API URL removed');
$('#translate_url_button').toggleClass('success', !!secret_state[secretKey]);
},
}],
});
if (url == false || url == '') {
return;
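For reference, a minimal sketch of the customButtons option used in the popup call above (option names are from popup.js; the button's action body is illustrative):

const value = await callGenericPopup('Enter a value', POPUP_TYPE.INPUT, '', {
    customButtons: [{
        text: 'Remove URL',               // button label
        appendAtEnd: true,                // append after the built-in buttons
        result: POPUP_RESULT.NEGATIVE,    // result reported when this button is clicked
        action: async () => {
            // side effect to run on click, e.g. clearing a saved secret
        },
    }],
});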

View File

@ -1,5 +1,5 @@
import { getRequestHeaders } from '../../../script.js';
import { POPUP_TYPE, callGenericPopup } from '../../popup.js';
import { POPUP_RESULT, POPUP_TYPE, callGenericPopup } from '../../popup.js';
import { SECRET_KEYS, findSecret, secret_state, writeSecret } from '../../secrets.js';
import { getPreviewString, saveTtsProviderSettings } from './index.js';
export { AzureTtsProvider };
@ -70,7 +70,19 @@ class AzureTtsProvider {
const popupText = 'Azure TTS API Key';
const savedKey = secret_state[SECRET_KEYS.AZURE_TTS] ? await findSecret(SECRET_KEYS.AZURE_TTS) : '';
const key = await callGenericPopup(popupText, POPUP_TYPE.INPUT, savedKey);
const key = await callGenericPopup(popupText, POPUP_TYPE.INPUT, savedKey, {
customButtons: [{
text: 'Remove Key',
appendAtEnd: true,
result: POPUP_RESULT.NEGATIVE,
action: async () => {
await writeSecret(SECRET_KEYS.AZURE_TTS, '');
$('#azure_tts_key').toggleClass('success', secret_state[SECRET_KEYS.AZURE_TTS]);
toastr.success('API Key removed');
await this.onRefreshClick();
},
}],
});
if (key == false || key == '') {
return;

View File

@ -9,6 +9,7 @@ import { SystemTtsProvider } from './system.js';
import { NovelTtsProvider } from './novel.js';
import { power_user } from '../../power-user.js';
import { OpenAITtsProvider } from './openai.js';
import { OpenAICompatibleTtsProvider } from './openai-compatible.js';
import { XTTSTtsProvider } from './xtts.js';
import { VITSTtsProvider } from './vits.js';
import { GSVITtsProvider } from './gsvi.js';
@ -82,20 +83,21 @@ export function getPreviewString(lang) {
}
const ttsProviders = {
ElevenLabs: ElevenLabsTtsProvider,
Silero: SileroTtsProvider,
XTTSv2: XTTSTtsProvider,
VITS: VITSTtsProvider,
GSVI: GSVITtsProvider,
SBVits2: SBVits2TtsProvider,
System: SystemTtsProvider,
AllTalk: AllTalkTtsProvider,
Azure: AzureTtsProvider,
Coqui: CoquiTtsProvider,
Edge: EdgeTtsProvider,
ElevenLabs: ElevenLabsTtsProvider,
GSVI: GSVITtsProvider,
Novel: NovelTtsProvider,
OpenAI: OpenAITtsProvider,
AllTalk: AllTalkTtsProvider,
'OpenAI Compatible': OpenAICompatibleTtsProvider,
SBVits2: SBVits2TtsProvider,
Silero: SileroTtsProvider,
SpeechT5: SpeechT5TtsProvider,
Azure: AzureTtsProvider,
System: SystemTtsProvider,
VITS: VITSTtsProvider,
XTTSv2: XTTSTtsProvider,
};
let ttsProvider;
let ttsProviderName;
@ -753,6 +755,11 @@ async function onMessageEvent(messageId, lastCharIndex) {
const message = structuredClone(context.chat[messageId]);
const hashNew = getStringHash(message?.mes ?? '');
// Ignore prompt-hidden messages
if (message.is_system) {
return;
}
// if no new messages, or same message, or same message hash, do nothing
if (hashNew === lastMessageHash) {
return;

View File

@ -0,0 +1,193 @@
import { getRequestHeaders } from '../../../script.js';
import { callGenericPopup, POPUP_RESULT, POPUP_TYPE } from '../../popup.js';
import { findSecret, SECRET_KEYS, secret_state, writeSecret } from '../../secrets.js';
import { getPreviewString, saveTtsProviderSettings } from './index.js';
export { OpenAICompatibleTtsProvider };
class OpenAICompatibleTtsProvider {
settings;
voices = [];
separator = ' . ';
audioElement = document.createElement('audio');
defaultSettings = {
voiceMap: {},
model: 'tts-1',
speed: 1,
available_voices: ['alloy', 'echo', 'fable', 'onyx', 'nova', 'shimmer'],
provider_endpoint: 'http://127.0.0.1:8000/v1/audio/speech',
};
get settingsHtml() {
let html = `
<label for="openai_compatible_tts_endpoint">Provider Endpoint:</label>
<div class="flex-container alignItemsCenter">
<div class="flex1">
<input id="openai_compatible_tts_endpoint" type="text" class="text_pole" maxlength="250" value="${this.defaultSettings.provider_endpoint}"/>
</div>
<div id="openai_compatible_tts_key" class="menu_button menu_button_icon">
<i class="fa-solid fa-key"></i>
<span>API Key</span>
</div>
</div>
<label for="openai_compatible_model">Model:</label>
<input id="openai_compatible_model" type="text" class="text_pole" maxlength="250" value="${this.defaultSettings.model}"/>
<label for="openai_compatible_tts_voices">Available Voices (comma separated):</label>
<input id="openai_compatible_tts_voices" type="text" class="text_pole" maxlength="250" value="${this.defaultSettings.available_voices.join()}"/>
<label for="openai_compatible_tts_speed">Speed: <span id="openai_compatible_tts_speed_output"></span></label>
<input type="range" id="openai_compatible_tts_speed" value="1" min="0.25" max="4" step="0.05">`;
return html;
}
async loadSettings(settings) {
// Populate Provider UI given input settings
if (Object.keys(settings).length == 0) {
console.info('Using default TTS Provider settings');
}
// Only accept keys defined in defaultSettings
this.settings = this.defaultSettings;
for (const key in settings) {
if (key in this.settings) {
this.settings[key] = settings[key];
} else {
throw `Invalid setting passed to TTS Provider: ${key}`;
}
}
$('#openai_compatible_tts_endpoint').val(this.settings.provider_endpoint);
$('#openai_compatible_tts_endpoint').on('input', () => { this.onSettingsChange(); });
$('#openai_compatible_model').val(this.settings.model);
$('#openai_compatible_model').on('input', () => { this.onSettingsChange(); });
$('#openai_compatible_tts_voices').val(this.settings.available_voices.join());
$('#openai_compatible_tts_voices').on('input', () => { this.onSettingsChange(); });
$('#openai_compatible_tts_speed').val(this.settings.speed);
$('#openai_compatible_tts_speed').on('input', () => {
this.onSettingsChange();
});
$('#openai_compatible_tts_speed_output').text(this.settings.speed);
$('#openai_compatible_tts_key').toggleClass('success', secret_state[SECRET_KEYS.CUSTOM_OPENAI_TTS]);
$('#openai_compatible_tts_key').on('click', async () => {
const popupText = 'OpenAI-compatible TTS API Key';
const savedKey = secret_state[SECRET_KEYS.CUSTOM_OPENAI_TTS] ? await findSecret(SECRET_KEYS.CUSTOM_OPENAI_TTS) : '';
const key = await callGenericPopup(popupText, POPUP_TYPE.INPUT, savedKey, {
customButtons: [{
text: 'Remove Key',
appendAtEnd: true,
result: POPUP_RESULT.NEGATIVE,
action: async () => {
await writeSecret(SECRET_KEYS.CUSTOM_OPENAI_TTS, '');
$('#openai_compatible_tts_key').toggleClass('success', secret_state[SECRET_KEYS.CUSTOM_OPENAI_TTS]);
toastr.success('API Key removed');
await this.onRefreshClick();
},
}],
});
if (key == false || key == '') {
return;
}
await writeSecret(SECRET_KEYS.CUSTOM_OPENAI_TTS, String(key));
toastr.success('API Key saved');
$('#openai_compatible_tts_key').toggleClass('success', secret_state[SECRET_KEYS.CUSTOM_OPENAI_TTS]);
await this.onRefreshClick();
});
await this.checkReady();
console.debug('OpenAI Compatible TTS: Settings loaded');
}
onSettingsChange() {
// Update dynamically
this.settings.provider_endpoint = String($('#openai_compatible_tts_endpoint').val());
this.settings.model = String($('#openai_compatible_model').val());
this.settings.available_voices = String($('#openai_compatible_tts_voices').val()).split(',');
this.settings.speed = Number($('#openai_compatible_tts_speed').val());
$('#openai_compatible_tts_speed_output').text(this.settings.speed);
saveTtsProviderSettings();
}
async checkReady() {
await this.fetchTtsVoiceObjects();
}
async onRefreshClick() {
return;
}
async getVoice(voiceName) {
if (this.voices.length == 0) {
this.voices = await this.fetchTtsVoiceObjects();
}
const match = this.voices.filter(
oaicVoice => oaicVoice.name == voiceName,
)[0];
if (!match) {
throw `TTS Voice name ${voiceName} not found`;
}
return match;
}
async generateTts(text, voiceId) {
const response = await this.fetchTtsGeneration(text, voiceId);
return response;
}
async fetchTtsVoiceObjects() {
return this.settings.available_voices.map(v => {
return { name: v, voice_id: v, lang: 'en-US' };
});
}
async previewTtsVoice(voiceId) {
this.audioElement.pause();
this.audioElement.currentTime = 0;
const text = getPreviewString('en-US');
const response = await this.fetchTtsGeneration(text, voiceId);
if (!response.ok) {
throw new Error(`HTTP ${response.status}`);
}
const audio = await response.blob();
const url = URL.createObjectURL(audio);
this.audioElement.src = url;
this.audioElement.play();
this.audioElement.onended = () => URL.revokeObjectURL(url);
}
async fetchTtsGeneration(inputText, voiceId) {
console.info(`Generating new TTS for voice_id ${voiceId}`);
const response = await fetch('/api/openai/custom/generate-voice', {
method: 'POST',
headers: getRequestHeaders(),
body: JSON.stringify({
provider_endpoint: this.settings.provider_endpoint,
model: this.settings.model,
input: inputText,
voice: voiceId,
response_format: 'mp3',
speed: this.settings.speed,
}),
});
if (!response.ok) {
toastr.error(response.statusText, 'TTS Generation Failed');
throw new Error(`HTTP ${response.status}: ${await response.text()}`);
}
return response;
}
}
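A minimal usage sketch of the provider above, bypassing the settings UI wiring for brevity (the voice and endpoint come from defaultSettings; values are illustrative):

const provider = new OpenAICompatibleTtsProvider();
provider.settings = { ...provider.defaultSettings };            // normally populated by loadSettings()
const voice = await provider.getVoice('alloy');                 // resolved from settings.available_voices
const response = await provider.generateTts('Hello there.', voice.voice_id);
const audioBlob = await response.blob();                        // mp3 returned by the proxied endpoint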

View File

@ -124,7 +124,7 @@ class SystemTtsProvider {
if (hasEnabledVoice) {
return;
}
const utterance = new SpeechSynthesisUtterance('hi');
const utterance = new SpeechSynthesisUtterance(' . ');
utterance.volume = 0;
speechSynthesis.speak(utterance);
hasEnabledVoice = true;

View File

@ -30,6 +30,13 @@ import { textgen_types, textgenerationwebui_settings } from '../../textgen-setti
import { SlashCommandParser } from '../../slash-commands/SlashCommandParser.js';
import { SlashCommand } from '../../slash-commands/SlashCommand.js';
import { ARGUMENT_TYPE, SlashCommandArgument, SlashCommandNamedArgument } from '../../slash-commands/SlashCommandArgument.js';
import { callGenericPopup, POPUP_RESULT, POPUP_TYPE } from '../../popup.js';
import { generateWebLlmChatPrompt, isWebLlmSupported } from '../shared.js';
/**
* @typedef {object} HashedMessage
* @property {string} text - The hashed message text
*/
const MODULE_NAME = 'vectors';
@ -191,6 +198,11 @@ function splitByChunks(items) {
return chunkedItems;
}
/**
* Summarizes messages using the Extras API method.
* @param {HashedMessage[]} hashedMessages Array of hashed messages
* @returns {Promise<HashedMessage[]>} Summarized messages
*/
async function summarizeExtra(hashedMessages) {
for (const element of hashedMessages) {
try {
@ -222,6 +234,11 @@ async function summarizeExtra(hashedMessages) {
return hashedMessages;
}
/**
* Summarizes messages using the main API method.
* @param {HashedMessage[]} hashedMessages Array of hashed messages
* @returns {Promise<HashedMessage[]>} Summarized messages
*/
async function summarizeMain(hashedMessages) {
for (const element of hashedMessages) {
element.text = await generateRaw(element.text, '', false, false, settings.summary_prompt);
@ -230,12 +247,39 @@ async function summarizeMain(hashedMessages) {
return hashedMessages;
}
/**
* Summarizes messages using WebLLM.
* @param {HashedMessage[]} hashedMessages Array of hashed messages
* @returns {Promise<HashedMessage[]>} Summarized messages
*/
async function summarizeWebLLM(hashedMessages) {
if (!isWebLlmSupported()) {
console.warn('Vectors: WebLLM is not supported');
return hashedMessages;
}
for (const element of hashedMessages) {
const messages = [{ role: 'system', content: settings.summary_prompt }, { role: 'user', content: element.text }];
element.text = await generateWebLlmChatPrompt(messages);
}
return hashedMessages;
}
/**
* Summarizes messages using the chosen method.
* @param {HashedMessage[]} hashedMessages Array of hashed messages
* @param {string} endpoint Type of endpoint to use
* @returns {Promise<HashedMessage[]>} Summarized messages
*/
async function summarize(hashedMessages, endpoint = 'main') {
switch (endpoint) {
case 'main':
return await summarizeMain(hashedMessages);
case 'extras':
return await summarizeExtra(hashedMessages);
case 'webllm':
return await summarizeWebLLM(hashedMessages);
default:
console.error('Unsupported endpoint', endpoint);
}
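A minimal sketch of the data flow through the summarizers above (the chat text is illustrative):

/** @type {HashedMessage[]} */
let hashedMessages = [
    { text: 'The party reached the city gates at dusk.' },
    { text: 'A guard demanded papers that nobody carried.' },
];

// Each summarizer rewrites element.text in place and returns the same array.
hashedMessages = await summarize(hashedMessages, settings.summary_source); // 'main' | 'extras' | 'webllm'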
@ -357,7 +401,7 @@ async function processFiles(chat) {
const dataBankCollectionIds = await ingestDataBankAttachments();
if (dataBankCollectionIds.length) {
const queryText = await getQueryText(chat);
const queryText = await getQueryText(chat, 'file');
await injectDataBankChunks(queryText, dataBankCollectionIds);
}
@ -391,7 +435,7 @@ async function processFiles(chat) {
await vectorizeFile(fileText, fileName, collectionId, settings.chunk_size, settings.overlap_percent);
}
const queryText = await getQueryText(chat);
const queryText = await getQueryText(chat, 'file');
const fileChunks = await retrieveFileChunks(queryText, collectionId);
message.mes = `${fileChunks}\n\n${message.mes}`;
@ -552,7 +596,7 @@ async function rearrangeChat(chat) {
return;
}
const queryText = await getQueryText(chat);
const queryText = await getQueryText(chat, 'chat');
if (queryText.length === 0) {
console.debug('Vectors: No text to query');
@ -639,15 +683,16 @@ const onChatEvent = debounce(async () => await moduleWorker.update(), debounce_t
/**
* Gets the text to query from the chat
* @param {object[]} chat Chat messages
* @param {'file'|'chat'|'world-info'} initiator Initiator of the query
* @returns {Promise<string>} Text to query
*/
async function getQueryText(chat) {
async function getQueryText(chat, initiator) {
let queryText = '';
let i = 0;
let hashedMessages = chat.map(x => ({ text: String(substituteParams(x.mes)) }));
if (settings.summarize && settings.summarize_sent) {
if (initiator === 'chat' && settings.enabled_chats && settings.summarize && settings.summarize_sent) {
hashedMessages = await summarize(hashedMessages, settings.summary_source);
}
@ -1235,7 +1280,7 @@ async function activateWorldInfo(chat) {
}
// Perform a multi-query
const queryText = await getQueryText(chat);
const queryText = await getQueryText(chat, 'world-info');
if (queryText.length === 0) {
console.debug('Vectors: No text to query for WI');
@ -1299,11 +1344,30 @@ jQuery(async () => {
saveSettingsDebounced();
toggleSettings();
});
$('#api_key_nomicai').on('change', () => {
const nomicKey = String($('#api_key_nomicai').val()).trim();
if (nomicKey.length) {
writeSecret(SECRET_KEYS.NOMICAI, nomicKey);
$('#api_key_nomicai').on('click', async () => {
const popupText = 'NomicAI API Key:';
const key = await callGenericPopup(popupText, POPUP_TYPE.INPUT, '', {
customButtons: [{
text: 'Remove Key',
appendAtEnd: true,
result: POPUP_RESULT.NEGATIVE,
action: async () => {
await writeSecret(SECRET_KEYS.NOMICAI, '');
toastr.success('API Key removed');
$('#api_key_nomicai').toggleClass('success', !!secret_state[SECRET_KEYS.NOMICAI]);
saveSettingsDebounced();
},
}],
});
if (!key) {
return;
}
await writeSecret(SECRET_KEYS.NOMICAI, String(key));
$('#api_key_nomicai').toggleClass('success', !!secret_state[SECRET_KEYS.NOMICAI]);
toastr.success('API Key saved');
saveSettingsDebounced();
});
$('#vectors_togetherai_model').val(settings.togetherai_model).on('change', () => {
@ -1531,9 +1595,7 @@ jQuery(async () => {
$('#dialogue_popup_input').val(presetModel);
});
const validSecret = !!secret_state[SECRET_KEYS.NOMICAI];
const placeholder = validSecret ? '✔️ Key saved' : '❌ Missing key';
$('#api_key_nomicai').attr('placeholder', placeholder);
$('#api_key_nomicai').toggleClass('success', !!secret_state[SECRET_KEYS.NOMICAI]);
toggleSettings();
eventSource.on(event_types.MESSAGE_DELETED, onChatEvent);

View File

@ -12,7 +12,7 @@
<select id="vectors_source" class="text_pole">
<option value="cohere">Cohere</option>
<option value="extras">Extras</option>
<option value="palm">Google MakerSuite</option>
<option value="palm">Google AI Studio</option>
<option value="llamacpp">llama.cpp</option>
<option value="transformers" data-i18n="Local (Transformers)">Local (Transformers)</option>
<option value="mistral">MistralAI</option>
@ -103,17 +103,13 @@
</span>
</small>
<div class="flex-container flexFlowColumn" id="nomicai_apiKey">
<label for="api_key_nomicai">
<div class="flex-container alignItemsCenter" id="nomicai_apiKey">
<label for="api_key_nomicai" class="flex1">
<span data-i18n="NomicAI API Key">NomicAI API Key</span>
</label>
<div class="flex-container">
<input id="api_key_nomicai" name="api_key_nomicai" class="text_pole flex1 wide100p" maxlength="500" size="35" type="text" autocomplete="off">
<div title="Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_nomicai">
</div>
</div>
<div data-for="api_key_nomicai" class="neutral_warning" data-i18n="For privacy reasons, your API key will be hidden after you reload the page.">
For privacy reasons, your API key will be hidden after you reload the page.
<div id="api_key_nomicai" class="menu_button menu_button_icon">
<i class="fa-solid fa-key"></i>
<span data-i18n="Click to set">Click to set</span>
</div>
</div>
@ -378,10 +374,11 @@
<select id="vectors_summary_source" class="text_pole">
<option value="main" data-i18n="Main API">Main API</option>
<option value="extras" data-i18n="Extras API">Extras API</option>
<option value="webllm" data-i18n="WebLLM Extension">WebLLM Extension</option>
</select>
<label for="vectors_summary_prompt" title="Summary Prompt:">Summary Prompt:</label>
<small data-i18n="Only used when Main API is selected.">Only used when Main API is selected.</small>
<small data-i18n="Only used when Main API or WebLLM Extension is selected.">Only used when Main API or WebLLM Extension is selected.</small>
<textarea id="vectors_summary_prompt" class="text_pole textarea_compact" rows="6" placeholder="This prompt will be sent to AI to request the summary generation."></textarea>
</div>
</div>

View File

@ -120,6 +120,7 @@ const default_bias_presets = {
const max_2k = 2047;
const max_4k = 4095;
const max_8k = 8191;
const max_12k = 12287;
const max_16k = 16383;
const max_32k = 32767;
const max_64k = 65535;
@ -186,6 +187,7 @@ export const chat_completion_sources = {
PERPLEXITY: 'perplexity',
GROQ: 'groq',
ZEROONEAI: '01ai',
BLOCKENTROPY: 'blockentropy',
};
const character_names_behavior = {
@ -238,7 +240,7 @@ const default_settings = {
top_p_openai: 1.0,
top_k_openai: 0,
min_p_openai: 0,
top_a_openai: 1,
top_a_openai: 0,
repetition_penalty_openai: 1,
stream_openai: false,
websearch_cohere: false,
@ -268,6 +270,7 @@ const default_settings = {
perplexity_model: 'llama-3.1-70b-instruct',
groq_model: 'llama-3.1-70b-versatile',
zerooneai_model: 'yi-large',
blockentropy_model: 'be-70b-base-llama3.1',
custom_model: '',
custom_url: '',
custom_include_body: '',
@ -318,7 +321,7 @@ const oai_settings = {
top_p_openai: 1.0,
top_k_openai: 0,
min_p_openai: 0,
top_a_openai: 1,
top_a_openai: 0,
repetition_penalty_openai: 1,
stream_openai: false,
websearch_cohere: false,
@ -348,6 +351,7 @@ const oai_settings = {
perplexity_model: 'llama-3.1-70b-instruct',
groq_model: 'llama-3.1-70b-versatile',
zerooneai_model: 'yi-large',
blockentropy_model: 'be-70b-base-llama3.1',
custom_model: '',
custom_url: '',
custom_include_body: '',
@ -804,7 +808,8 @@ async function populateChatHistory(messages, prompts, chatCompletion, type = nul
// Reserve budget for group nudge
let groupNudgeMessage = null;
if (selected_group) {
const noGroupNudgeTypes = ['impersonate'];
if (selected_group && prompts.has('groupNudge') && !noGroupNudgeTypes.includes(type)) {
groupNudgeMessage = Message.fromPrompt(prompts.get('groupNudge'));
chatCompletion.reserveBudget(groupNudgeMessage);
}
@ -1542,6 +1547,8 @@ function getChatCompletionModel() {
return oai_settings.groq_model;
case chat_completion_sources.ZEROONEAI:
return oai_settings.zerooneai_model;
case chat_completion_sources.BLOCKENTROPY:
return oai_settings.blockentropy_model;
default:
throw new Error(`Unknown chat completion source: ${oai_settings.chat_completion_source}`);
}
@ -1655,6 +1662,23 @@ function saveModelList(data) {
$('#model_01ai_select').val(oai_settings.zerooneai_model).trigger('change');
}
if (oai_settings.chat_completion_source == chat_completion_sources.BLOCKENTROPY) {
$('#model_blockentropy_select').empty();
model_list.forEach((model) => {
$('#model_blockentropy_select').append(
$('<option>', {
value: model.id,
text: model.id,
}));
});
if (!oai_settings.blockentropy_model && model_list.length > 0) {
oai_settings.blockentropy_model = model_list[0].id;
}
$('#model_blockentropy_select').val(oai_settings.blockentropy_model).trigger('change');
}
}
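For context, a minimal sketch of the model_list shape the Block Entropy branch above consumes (only the id field is read; the second ID is illustrative):

const model_list = [
    { id: 'be-70b-base-llama3.1' },   // matches the default blockentropy_model
    { id: 'be-120b-instruct' },       // illustrative additional entry
];
// Each entry becomes an <option> in #model_blockentropy_select; if no model is
// saved yet, the first entry's id is adopted as oai_settings.blockentropy_model.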
function appendOpenRouterOptions(model_list, groupModels = false, sort = false) {
@ -3015,6 +3039,7 @@ function loadOpenAISettings(data, settings) {
oai_settings.cohere_model = settings.cohere_model ?? default_settings.cohere_model;
oai_settings.perplexity_model = settings.perplexity_model ?? default_settings.perplexity_model;
oai_settings.groq_model = settings.groq_model ?? default_settings.groq_model;
oai_settings.blockentropy_model = settings.blockentropy_model ?? default_settings.blockentropy_model;
oai_settings.zerooneai_model = settings.zerooneai_model ?? default_settings.zerooneai_model;
oai_settings.custom_model = settings.custom_model ?? default_settings.custom_model;
oai_settings.custom_url = settings.custom_url ?? default_settings.custom_url;
@ -3048,6 +3073,7 @@ function loadOpenAISettings(data, settings) {
oai_settings.names_behavior = settings.names_behavior ?? default_settings.names_behavior;
oai_settings.continue_postfix = settings.continue_postfix ?? default_settings.continue_postfix;
oai_settings.function_calling = settings.function_calling ?? default_settings.function_calling;
oai_settings.openrouter_providers = settings.openrouter_providers ?? default_settings.openrouter_providers;
// Migrate from old settings
if (settings.names_in_completion === true) {
@ -3093,6 +3119,7 @@ function loadOpenAISettings(data, settings) {
$('#model_groq_select').val(oai_settings.groq_model);
$(`#model_groq_select option[value="${oai_settings.groq_model}"`).attr('selected', true);
$('#model_01ai_select').val(oai_settings.zerooneai_model);
$('#model_blockentropy_select').val(oai_settings.blockentropy_model);
$('#custom_model_id').val(oai_settings.custom_model);
$('#custom_api_url_text').val(oai_settings.custom_url);
$('#openai_max_context').val(oai_settings.openai_max_context);
@ -3354,6 +3381,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
perplexity_model: settings.perplexity_model,
groq_model: settings.groq_model,
zerooneai_model: settings.zerooneai_model,
blockentropy_model: settings.blockentropy_model,
custom_model: settings.custom_model,
custom_url: settings.custom_url,
custom_include_body: settings.custom_include_body,
@ -3596,6 +3624,8 @@ async function onPresetImportFileChange(e) {
}
}
await eventSource.emit(event_types.OAI_PRESET_IMPORT_READY, { data: presetBody, presetName: name });
const savePresetSettings = await fetch(`/api/presets/save-openai?name=${name}`, {
method: 'POST',
headers: getRequestHeaders(),
@ -3651,6 +3681,7 @@ async function onExportPresetClick() {
sensitiveFields.forEach(field => delete preset[field]);
}
await eventSource.emit(event_types.OAI_PRESET_EXPORT_READY, preset);
const presetJsonString = JSON.stringify(preset, null, 4);
const presetFileName = `${oai_settings.preset_settings_openai}.json`;
download(presetJsonString, presetFileName, 'application/json');
@ -3791,6 +3822,7 @@ function onSettingsPresetChange() {
perplexity_model: ['#model_perplexity_select', 'perplexity_model', false],
groq_model: ['#model_groq_select', 'groq_model', false],
zerooneai_model: ['#model_01ai_select', 'zerooneai_model', false],
blockentropy_model: ['#model_blockentropy_select', 'blockentropy_model', false],
custom_model: ['#custom_model_id', 'custom_model', false],
custom_url: ['#custom_api_url_text', 'custom_url', false],
custom_include_body: ['#custom_include_body', 'custom_include_body', false],
@ -3889,7 +3921,7 @@ function getMaxContextOpenAI(value) {
if (oai_settings.max_context_unlocked) {
return unlocked_max;
}
else if (value.includes('gpt-4-turbo') || value.includes('gpt-4o') || value.includes('gpt-4-1106') || value.includes('gpt-4-0125') || value.includes('gpt-4-vision')) {
else if (value.includes('chatgpt-4o-latest') || value.includes('gpt-4-turbo') || value.includes('gpt-4o') || value.includes('gpt-4-1106') || value.includes('gpt-4-0125') || value.includes('gpt-4-vision')) {
return max_128k;
}
else if (value.includes('gpt-3.5-turbo-1106')) {
@ -4038,6 +4070,12 @@ async function onModelChange() {
oai_settings.zerooneai_model = value;
}
if (value && $(this).is('#model_blockentropy_select')) {
console.log('Block Entropy model changed to', value);
oai_settings.blockentropy_model = value;
$('#blockentropy_model_id').val(value).trigger('input');
}
if (value && $(this).is('#model_custom_select')) {
console.log('Custom model changed to', value);
oai_settings.custom_model = value;
@ -4326,6 +4364,29 @@ async function onModelChange() {
oai_settings.temp_openai = Math.min(oai_max_temp, oai_settings.temp_openai);
$('#temp_openai').attr('max', oai_max_temp).val(oai_settings.temp_openai).trigger('input');
}
if (oai_settings.chat_completion_source === chat_completion_sources.BLOCKENTROPY) {
if (oai_settings.max_context_unlocked) {
$('#openai_max_context').attr('max', unlocked_max);
}
else if (oai_settings.blockentropy_model.includes('llama3.1')) {
$('#openai_max_context').attr('max', max_16k);
}
else if (oai_settings.blockentropy_model.includes('72b')) {
$('#openai_max_context').attr('max', max_16k);
}
else if (oai_settings.blockentropy_model.includes('120b')) {
$('#openai_max_context').attr('max', max_12k);
}
else {
$('#openai_max_context').attr('max', max_8k);
}
oai_settings.openai_max_context = Math.min(oai_settings.openai_max_context, Number($('#openai_max_context').attr('max')));
$('#openai_max_context').val(oai_settings.openai_max_context).trigger('input');
oai_settings.temp_openai = Math.min(oai_max_temp, oai_settings.temp_openai);
$('#temp_openai').attr('max', oai_max_temp).val(oai_settings.temp_openai).trigger('input');
}
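In other words, the ceilings applied above map out as follows (token limits taken from the constants defined earlier):

// model id contains 'llama3.1' -> max_16k (16383 tokens)
// model id contains '72b'      -> max_16k (16383 tokens)
// model id contains '120b'     -> max_12k (12287 tokens)
// anything else                -> max_8k  (8191 tokens)
// max_context_unlocked overrides all of these with unlocked_max.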
$('#openai_max_context_counter').attr('max', Number($('#openai_max_context').attr('max')));
@ -4412,7 +4473,7 @@ async function onConnectButtonClick(e) {
}
if (!secret_state[SECRET_KEYS.MAKERSUITE] && !oai_settings.reverse_proxy) {
console.log('No secret key saved for MakerSuite');
console.log('No secret key saved for Google AI Studio');
return;
}
}
@ -4533,6 +4594,18 @@ async function onConnectButtonClick(e) {
return;
}
}
if (oai_settings.chat_completion_source == chat_completion_sources.BLOCKENTROPY) {
const api_key_blockentropy = String($('#api_key_blockentropy').val()).trim();
if (api_key_blockentropy.length) {
await writeSecret(SECRET_KEYS.BLOCKENTROPY, api_key_blockentropy);
}
if (!secret_state[SECRET_KEYS.BLOCKENTROPY]) {
console.log('No secret key saved for Block Entropy');
return;
}
}
startStatusLoading();
saveSettingsDebounced();
@ -4584,6 +4657,9 @@ function toggleChatCompletionForms() {
else if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) {
$('#model_custom_select').trigger('change');
}
else if (oai_settings.chat_completion_source == chat_completion_sources.BLOCKENTROPY) {
$('#model_blockentropy_select').trigger('change');
}
$('[data-source]').each(function () {
const validSources = $(this).data('source').split(',');
$(this).toggle(validSources.includes(oai_settings.chat_completion_source));
@ -4687,6 +4763,7 @@ export function isImageInliningSupported() {
'gpt-4-turbo',
'gpt-4o',
'gpt-4o-mini',
'chatgpt-4o-latest',
'yi-vision',
];
@ -5313,6 +5390,7 @@ $(document).ready(async function () {
$('#model_perplexity_select').on('change', onModelChange);
$('#model_groq_select').on('change', onModelChange);
$('#model_01ai_select').on('change', onModelChange);
$('#model_blockentropy_select').on('change', onModelChange);
$('#model_custom_select').on('change', onModelChange);
$('#settings_preset_openai').on('change', onSettingsPresetChange);
$('#new_oai_preset').on('click', onNewPresetClick);

View File

@ -40,8 +40,8 @@ export const POPUP_RESULT = {
* @property {POPUP_RESULT|number?} [defaultResult=POPUP_RESULT.AFFIRMATIVE] - The default result of this popup when Enter is pressed. Can be changed from `POPUP_RESULT.AFFIRMATIVE`.
* @property {CustomPopupButton[]|string[]?} [customButtons=null] - Custom buttons to add to the popup. If only strings are provided, the buttons will be added with default options, and their result will be in order from `2` onward.
* @property {CustomPopupInput[]?} [customInputs=null] - Custom inputs to add to the popup. The display below the content and the input box, one by one.
* @property {(popup: Popup) => boolean?} [onClosing=null] - Handler called before the popup closes, return `false` to cancel the close
* @property {(popup: Popup) => void?} [onClose=null] - Handler called after the popup closes, but before the DOM is cleaned up
* @property {(popup: Popup) => Promise<boolean?>|boolean?} [onClosing=null] - Handler called before the popup closes, return `false` to cancel the close
* @property {(popup: Popup) => Promise<void?>|void?} [onClose=null] - Handler called after the popup closes, but before the DOM is cleaned up
* @property {number?} [cropAspect=null] - Aspect ratio for the crop popup
* @property {string?} [cropImage=null] - Image URL to display in the crop popup
*/
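A minimal sketch of what the widened handler types permit, i.e. awaiting async work before and after the popup closes (assumes the usual imports from popup.js; the handler bodies and helper names are hypothetical):

const result = await callGenericPopup('Discard unsaved changes?', POPUP_TYPE.CONFIRM, '', {
    onClosing: async (popup) => {
        if (popup.result !== POPUP_RESULT.AFFIRMATIVE) {
            return true;                      // closing without confirmation is fine
        }
        return await canDiscardChanges();     // hypothetical async check; returning false cancels the close
    },
    onClose: async () => {
        await flushPendingEdits();            // hypothetical async cleanup, awaited before DOM removal
    },
});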
@ -138,8 +138,8 @@ export class Popup {
/** @readonly @type {CustomPopupButton[]|string[]?} */ customButtons;
/** @readonly @type {CustomPopupInput[]} */ customInputs;
/** @type {(popup: Popup) => boolean?} */ onClosing;
/** @type {(popup: Popup) => void?} */ onClose;
/** @type {(popup: Popup) => Promise<boolean?>|boolean?} */ onClosing;
/** @type {(popup: Popup) => Promise<void?>|void?} */ onClose;
/** @type {POPUP_RESULT|number} */ result;
/** @type {any} */ value;
@ -509,7 +509,7 @@ export class Popup {
this.result = result;
if (this.onClosing) {
const shouldClose = this.onClosing(this);
const shouldClose = await this.onClosing(this);
if (!shouldClose) {
this.#isClosingPrevented = true;
// Set values back if we cancel out of closing the popup
@ -547,13 +547,13 @@ export class Popup {
fixToastrForDialogs();
// After the dialog is actually completely closed, remove it from the DOM
runAfterAnimation(this.dlg, () => {
runAfterAnimation(this.dlg, async () => {
// Call the close on the dialog
this.dlg.close();
// Run a possible custom handler right before DOM removal
if (this.onClose) {
this.onClose(this);
await this.onClose(this);
}
// Remove it from the dom
@ -596,7 +596,7 @@ export class Popup {
/** @returns {boolean} Checks if any modal popup dialog is open */
isPopupOpen() {
return Popup.util.popups.length > 0;
return Popup.util.popups.filter(x => x.dlg.hasAttribute('open')).length > 0;
},
/**

View File

@ -60,6 +60,7 @@ export {
power_user,
send_on_enter_options,
getContextSettings,
applyPowerUserSettings,
};
export const MAX_CONTEXT_DEFAULT = 8192;
@ -202,6 +203,7 @@ let power_user = {
trim_spaces: true,
relaxed_api_urls: false,
world_import_dialog: true,
enable_auto_select_input: false,
tag_import_setting: tag_import_setting.ASK,
disable_group_trimming: false,
single_line: false,
@ -300,45 +302,9 @@ let movingUIPresets = [];
export let context_presets = [];
const storage_keys = {
fast_ui_mode: 'TavernAI_fast_ui_mode',
avatar_style: 'TavernAI_avatar_style',
chat_display: 'TavernAI_chat_display',
chat_width: 'chat_width',
font_scale: 'TavernAI_font_scale',
main_text_color: 'TavernAI_main_text_color',
italics_text_color: 'TavernAI_italics_text_color',
underline_text_color: 'TavernAI_underline_text_color',
quote_text_color: 'TavernAI_quote_text_color',
blur_tint_color: 'TavernAI_blur_tint_color',
chat_tint_color: 'TavernAI_chat_tint_color',
user_mes_blur_tint_color: 'TavernAI_user_mes_blur_tint_color',
bot_mes_blur_tint_color: 'TavernAI_bot_mes_blur_tint_color',
blur_strength: 'TavernAI_blur_strength',
shadow_color: 'TavernAI_shadow_color',
shadow_width: 'TavernAI_shadow_width',
border_color: 'TavernAI_border_color',
custom_css: 'TavernAI_custom_css',
waifuMode: 'TavernAI_waifuMode',
movingUI: 'TavernAI_movingUI',
noShadows: 'TavernAI_noShadows',
hotswap_enabled: 'HotswapEnabled',
timer_enabled: 'TimerEnabled',
timestamps_enabled: 'TimestampsEnabled',
timestamp_model_icon: 'TimestampModelIcon',
mesIDDisplay_enabled: 'mesIDDisplayEnabled',
hideChatAvatars_enabled: 'hideChatAvatarsEnabled',
message_token_count_enabled: 'MessageTokenCountEnabled',
expand_message_actions: 'ExpandMessageActions',
enableZenSliders: 'enableZenSliders',
enableLabMode: 'enableLabMode',
reduced_motion: 'reduced_motion',
compact_input_area: 'compact_input_area',
auto_connect_legacy: 'AutoConnectEnabled',
auto_load_chat_legacy: 'AutoLoadChatEnabled',
hideChatAvatars_legacy: 'hideChatAvatarsEnabled',
storyStringValidationCache: 'StoryStringValidationCache',
};
@ -458,73 +424,47 @@ function fixMarkdown(text, forDisplay) {
}
function switchHotswap() {
const value = localStorage.getItem(storage_keys.hotswap_enabled);
power_user.hotswap_enabled = value === null ? true : value == 'true';
$('body').toggleClass('no-hotswap', !power_user.hotswap_enabled);
$('#hotswapEnabled').prop('checked', power_user.hotswap_enabled);
}
function switchTimer() {
const value = localStorage.getItem(storage_keys.timer_enabled);
power_user.timer_enabled = value === null ? true : value == 'true';
$('body').toggleClass('no-timer', !power_user.timer_enabled);
$('#messageTimerEnabled').prop('checked', power_user.timer_enabled);
}
function switchTimestamps() {
const value = localStorage.getItem(storage_keys.timestamps_enabled);
power_user.timestamps_enabled = value === null ? true : value == 'true';
$('body').toggleClass('no-timestamps', !power_user.timestamps_enabled);
$('#messageTimestampsEnabled').prop('checked', power_user.timestamps_enabled);
}
function switchIcons() {
const value = localStorage.getItem(storage_keys.timestamp_model_icon);
power_user.timestamp_model_icon = value === null ? true : value == 'true';
$('body').toggleClass('no-modelIcons', !power_user.timestamp_model_icon);
$('#messageModelIconEnabled').prop('checked', power_user.timestamp_model_icon);
}
function switchTokenCount() {
const value = localStorage.getItem(storage_keys.message_token_count_enabled);
power_user.message_token_count_enabled = value === null ? false : value == 'true';
$('body').toggleClass('no-tokenCount', !power_user.message_token_count_enabled);
$('#messageTokensEnabled').prop('checked', power_user.message_token_count_enabled);
}
function switchMesIDDisplay() {
const value = localStorage.getItem(storage_keys.mesIDDisplay_enabled);
power_user.mesIDDisplay_enabled = value === null ? true : value == 'true';
/* console.log(`
localstorage value:${value},
poweruser before:${before},
poweruser after:${power_user.mesIDDisplay_enabled}`) */
$('body').toggleClass('no-mesIDDisplay', !power_user.mesIDDisplay_enabled);
$('#mesIDDisplayEnabled').prop('checked', power_user.mesIDDisplay_enabled);
}
function switchHideChatAvatars() {
const value = localStorage.getItem(storage_keys.hideChatAvatars_enabled);
power_user.hideChatAvatars_enabled = value === null ? false : value == 'true';
/*console.log(`
localstorage value:${value},
poweruser after:${power_user.hideChatAvatars_enabled}`)
*/
$('body').toggleClass('hideChatAvatars', power_user.hideChatAvatars_enabled);
$('#hideChatAvatarsEnabled').prop('checked', power_user.hideChatAvatars_enabled);
}
function switchMessageActions() {
const value = localStorage.getItem(storage_keys.expand_message_actions);
power_user.expand_message_actions = value === null ? false : value == 'true';
$('body').toggleClass('expandMessageActions', power_user.expand_message_actions);
$('#expandMessageActions').prop('checked', power_user.expand_message_actions);
$('.extraMesButtons, .extraMesButtonsHint').removeAttr('style');
}
function switchReducedMotion() {
const value = localStorage.getItem(storage_keys.reduced_motion);
power_user.reduced_motion = value === null ? false : value == 'true';
jQuery.fx.off = power_user.reduced_motion;
const overrideDuration = power_user.reduced_motion ? 0 : ANIMATION_DURATION_DEFAULT;
setAnimationDuration(overrideDuration);
@ -533,8 +473,6 @@ function switchReducedMotion() {
}
function switchCompactInputArea() {
const value = localStorage.getItem(storage_keys.compact_input_area);
power_user.compact_input_area = value === null ? true : value == 'true';
$('#send_form').toggleClass('compact', power_user.compact_input_area);
$('#compact_input_area').prop('checked', power_user.compact_input_area);
}
@ -550,8 +488,6 @@ async function switchLabMode() {
}
*/
await delay(100);
const value = localStorage.getItem(storage_keys.enableLabMode);
power_user.enableLabMode = value === null ? false : value == 'true';
$('body').toggleClass('enableLabMode', power_user.enableLabMode);
$('#enableLabMode').prop('checked', power_user.enableLabMode);
@ -598,8 +534,6 @@ async function switchLabMode() {
async function switchZenSliders() {
await delay(100);
const value = localStorage.getItem(storage_keys.enableZenSliders);
power_user.enableZenSliders = value === null ? false : value == 'true';
$('body').toggleClass('enableZenSliders', power_user.enableZenSliders);
$('#enableZenSliders').prop('checked', power_user.enableZenSliders);
@ -971,8 +905,6 @@ async function CreateZenSliders(elmnt) {
}
}
function switchUiMode() {
const fastUi = localStorage.getItem(storage_keys.fast_ui_mode);
power_user.fast_ui_mode = fastUi === null ? true : fastUi == 'true';
$('body').toggleClass('no-blur', power_user.fast_ui_mode);
$('#fast_ui_mode').prop('checked', power_user.fast_ui_mode);
if (power_user.fast_ui_mode) {
@ -1022,8 +954,6 @@ function switchMovingUI() {
$('.drawer-content.maximized').each(function () {
$(this).find('.inline-drawer-maximize').trigger('click');
});
const movingUI = localStorage.getItem(storage_keys.movingUI);
power_user.movingUI = movingUI === null ? false : movingUI == 'true';
$('body').toggleClass('movingUI', power_user.movingUI);
if (power_user.movingUI === true) {
initMovingUI();
@ -1039,9 +969,7 @@ function switchMovingUI() {
}
}
function noShadows() {
const noShadows = localStorage.getItem(storage_keys.noShadows);
power_user.noShadows = noShadows === null ? false : noShadows == 'true';
function applyNoShadows() {
$('body').toggleClass('noShadows', power_user.noShadows);
$('#noShadowsmode').prop('checked', power_user.noShadows);
if (power_user.noShadows) {
@ -1055,12 +983,9 @@ function noShadows() {
}
function applyAvatarStyle() {
power_user.avatar_style = Number(localStorage.getItem(storage_keys.avatar_style) ?? avatar_styles.ROUND);
$('body').toggleClass('big-avatars', power_user.avatar_style === avatar_styles.RECTANGULAR);
$('body').toggleClass('square-avatars', power_user.avatar_style === avatar_styles.SQUARE);
$('#avatar_style').val(power_user.avatar_style).prop('selected', true);
//$(`input[name="avatar_style"][value="${power_user.avatar_style}"]`).prop("checked", true);
}
function applyChatDisplay() {
@ -1095,8 +1020,6 @@ function applyChatDisplay() {
}
function applyChatWidth(type) {
power_user.chat_width = Number(localStorage.getItem(storage_keys.chat_width) ?? 50);
if (type === 'forced') {
let r = document.documentElement;
r.style.setProperty('--sheldWidth', `${power_user.chat_width}vw`);
@ -1158,8 +1081,6 @@ async function applyThemeColor(type) {
}
async function applyCustomCSS() {
power_user.custom_css = String(localStorage.getItem(storage_keys.custom_css) ?? '');
$('#customCSS').val(power_user.custom_css);
var styleId = 'custom-style';
var style = document.getElementById(styleId);
@ -1173,32 +1094,26 @@ async function applyCustomCSS() {
}
async function applyBlurStrength() {
power_user.blur_strength = Number(localStorage.getItem(storage_keys.blur_strength) ?? 1);
document.documentElement.style.setProperty('--blurStrength', power_user.blur_strength);
document.documentElement.style.setProperty('--blurStrength', String(power_user.blur_strength));
$('#blur_strength_counter').val(power_user.blur_strength);
$('#blur_strength').val(power_user.blur_strength);
}
async function applyShadowWidth() {
power_user.shadow_width = Number(localStorage.getItem(storage_keys.shadow_width) ?? 2);
document.documentElement.style.setProperty('--shadowWidth', power_user.shadow_width);
document.documentElement.style.setProperty('--shadowWidth', String(power_user.shadow_width));
$('#shadow_width_counter').val(power_user.shadow_width);
$('#shadow_width').val(power_user.shadow_width);
}
async function applyFontScale(type) {
power_user.font_scale = Number(localStorage.getItem(storage_keys.font_scale) ?? 1);
//this is to allow forced setting on page load, theme swap, etc
if (type === 'forced') {
document.documentElement.style.setProperty('--fontScale', power_user.font_scale);
document.documentElement.style.setProperty('--fontScale', String(power_user.font_scale));
} else {
//this is to prevent the slider from updating page in real time
$('#font_scale').off('mouseup touchend').on('mouseup touchend', () => {
document.documentElement.style.setProperty('--fontScale', power_user.font_scale);
document.documentElement.style.setProperty('--fontScale', String(power_user.font_scale));
});
}
@ -1227,64 +1142,55 @@ async function applyTheme(name) {
{
key: 'blur_strength',
action: async () => {
localStorage.setItem(storage_keys.blur_strength, power_user.blur_strength);
await applyBlurStrength();
},
},
{
key: 'custom_css',
action: async () => {
localStorage.setItem(storage_keys.custom_css, power_user.custom_css);
await applyCustomCSS();
},
},
{
key: 'shadow_width',
action: async () => {
localStorage.setItem(storage_keys.shadow_width, power_user.shadow_width);
await applyShadowWidth();
},
},
{
key: 'font_scale',
action: async () => {
localStorage.setItem(storage_keys.font_scale, power_user.font_scale);
await applyFontScale('forced');
},
},
{
key: 'fast_ui_mode',
action: async () => {
localStorage.setItem(storage_keys.fast_ui_mode, power_user.fast_ui_mode);
switchUiMode();
},
},
{
key: 'waifuMode',
action: async () => {
localStorage.setItem(storage_keys.waifuMode, power_user.waifuMode);
switchWaifuMode();
},
},
{
key: 'chat_display',
action: async () => {
localStorage.setItem(storage_keys.chat_display, power_user.chat_display);
applyChatDisplay();
},
},
{
key: 'avatar_style',
action: async () => {
localStorage.setItem(storage_keys.avatar_style, power_user.avatar_style);
applyAvatarStyle();
},
},
{
key: 'noShadows',
action: async () => {
localStorage.setItem(storage_keys.noShadows, power_user.noShadows);
noShadows();
applyNoShadows();
},
},
{
@ -1294,78 +1200,66 @@ async function applyTheme(name) {
if (!power_user.chat_width) {
power_user.chat_width = 50;
}
localStorage.setItem(storage_keys.chat_width, String(power_user.chat_width));
applyChatWidth('forced');
},
},
{
key: 'timer_enabled',
action: async () => {
localStorage.setItem(storage_keys.timer_enabled, Boolean(power_user.timer_enabled));
switchTimer();
},
},
{
key: 'timestamps_enabled',
action: async () => {
localStorage.setItem(storage_keys.timestamps_enabled, Boolean(power_user.timestamps_enabled));
switchTimestamps();
},
},
{
key: 'timestamp_model_icon',
action: async () => {
localStorage.setItem(storage_keys.timestamp_model_icon, Boolean(power_user.timestamp_model_icon));
switchIcons();
},
},
{
key: 'message_token_count_enabled',
action: async () => {
localStorage.setItem(storage_keys.message_token_count_enabled, Boolean(power_user.message_token_count_enabled));
switchTokenCount();
},
},
{
key: 'mesIDDisplay_enabled',
action: async () => {
localStorage.setItem(storage_keys.mesIDDisplay_enabled, Boolean(power_user.mesIDDisplay_enabled));
switchMesIDDisplay();
},
},
{
key: 'hideChatAvatars_enabled',
action: async () => {
localStorage.setItem(storage_keys.hideChatAvatars_enabled, Boolean(power_user.hideChatAvatars_enabled));
switchHideChatAvatars();
},
},
{
key: 'expand_message_actions',
action: async () => {
localStorage.setItem(storage_keys.expand_message_actions, Boolean(power_user.expand_message_actions));
switchMessageActions();
},
},
{
key: 'enableZenSliders',
action: async () => {
localStorage.setItem(storage_keys.enableZenSliders, Boolean(power_user.enableZenSliders));
switchMessageActions();
},
},
{
key: 'enableLabMode',
action: async () => {
localStorage.setItem(storage_keys.enableLabMode, Boolean(power_user.enableLabMode));
switchMessageActions();
},
},
{
key: 'hotswap_enabled',
action: async () => {
localStorage.setItem(storage_keys.hotswap_enabled, Boolean(power_user.hotswap_enabled));
switchHotswap();
},
},
@ -1386,7 +1280,6 @@ async function applyTheme(name) {
{
key: 'reduced_motion',
action: async () => {
localStorage.setItem(storage_keys.reduced_motion, String(power_user.reduced_motion));
$('#reduced_motion').prop('checked', power_user.reduced_motion);
switchReducedMotion();
},
@ -1394,7 +1287,6 @@ async function applyTheme(name) {
{
key: 'compact_input_area',
action: async () => {
localStorage.setItem(storage_keys.compact_input_area, String(power_user.compact_input_area));
$('#compact_input_area').prop('checked', power_user.compact_input_area);
switchCompactInputArea();
},
@ -1449,24 +1341,26 @@ async function showDebugMenu() {
callGenericPopup(template, POPUP_TYPE.TEXT, '', { wide: true, large: true, allowVerticalScrolling: true });
}
switchUiMode();
applyFontScale('forced');
applyThemeColor();
applyChatWidth('forced');
applyAvatarStyle();
applyBlurStrength();
applyShadowWidth();
applyCustomCSS();
switchMovingUI();
noShadows();
switchHotswap();
switchTimer();
switchTimestamps();
switchIcons();
switchMesIDDisplay();
switchHideChatAvatars();
switchTokenCount();
switchMessageActions();
function applyPowerUserSettings() {
switchUiMode();
applyFontScale('forced');
applyThemeColor();
applyChatWidth('forced');
applyAvatarStyle();
applyBlurStrength();
applyShadowWidth();
applyCustomCSS();
switchMovingUI();
applyNoShadows();
switchHotswap();
switchTimer();
switchTimestamps();
switchIcons();
switchMesIDDisplay();
switchHideChatAvatars();
switchTokenCount();
switchMessageActions();
}
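A minimal sketch of how the newly exported aggregate is expected to be called now that the individual toggles read from power_user instead of localStorage (the import path depends on the caller's location and is illustrative):

import { applyPowerUserSettings } from './power-user.js';

// After power_user has been populated from the server-side settings snapshot,
// apply every UI toggle in one pass instead of re-reading localStorage keys.
applyPowerUserSettings();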
function getExampleMessagesBehavior() {
if (power_user.strip_examples) {
@ -1529,20 +1423,10 @@ async function loadPowerUserSettings(settings, data) {
context_presets = data.context;
}
// These are still local storage
const fastUi = localStorage.getItem(storage_keys.fast_ui_mode);
const movingUI = localStorage.getItem(storage_keys.movingUI);
const noShadows = localStorage.getItem(storage_keys.noShadows);
const hotswap = localStorage.getItem(storage_keys.hotswap_enabled);
const timer = localStorage.getItem(storage_keys.timer_enabled);
const timestamps = localStorage.getItem(storage_keys.timestamps_enabled);
const mesIDDisplay = localStorage.getItem(storage_keys.mesIDDisplay_enabled);
const hideChatAvatars = localStorage.getItem(storage_keys.hideChatAvatars_enabled);
const expandMessageActions = localStorage.getItem(storage_keys.expand_message_actions);
const enableZenSliders = localStorage.getItem(storage_keys.enableZenSliders);
const enableLabMode = localStorage.getItem(storage_keys.enableLabMode);
// These are still local storage. Delete in 1.12.7
const autoLoadChat = localStorage.getItem(storage_keys.auto_load_chat_legacy);
const autoConnect = localStorage.getItem(storage_keys.auto_connect_legacy);
const hideChatAvatars = localStorage.getItem(storage_keys.hideChatAvatars_legacy);
if (autoLoadChat) {
power_user.auto_load_chat = autoLoadChat === 'true';
@ -1554,22 +1438,10 @@ async function loadPowerUserSettings(settings, data) {
localStorage.removeItem(storage_keys.auto_connect_legacy);
}
power_user.fast_ui_mode = fastUi === null ? true : fastUi == 'true';
power_user.movingUI = movingUI === null ? false : movingUI == 'true';
power_user.noShadows = noShadows === null ? false : noShadows == 'true';
power_user.hotswap_enabled = hotswap === null ? true : hotswap == 'true';
power_user.timer_enabled = timer === null ? true : timer == 'true';
power_user.timestamps_enabled = timestamps === null ? true : timestamps == 'true';
power_user.mesIDDisplay_enabled = mesIDDisplay === null ? true : mesIDDisplay == 'true';
power_user.hideChatAvatars_enabled = hideChatAvatars === null ? true : hideChatAvatars == 'true';
power_user.expand_message_actions = expandMessageActions === null ? true : expandMessageActions == 'true';
power_user.enableZenSliders = enableZenSliders === null ? false : enableZenSliders == 'true';
power_user.enableLabMode = enableLabMode === null ? false : enableLabMode == 'true';
power_user.avatar_style = Number(localStorage.getItem(storage_keys.avatar_style) ?? avatar_styles.ROUND);
//power_user.chat_display = Number(localStorage.getItem(storage_keys.chat_display) ?? chat_styles.DEFAULT);
power_user.chat_width = Number(localStorage.getItem(storage_keys.chat_width) ?? 50);
power_user.font_scale = Number(localStorage.getItem(storage_keys.font_scale) ?? 1);
power_user.blur_strength = Number(localStorage.getItem(storage_keys.blur_strength) ?? 10);
if (hideChatAvatars) {
power_user.hideChatAvatars_enabled = hideChatAvatars === 'true';
localStorage.removeItem(storage_keys.hideChatAvatars_legacy);
}
if (power_user.chat_display === '') {
power_user.chat_display = chat_styles.DEFAULT;
@ -1596,6 +1468,7 @@ async function loadPowerUserSettings(settings, data) {
$('#single_line').prop('checked', power_user.single_line);
$('#relaxed_api_urls').prop('checked', power_user.relaxed_api_urls);
$('#world_import_dialog').prop('checked', power_user.world_import_dialog);
$('#enable_auto_select_input').prop('checked', power_user.enable_auto_select_input);
$('#trim_spaces').prop('checked', power_user.trim_spaces);
$('#continue_on_send').prop('checked', power_user.continue_on_send);
$('#quick_continue').prop('checked', power_user.quick_continue);
@ -1655,7 +1528,7 @@ async function loadPowerUserSettings(settings, data) {
$('#messageTimestampsEnabled').prop('checked', power_user.timestamps_enabled);
$('#messageModelIconEnabled').prop('checked', power_user.timestamp_model_icon);
$('#mesIDDisplayEnabled').prop('checked', power_user.mesIDDisplay_enabled);
$('#hideChatAvatarsEndabled').prop('checked', power_user.hideChatAvatars_enabled);
$('#hideChatAvatarsEnabled').prop('checked', power_user.hideChatAvatars_enabled);
$('#prefer_character_prompt').prop('checked', power_user.prefer_character_prompt);
$('#prefer_character_jailbreak').prop('checked', power_user.prefer_character_jailbreak);
$('#enableZenSliders').prop('checked', power_user.enableZenSliders).trigger('input');
@ -3298,10 +3171,8 @@ $(document).ready(() => {
saveSettingsDebounced();
});
// Settings that go to local storage
$('#fast_ui_mode').change(function () {
power_user.fast_ui_mode = $(this).prop('checked');
localStorage.setItem(storage_keys.fast_ui_mode, power_user.fast_ui_mode);
switchUiMode();
saveSettingsDebounced();
});
@ -3312,24 +3183,21 @@ $(document).ready(() => {
saveSettingsDebounced();
});
$('#customCSS').on('change', () => {
power_user.custom_css = $('#customCSS').val();
localStorage.setItem(storage_keys.custom_css, power_user.custom_css);
$('#customCSS').on('input', () => {
power_user.custom_css = String($('#customCSS').val());
saveSettingsDebounced();
applyCustomCSS();
});
$('#movingUImode').change(function () {
power_user.movingUI = $(this).prop('checked');
localStorage.setItem(storage_keys.movingUI, power_user.movingUI);
switchMovingUI();
saveSettingsDebounced();
});
$('#noShadowsmode').change(function () {
power_user.noShadows = $(this).prop('checked');
localStorage.setItem(storage_keys.noShadows, power_user.noShadows);
noShadows();
applyNoShadows();
saveSettingsDebounced();
});
@ -3338,7 +3206,6 @@ $(document).ready(() => {
$('#avatar_style').on('change', function () {
const value = $(this).find(':selected').val();
power_user.avatar_style = Number(value);
localStorage.setItem(storage_keys.avatar_style, power_user.avatar_style);
applyAvatarStyle();
saveSettingsDebounced();
});
@ -3346,17 +3213,15 @@ $(document).ready(() => {
$('#chat_display').on('change', function () {
const value = $(this).find(':selected').val();
power_user.chat_display = Number(value);
localStorage.setItem(storage_keys.chat_display, power_user.chat_display);
applyChatDisplay();
saveSettingsDebounced();
});
$('#chat_width_slider').on('input', function (e, data) {
const applyMode = data?.forced ? 'forced' : 'normal';
power_user.chat_width = Number(e.target.value);
localStorage.setItem(storage_keys.chat_width, power_user.chat_width);
applyChatWidth(applyMode);
saveSettingsDebounced();
setHotswapsDebounced();
});
@ -3386,7 +3251,6 @@ $(document).ready(() => {
const applyMode = data?.forced ? 'forced' : 'normal';
power_user.font_scale = Number(e.target.value);
$('#font_scale_counter').val(power_user.font_scale);
localStorage.setItem(storage_keys.font_scale, power_user.font_scale);
await applyFontScale(applyMode);
saveSettingsDebounced();
});
@ -3394,7 +3258,6 @@ $(document).ready(() => {
$('input[name="blur_strength"]').on('input', async function (e) {
power_user.blur_strength = Number(e.target.value);
$('#blur_strength_counter').val(power_user.blur_strength);
localStorage.setItem(storage_keys.blur_strength, power_user.blur_strength);
await applyBlurStrength();
saveSettingsDebounced();
});
@ -3402,7 +3265,6 @@ $(document).ready(() => {
$('input[name="shadow_width"]').on('input', async function (e) {
power_user.shadow_width = Number(e.target.value);
$('#shadow_width_counter').val(power_user.shadow_width);
localStorage.setItem(storage_keys.shadow_width, power_user.shadow_width);
await applyShadowWidth();
saveSettingsDebounced();
});
@ -3643,36 +3505,36 @@ $(document).ready(() => {
$('#messageTimerEnabled').on('input', function () {
const value = !!$(this).prop('checked');
power_user.timer_enabled = value;
localStorage.setItem(storage_keys.timer_enabled, Boolean(power_user.timer_enabled));
switchTimer();
saveSettingsDebounced();
});
$('#messageTimestampsEnabled').on('input', function () {
const value = !!$(this).prop('checked');
power_user.timestamps_enabled = value;
localStorage.setItem(storage_keys.timestamps_enabled, Boolean(power_user.timestamps_enabled));
switchTimestamps();
saveSettingsDebounced();
});
$('#messageModelIconEnabled').on('input', function () {
const value = !!$(this).prop('checked');
power_user.timestamp_model_icon = value;
localStorage.setItem(storage_keys.timestamp_model_icon, Boolean(power_user.timestamp_model_icon));
switchIcons();
saveSettingsDebounced();
});
$('#messageTokensEnabled').on('input', function () {
const value = !!$(this).prop('checked');
power_user.message_token_count_enabled = value;
localStorage.setItem(storage_keys.message_token_count_enabled, Boolean(power_user.message_token_count_enabled));
switchTokenCount();
saveSettingsDebounced();
});
$('#expandMessageActions').on('input', function () {
const value = !!$(this).prop('checked');
power_user.expand_message_actions = value;
localStorage.setItem(storage_keys.expand_message_actions, Boolean(power_user.expand_message_actions));
switchMessageActions();
saveSettingsDebounced();
});
$('#enableZenSliders').on('input', function () {
@ -3684,9 +3546,8 @@ $(document).ready(() => {
return;
}
power_user.enableZenSliders = value;
localStorage.setItem(storage_keys.enableZenSliders, Boolean(power_user.enableZenSliders));
saveSettingsDebounced();
switchZenSliders();
saveSettingsDebounced();
});
$('#enableLabMode').on('input', function () {
@ -3699,30 +3560,29 @@ $(document).ready(() => {
}
power_user.enableLabMode = value;
localStorage.setItem(storage_keys.enableLabMode, Boolean(power_user.enableLabMode));
saveSettingsDebounced();
switchLabMode();
saveSettingsDebounced();
});
$('#mesIDDisplayEnabled').on('input', function () {
const value = !!$(this).prop('checked');
power_user.mesIDDisplay_enabled = value;
localStorage.setItem(storage_keys.mesIDDisplay_enabled, Boolean(power_user.mesIDDisplay_enabled));
switchMesIDDisplay();
saveSettingsDebounced();
});
$('#hideChatAvatarsEnabled').on('input', function () {
const value = !!$(this).prop('checked');
power_user.hideChatAvatars_enabled = value;
localStorage.setItem(storage_keys.hideChatAvatars_enabled, Boolean(power_user.hideChatAvatars_enabled));
switchHideChatAvatars();
saveSettingsDebounced();
});
$('#hotswapEnabled').on('input', function () {
const value = !!$(this).prop('checked');
power_user.hotswap_enabled = value;
localStorage.setItem(storage_keys.hotswap_enabled, Boolean(power_user.hotswap_enabled));
switchHotswap();
saveSettingsDebounced();
});
$('#prefer_character_prompt').on('input', function () {
@ -3775,6 +3635,12 @@ $(document).ready(() => {
saveSettingsDebounced();
});
$('#enable_auto_select_input').on('input', function () {
const value = !!$(this).prop('checked');
power_user.enable_auto_select_input = value;
saveSettingsDebounced();
});
$('#spoiler_free_mode').on('input', function () {
power_user.spoiler_free_mode = !!$(this).prop('checked');
switchSpoilerMode();
@ -3824,8 +3690,8 @@ $(document).ready(() => {
$('#ui_mode_select').on('change', function () {
const value = $(this).find(':selected').val();
power_user.ui_mode = Number(value);
saveSettingsDebounced();
switchSimpleMode();
saveSettingsDebounced();
});
$('#bogus_folders').on('input', function () {
@ -3929,14 +3795,12 @@ $(document).ready(() => {
$('#reduced_motion').on('input', function () {
power_user.reduced_motion = !!$(this).prop('checked');
localStorage.setItem(storage_keys.reduced_motion, String(power_user.reduced_motion));
switchReducedMotion();
saveSettingsDebounced();
});
$('#compact_input_area').on('input', function () {
power_user.compact_input_area = !!$(this).prop('checked');
localStorage.setItem(storage_keys.compact_input_area, String(power_user.compact_input_area));
switchCompactInputArea();
saveSettingsDebounced();
});

View File

@ -32,6 +32,8 @@ export const SECRET_KEYS = {
ZEROONEAI: 'api_key_01ai',
HUGGINGFACE: 'api_key_huggingface',
STABILITY: 'api_key_stability',
BLOCKENTROPY: 'api_key_blockentropy',
CUSTOM_OPENAI_TTS: 'api_key_custom_openai_tts',
};
const INPUT_MAP = {
@ -63,6 +65,7 @@ const INPUT_MAP = {
[SECRET_KEYS.FEATHERLESS]: '#api_key_featherless',
[SECRET_KEYS.ZEROONEAI]: '#api_key_01ai',
[SECRET_KEYS.HUGGINGFACE]: '#api_key_huggingface',
[SECRET_KEYS.BLOCKENTROPY]: '#api_key_blockentropy',
};
async function clearSecret() {
@ -125,7 +128,7 @@ export async function writeSecret(key, value) {
const text = await response.text();
if (text == 'ok') {
secret_state[key] = true;
secret_state[key] = !!value;
updateSecretDisplay();
}
}

View File

@ -7,9 +7,19 @@ export const markdownUnderscoreExt = () => {
}
return [{
type: 'lang',
regex: new RegExp('\\b(?<!_)_(?!_)(.*?)(?<!_)_(?!_)\\b', 'g'),
replace: '<em>$1</em>',
type: 'output',
regex: new RegExp('(<code(?:\\s+[^>]*)?>[\\s\\S]*?<\\/code>)|\\b(?<!_)_(?!_)(.*?)(?<!_)_(?!_)\\b', 'g'),
replace: function(match, codeContent, italicContent) {
if (codeContent) {
// If it's inside <code> tags, return unchanged
return match;
} else if (italicContent) {
// If it's an italic group, apply the replacement
return '<em>' + italicContent + '</em>';
}
// If none of the conditions are met, return the original match
return match;
},
}];
} catch (e) {
console.error('Error in Showdown-underscore extension:', e);
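A quick sanity check of what the reworked output-stage extension does; the strings are illustrative and assume showdown has already wrapped code spans in <code> tags by the time 'output' extensions run:

    // Outside code, lone underscores still become italics:
    //   'some _emphasis_ here'          -> 'some <em>emphasis</em> here'
    // Inside <code> tags the whole match is returned unchanged:
    //   '<code>snake_case_name</code>'  -> '<code>snake_case_name</code>'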

View File

@ -1,7 +1,9 @@
import {
Generate,
UNIQUE_APIS,
activateSendButtons,
addOneMessage,
api_server,
callPopup,
characters,
chat,
@ -49,8 +51,8 @@ import { findGroupMemberId, groups, is_group_generating, openGroupById, resetSel
import { chat_completion_sources, oai_settings, setupChatCompletionPromptManager } from './openai.js';
import { autoSelectPersona, retriggerFirstMessageOnEmptyChat, setPersonaLockState, togglePersonaLock, user_avatar } from './personas.js';
import { addEphemeralStoppingString, chat_styles, flushEphemeralStoppingStrings, power_user } from './power-user.js';
import { textgen_types, textgenerationwebui_settings } from './textgen-settings.js';
import { decodeTextTokens, getFriendlyTokenizerName, getTextTokens, getTokenCountAsync } from './tokenizers.js';
import { SERVER_INPUTS, textgen_types, textgenerationwebui_settings } from './textgen-settings.js';
import { decodeTextTokens, getAvailableTokenizers, getFriendlyTokenizerName, getTextTokens, getTokenCountAsync, selectTokenizer } from './tokenizers.js';
import { debounce, delay, isFalseBoolean, isTrueBoolean, showFontAwesomePicker, stringToRange, trimToEndSentence, trimToStartSentence, waitUntilCondition } from './utils.js';
import { registerVariableCommands, resolveVariable } from './variables.js';
import { background_settings } from './backgrounds.js';
@ -717,6 +719,7 @@ export function initDefaultSlashCommands() {
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'delswipe',
callback: deleteSwipeCallback,
returns: 'the new, currently selected swipe id',
aliases: ['swipedel'],
unnamedArgumentList: [
SlashCommandArgument.fromProps({
@ -912,13 +915,28 @@ export function initDefaultSlashCommands() {
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'addswipe',
callback: addSwipeCallback,
returns: 'the new swipe id',
aliases: ['swipeadd'],
namedArgumentList: [
SlashCommandNamedArgument.fromProps({
name: 'switch',
description: 'switch to the new swipe',
typeList: [ARGUMENT_TYPE.BOOLEAN],
enumList: commonEnumProviders.boolean()(),
}),
],
unnamedArgumentList: [
new SlashCommandArgument(
'text', [ARGUMENT_TYPE.STRING], true,
),
],
helpString: 'Adds a swipe to the last chat message.',
helpString: `
<div>
Adds a swipe to the last chat message.
</div>
<div>
Use switch=true to directly switch to the new swipe.
</div>`,
}));
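A minimal usage sketch for the updated command; the swipe text is illustrative and the second step simply echoes the returned id:

    /addswipe switch=true It was a dark and stormy night. |
    /echo New swipe id: {{pipe}}

With switch=true the last message immediately displays the new swipe; without it, the swipe is appended silently and only its id is returned.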
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'stop',
@ -1480,8 +1498,9 @@ export function initDefaultSlashCommands() {
],
helpString: 'Sets the specified prompt manager entry/entries on or off.',
}));
SlashCommandParser.addCommandObject(SlashCommand.fromProps({ name: 'pick-icon',
callback: async()=>((await showFontAwesomePicker()) ?? false).toString(),
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'pick-icon',
callback: async () => ((await showFontAwesomePicker()) ?? false).toString(),
returns: 'The chosen icon name or false if cancelled.',
helpString: `
<div>Opens a popup with all the available Font Awesome icons and returns the selected icon's name.</div>
@ -1495,6 +1514,72 @@ export function initDefaultSlashCommands() {
</div>
`,
}));
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'api-url',
callback: setApiUrlCallback,
returns: 'the current API url',
aliases: ['server'],
namedArgumentList: [
SlashCommandNamedArgument.fromProps({
name: 'api',
description: 'API to set/get the URL for - if not provided, current API is used',
typeList: [ARGUMENT_TYPE.STRING],
enumList: [
new SlashCommandEnumValue('custom', 'custom OpenAI-compatible', enumTypes.getBasedOnIndex(UNIQUE_APIS.findIndex(x => x === 'openai')), 'O'),
new SlashCommandEnumValue('kobold', 'KoboldAI Classic', enumTypes.getBasedOnIndex(UNIQUE_APIS.findIndex(x => x === 'kobold')), 'K'),
...Object.values(textgen_types).map(api => new SlashCommandEnumValue(api, null, enumTypes.getBasedOnIndex(UNIQUE_APIS.findIndex(x => x === 'textgenerationwebui')), 'T')),
],
}),
SlashCommandNamedArgument.fromProps({
name: 'connect',
description: 'Whether to auto-connect to the API after setting the URL',
typeList: [ARGUMENT_TYPE.BOOLEAN],
defaultValue: 'true',
enumList: commonEnumProviders.boolean('trueFalse')(),
}),
],
unnamedArgumentList: [
SlashCommandArgument.fromProps({
description: 'API url to connect to',
typeList: [ARGUMENT_TYPE.STRING],
}),
],
helpString: `
<div>
Set the API url / server url for the currently selected API, including the port. If no argument is provided, it will return the current API url.
</div>
<div>
If a manual API is provided to <b>set</b> the URL, make sure to set <code>connect=false</code>, as auto-connect only works for the currently selected API,
or consider switching to it with <code>/api</code> first.
</div>
<div>
This slash command works for most Text Completion sources, KoboldAI Classic, and the Custom OpenAI-compatible source for Chat Completion. If unsure which APIs are supported,
check the auto-completion of the optional <code>api</code> argument of this command.
</div>
`,
}));
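A usage sketch, assuming an Ooba-style Text Completion backend on its default port; the URL and the 'ooba' api value are placeholders:

    /api-url api=ooba connect=false http://127.0.0.1:5000 |
    /api-url |
    /echo Current API url: {{pipe}}

The first call only stores the URL (connect=false); the second call, with no argument, reads the URL back for the currently selected API.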
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'tokenizer',
callback: selectTokenizerCallback,
returns: 'current tokenizer',
unnamedArgumentList: [
SlashCommandArgument.fromProps({
description: 'tokenizer name',
typeList: [ARGUMENT_TYPE.STRING],
enumList: getAvailableTokenizers().map(tokenizer =>
new SlashCommandEnumValue(tokenizer.tokenizerKey, tokenizer.tokenizerName, enumTypes.enum, enumIcons.default)),
}),
],
helpString: `
<div>
Selects tokenizer by name. Gets the current tokenizer if no name is provided.
</div>
<div>
<strong>Available tokenizers:</strong>
<pre><code>${getAvailableTokenizers().map(t => t.tokenizerKey).join(', ')}</code></pre>
</div>
`,
}));
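A usage sketch; 'llama3' is just one of the keys listed by the auto-completion, and calling the command without an argument returns the current tokenizer key:

    /tokenizer llama3 |
    /echo Tokenizer switched to: {{pipe}}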
registerVariableCommands();
}
@ -1772,7 +1857,7 @@ async function popupCallback(args, value) {
return String(value);
}
function getMessagesCallback(args, value) {
async function getMessagesCallback(args, value) {
const includeNames = !isFalseBoolean(args?.names);
const includeHidden = isTrueBoolean(args?.hidden);
const role = args?.role;
@ -1805,33 +1890,34 @@ function getMessagesCallback(args, value) {
throw new Error(`Invalid role provided. Expected one of: system, assistant, user. Got: ${role}`);
};
const messages = [];
for (let messageId = range.start; messageId <= range.end; messageId++) {
const message = chat[messageId];
if (!message) {
console.warn(`WARN: No message found with ID ${messageId}`);
continue;
const processMessage = async (mesId) => {
const msg = chat[mesId];
if (!msg) {
console.warn(`WARN: No message found with ID ${mesId}`);
return null;
}
if (role && !filterByRole(message)) {
console.debug(`/messages: Skipping message with ID ${messageId} due to role filter`);
continue;
if (role && !filterByRole(msg)) {
console.debug(`/messages: Skipping message with ID ${mesId} due to role filter`);
return null;
}
if (!includeHidden && message.is_system) {
console.debug(`/messages: Skipping hidden message with ID ${messageId}`);
continue;
if (!includeHidden && msg.is_system) {
console.debug(`/messages: Skipping hidden message with ID ${mesId}`);
return null;
}
if (includeNames) {
messages.push(`${message.name}: ${message.mes}`);
} else {
messages.push(message.mes);
}
}
return includeNames ? `${msg.name}: ${msg.mes}` : msg.mes;
};
return messages.join('\n\n');
const messagePromises = [];
for (let rInd = range.start; rInd <= range.end; ++rInd)
messagePromises.push(processMessage(rInd));
const messages = await Promise.all(messagePromises);
return messages.filter(m => m !== null).join('\n\n');
}
async function runCallback(args, name) {
@ -2061,12 +2147,13 @@ async function generateRawCallback(args, value) {
}
}
/**
* Callback for the /gen command
* @param {object} args Named arguments
* @param {string} value Unnamed argument
* @returns {Promise<string>} The generated text
*/
async function generateCallback(args, value) {
if (!value) {
console.warn('WARN: No argument provided for /gen command');
return;
}
// Prevent generate recursion
$('#send_textarea').val('')[0].dispatchEvent(new Event('input', { bubbles: true }));
const lock = isTrueBoolean(args?.lock);
@ -2154,8 +2241,11 @@ async function echoCallback(args, value) {
}
}
async function addSwipeCallback(_, arg) {
/**
* @param {{switch?: string}} args - named arguments
* @param {string} value - The swipe text to add (unnamed argument)
*/
async function addSwipeCallback(args, value) {
const lastMessage = chat[chat.length - 1];
if (!lastMessage) {
@ -2163,7 +2253,7 @@ async function addSwipeCallback(_, arg) {
return '';
}
if (!arg) {
if (!value) {
console.warn('WARN: No argument provided for /addswipe command');
return '';
}
@ -2192,23 +2282,30 @@ async function addSwipeCallback(_, arg) {
lastMessage.swipe_info = lastMessage.swipes.map(() => ({}));
}
lastMessage.swipes.push(arg);
lastMessage.swipes.push(value);
lastMessage.swipe_info.push({
send_date: getMessageTimeStamp(),
gen_started: null,
gen_finished: null,
extra: {
bias: extractMessageBias(arg),
bias: extractMessageBias(value),
gen_id: Date.now(),
api: 'manual',
model: 'slash command',
},
});
const newSwipeId = lastMessage.swipes.length - 1;
if (isTrueBoolean(args.switch)) {
lastMessage.swipe_id = newSwipeId;
lastMessage.mes = lastMessage.swipes[newSwipeId];
}
await saveChatConditional();
await reloadCurrentChat();
return '';
return String(newSwipeId);
}
async function deleteSwipeCallback(_, arg) {
@ -2244,7 +2341,7 @@ async function deleteSwipeCallback(_, arg) {
await saveChatConditional();
await reloadCurrentChat();
return '';
return String(newSwipeId);
}
async function askCharacter(args, text) {
@ -3223,6 +3320,7 @@ function getModelOptions() {
{ id: 'model_perplexity_select', api: 'openai', type: chat_completion_sources.PERPLEXITY },
{ id: 'model_groq_select', api: 'openai', type: chat_completion_sources.GROQ },
{ id: 'model_01ai_select', api: 'openai', type: chat_completion_sources.ZEROONEAI },
{ id: 'model_blockentropy_select', api: 'openai', type: chat_completion_sources.BLOCKENTROPY },
{ id: 'model_novel_select', api: 'novel', type: null },
{ id: 'horde_model', api: 'koboldhorde', type: null },
];
@ -3391,6 +3489,123 @@ function setPromptEntryCallback(args, targetState) {
return '';
}
/**
* Sets the API URL and triggers the text generation web UI button click.
*
* @param {object} args - named args
* @param {string?} [args.api=null] - the API name to set/get the URL for
* @param {string?} [args.connect=true] - whether to connect to the API after setting
* @param {string} url - the API URL to set
* @returns {Promise<string>}
*/
async function setApiUrlCallback({ api = null, connect = 'true' }, url) {
const autoConnect = isTrueBoolean(connect);
// Special handling for Chat Completion Custom OpenAI compatible, that one can also support API url handling
const isCurrentlyCustomOpenai = main_api === 'openai' && oai_settings.chat_completion_source === chat_completion_sources.CUSTOM;
if (api === chat_completion_sources.CUSTOM || (!api && isCurrentlyCustomOpenai)) {
if (!url) {
return oai_settings.custom_url ?? '';
}
if (!isCurrentlyCustomOpenai && autoConnect) {
toastr.warning('Custom OpenAI API is not the currently selected API, so we cannot do an auto-connect. Consider switching to it via /api beforehand.');
return '';
}
$('#custom_api_url_text').val(url).trigger('input');
if (autoConnect) {
$('#api_button_openai').trigger('click');
}
return url;
}
// Special handling for Kobold Classic API
const isCurrentlyKoboldClassic = main_api === 'kobold';
if (api === 'kobold' || (!api && isCurrentlyKoboldClassic)) {
if (!url) {
return api_server ?? '';
}
if (!isCurrentlyKoboldClassic && autoConnect) {
toastr.warning('Kobold Classic API is not the currently selected API, so we cannot do an auto-connect. Consider switching to it via /api beforehand.');
return '';
}
$('#api_url_text').val(url).trigger('input');
// trigger blur debounced, so we hide the autocomplete menu
setTimeout(() => $('#api_url_text').trigger('blur'), 1);
if (autoConnect) {
$('#api_button').trigger('click');
}
return api_server ?? '';
}
// Do some checks and get the api type we are targeting with this command
if (api && !Object.values(textgen_types).includes(api)) {
toastr.warning(`API '${api}' is not a valid text_gen API.`);
return '';
}
if (!api && !Object.values(textgen_types).includes(textgenerationwebui_settings.type)) {
toastr.warning(`API '${textgenerationwebui_settings.type}' is not a valid text_gen API.`);
return '';
}
if (api && url && autoConnect && api !== textgenerationwebui_settings.type) {
toastr.warning(`API '${api}' is not the currently selected API, so we cannot do an auto-connect. Consider switching to it via /api beforehand.`);
return '';
}
const type = api || textgenerationwebui_settings.type;
const inputSelector = SERVER_INPUTS[type];
if (!inputSelector) {
toastr.warning(`API '${type}' does not have a server url input.`);
return '';
}
// If no url was provided, return the current one
if (!url) {
return textgenerationwebui_settings.server_urls[type] ?? '';
}
// else, we want to actually set the url
$(inputSelector).val(url).trigger('input');
// trigger blur debounced, so we hide the autocomplete menu
setTimeout(() => $(inputSelector).trigger('blur'), 1);
// Trigger the auto connect via connect button, if requested
if (autoConnect) {
$('#api_button_textgenerationwebui').trigger('click');
}
// We still re-acquire the value, as it might have been modified by the validation on connect
return textgenerationwebui_settings.server_urls[type] ?? '';
}
async function selectTokenizerCallback(_, name) {
if (!name) {
return getAvailableTokenizers().find(tokenizer => tokenizer.tokenizerId === power_user.tokenizer)?.tokenizerKey ?? '';
}
const tokenizers = getAvailableTokenizers();
const fuse = new Fuse(tokenizers, { keys: ['tokenizerKey', 'tokenizerName'] });
const result = fuse.search(name);
if (result.length === 0) {
toastr.warning(`Tokenizer "${name}" not found`);
return '';
}
/** @type {import('./tokenizers.js').Tokenizer} */
const foundTokenizer = result[0].item;
selectTokenizer(foundTokenizer.tokenizerId);
return foundTokenizer.tokenizerKey;
}
export let isExecutingCommandsFromChatInput = false;
export let commandsFromChatInputAbortController;

View File

@ -445,7 +445,11 @@ export function getTagKeyForEntity(entityOrKey) {
}
// Next let's check if it's a valid character or character id, so we can switch it to its tag
const character = characters.indexOf(x) >= 0 ? x : characters[x];
let character;
if (!character && characters.indexOf(x) >= 0) character = x; // Check for char object
if (!character && !isNaN(parseInt(entityOrKey))) character = characters[x]; // check if it's a char id
if (!character) character = characters.find(y => y.avatar === x); // check if it's a char key
if (character) {
x = character.avatar;
}
@ -708,12 +712,12 @@ const ANTI_TROLL_MAX_TAGS = 15;
*
* @param {Character} character - The character
* @param {object} [options] - Options
* @param {boolean} [options.forceShow=false] - Whether to force showing the import dialog
* @param {tag_import_setting} [options.importSetting=null] - Force a tag import setting
* @returns {Promise<boolean>} Boolean indicating whether any tag was imported
*/
async function importTags(character, { forceShow = false } = {}) {
async function importTags(character, { importSetting = null } = {}) {
// Gather the tags to import based on the selected setting
const tagNamesToImport = await handleTagImport(character, { forceShow });
const tagNamesToImport = await handleTagImport(character, { importSetting });
if (!tagNamesToImport?.length) {
console.debug('No tags to import');
return;
@ -722,7 +726,11 @@ async function importTags(character, { forceShow = false } = {}) {
const tagsToImport = tagNamesToImport.map(tag => getTag(tag, { createNew: true }));
const added = addTagsToEntity(tagsToImport, character.avatar);
toastr.success(`Imported tags:<br />${tagsToImport.map(x => x.name).join(', ')}`, 'Importing Tags', { escapeHtml: false });
if (added) {
toastr.success(`Imported tags:<br />${tagsToImport.map(x => x.name).join(', ')}`, 'Importing Tags', { escapeHtml: false });
} else {
toastr.error(`Couldn't import tags:<br />${tagsToImport.map(x => x.name).join(', ')}`, 'Importing Tags', { escapeHtml: false });
}
return added;
}
@ -732,10 +740,10 @@ async function importTags(character, { forceShow = false } = {}) {
*
* @param {Character} character - The character
* @param {object} [options] - Options
* @param {boolean} [options.forceShow=false] - Whether to force showing the import dialog
* @param {tag_import_setting} [options.importSetting=null] - Force a tag import setting
* @returns {Promise<string[]>} Array of strings representing the tags to import
*/
async function handleTagImport(character, { forceShow = false } = {}) {
async function handleTagImport(character, { importSetting = null } = {}) {
/** @type {string[]} */
const importTags = character.tags.map(t => t.trim()).filter(t => t)
.filter(t => !IMPORT_EXLCUDED_TAGS.includes(t))
@ -745,9 +753,9 @@ async function handleTagImport(character, { forceShow = false } = {}) {
.map(newTag);
const folderTags = getOpenBogusFolders();
// Choose the setting for this dialog. If from settings, verify the setting really exists, otherwise take "ASK".
const setting = forceShow ? tag_import_setting.ASK
: Object.values(tag_import_setting).find(setting => setting === power_user.tag_import_setting) ?? tag_import_setting.ASK;
// Choose the setting for this dialog. First check override, then saved setting or finally use "ASK".
const setting = importSetting ? importSetting :
Object.values(tag_import_setting).find(setting => setting === power_user.tag_import_setting) ?? tag_import_setting.ASK;
switch (setting) {
case tag_import_setting.ALL:

View File

@ -0,0 +1,7 @@
<h3>Enter the Git URL of the extension to install</h3>
<br>
<p><b>Disclaimer:</b> Please be aware that using external extensions can have unintended side effects and may pose
security risks. Always make sure you trust the source before importing an extension. We are not responsible for any
damage caused by third-party extensions.</p>
<br>
<p>Example: <tt> https://github.com/author/extension-name </tt></p>

View File

@ -0,0 +1,8 @@
<div id="WIEntryHeaderTitlesPC" class="flex-container wide100p spaceBetween justifyCenter textAlignCenter" style="padding:0 4.5em;">
<small class="flex1" data-i18n="Title/Memo">Title/Memo</small>
<small style="width: calc(3.5em + 15px)" data-i18n="Strategy">Strategy</small>
<small style="width: calc(3.5em + 30px)" data-i18n="Position">Position</small>
<small style="width: calc(3.5em + 20px)" data-i18n="Depth">Depth</small>
<small style="width: calc(3.5em + 20px)" data-i18n="Order">Order</small>
<small style="width: calc(3.5em + 15px)" data-i18n="Trigger %">Trigger %</small>
</div>

View File

@ -599,6 +599,10 @@ export function getCurrentOpenRouterModelTokenizer() {
return tokenizers.YI;
case 'Mistral':
return tokenizers.MISTRAL;
case 'Gemini':
return tokenizers.GEMMA;
case 'Claude':
return tokenizers.CLAUDE;
default:
return tokenizers.OPENAI;
}
@ -618,7 +622,7 @@ export function getCurrentDreamGenModelTokenizer() {
}
}
jQuery(function () {
export function initTextGenModels() {
$('#mancer_model').on('change', onMancerModelSelect);
$('#model_togetherai_select').on('change', onTogetherModelSelect);
$('#model_infermaticai_select').on('change', onInfermaticAIModelSelect);
@ -708,6 +712,7 @@ jQuery(function () {
searchInputPlaceholder: 'Search providers...',
searchInputCssClass: 'text_pole',
width: '100%',
closeOnSelect: false,
});
providersSelect.on('select2:select', function (/** @type {any} */ evt) {
const element = evt.params.data.element;
@ -718,4 +723,4 @@ jQuery(function () {
$(this).trigger('change');
});
}
});
}

View File

@ -94,7 +94,7 @@ let DREAMGEN_SERVER = 'https://dreamgen.com';
let OPENROUTER_SERVER = 'https://openrouter.ai/api';
let FEATHERLESS_SERVER = 'https://api.featherless.ai/v1';
const SERVER_INPUTS = {
export const SERVER_INPUTS = {
[textgen_types.OOBA]: '#textgenerationwebui_api_url_text',
[textgen_types.VLLM]: '#vllm_api_url_text',
[textgen_types.APHRODITE]: '#aphrodite_api_url_text',
@ -1064,6 +1064,34 @@ function getLogprobsNumber() {
return 10;
}
/**
* Replaces {{macro}} in a comma-separated or serialized JSON array string.
* @param {string} str Input string
* @returns {string} Output string
*/
function replaceMacrosInList(str) {
if (!str || typeof str !== 'string') {
return str;
}
try {
const array = JSON.parse(str);
if (!Array.isArray(array)) {
throw new Error('Not an array');
}
for (let i = 0; i < array.length; i++) {
array[i] = substituteParams(array[i]);
}
return JSON.stringify(array);
} catch {
const array = str.split(',');
for (let i = 0; i < array.length; i++) {
array[i] = substituteParams(array[i]);
}
return array.join(',');
}
}
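A quick illustration of the two code paths, assuming substituteParams resolves {{user}} to 'Alice':

    // Serialized JSON array: parsed, each element substituted, then re-serialized
    replaceMacrosInList('["{{user}}:", "###"]');   // -> '["Alice:","###"]'
    // Plain comma-separated list: split on commas, substituted, re-joined
    replaceMacrosInList('{{user}}:,###');          // -> 'Alice:,###'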
export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, isContinue, cfgValues, type) {
const canMultiSwipe = !isContinue && !isImpersonate && type !== 'quiet';
const dynatemp = isDynamicTemperatureSupported();
@ -1103,7 +1131,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
'dry_allowed_length': settings.dry_allowed_length,
'dry_multiplier': settings.dry_multiplier,
'dry_base': settings.dry_base,
'dry_sequence_breakers': settings.dry_sequence_breakers,
'dry_sequence_breakers': replaceMacrosInList(settings.dry_sequence_breakers),
'dry_penalty_last_n': settings.dry_penalty_last_n,
'max_tokens_second': settings.max_tokens_second,
'sampler_priority': settings.type === OOBA ? settings.sampler_priority : undefined,

View File

@ -26,6 +26,7 @@ export const tokenizers = {
API_KOBOLD: 10,
CLAUDE: 11,
LLAMA3: 12,
GEMMA: 13,
BEST_MATCH: 99,
};
@ -34,6 +35,7 @@ export const SENTENCEPIECE_TOKENIZERS = [
tokenizers.MISTRAL,
tokenizers.YI,
tokenizers.LLAMA3,
tokenizers.GEMMA,
// uncomment when NovelAI releases Kayra and Clio weights, lol
//tokenizers.NERD,
//tokenizers.NERD2,
@ -91,6 +93,11 @@ const TOKENIZER_URLS = {
decode: '/api/tokenizers/llama3/decode',
count: '/api/tokenizers/llama3/encode',
},
[tokenizers.GEMMA]: {
encode: '/api/tokenizers/gemma/encode',
decode: '/api/tokenizers/gemma/decode',
count: '/api/tokenizers/gemma/encode',
},
[tokenizers.API_TEXTGENERATIONWEBUI]: {
encode: '/api/tokenizers/remote/textgenerationwebui/encode',
count: '/api/tokenizers/remote/textgenerationwebui/encode',
@ -140,10 +147,46 @@ async function resetTokenCache() {
}
}
/**
* @typedef {object} Tokenizer
* @property {number} tokenizerId - The id of the tokenizer option
* @property {string} tokenizerKey - Internal name/key of the tokenizer
* @property {string} tokenizerName - Human-readable detailed name of the tokenizer (as displayed in the UI)
*/
/**
* Gets all tokenizers available to the user.
* @returns {Tokenizer[]} Tokenizer info.
*/
export function getAvailableTokenizers() {
const tokenizerOptions = $('#tokenizer').find('option').toArray();
return tokenizerOptions.map(tokenizerOption => ({
tokenizerId: Number(tokenizerOption.value),
tokenizerKey: Object.entries(tokenizers).find(([_, value]) => value === Number(tokenizerOption.value))[0].toLocaleLowerCase(),
tokenizerName: tokenizerOption.text,
}))
}
/**
* Selects tokenizer if not already selected.
* @param {number} tokenizerId Tokenizer ID.
*/
export function selectTokenizer(tokenizerId) {
if (tokenizerId !== power_user.tokenizer) {
const tokenizer = getAvailableTokenizers().find(tokenizer => tokenizer.tokenizerId === tokenizerId);
if (!tokenizer) {
console.warn('Failed to find tokenizer with id', tokenizerId);
return;
}
$('#tokenizer').val(tokenizer.tokenizerId).trigger('change');
toastr.info(`Tokenizer: "${tokenizer.tokenizerName}" selected`);
}
}
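A short sketch of how the two helpers compose; the 'llama3' key is illustrative:

    const target = getAvailableTokenizers().find(t => t.tokenizerKey === 'llama3');
    if (target) {
        // No-op if the tokenizer is already active; otherwise switches and notifies the user
        selectTokenizer(target.tokenizerId);
    }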
/**
* Gets the friendly name of the current tokenizer.
* @param {string} forApi API to get the tokenizer for. Defaults to the main API.
* @returns { { tokenizerName: string, tokenizerId: number } } Tokenizer info
* @returns {Tokenizer} Tokenizer info
*/
export function getFriendlyTokenizerName(forApi) {
if (!forApi) {
@ -178,7 +221,9 @@ export function getFriendlyTokenizerName(forApi) {
? tokenizers.OPENAI
: tokenizerId;
return { tokenizerName, tokenizerId };
const tokenizerKey = Object.entries(tokenizers).find(([_, value]) => value === tokenizerId)[0].toLocaleLowerCase();
return { tokenizerName, tokenizerKey, tokenizerId };
}
/**
@ -232,6 +277,9 @@ export function getTokenizerBestMatch(forApi) {
if (model.includes('mistral') || model.includes('mixtral')) {
return tokenizers.MISTRAL;
}
if (model.includes('gemma')) {
return tokenizers.GEMMA;
}
}
return tokenizers.LLAMA;
@ -441,12 +489,14 @@ export function getTokenizerModel() {
const turbo0301Tokenizer = 'gpt-3.5-turbo-0301';
const turboTokenizer = 'gpt-3.5-turbo';
const gpt4Tokenizer = 'gpt-4';
const gpt4oTokenizer = 'gpt-4o';
const gpt2Tokenizer = 'gpt2';
const claudeTokenizer = 'claude';
const llamaTokenizer = 'llama';
const llama3Tokenizer = 'llama3';
const mistralTokenizer = 'mistral';
const yiTokenizer = 'yi';
const gemmaTokenizer = 'gemma';
// Assuming no one would use it for different models.. right?
if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) {
@ -491,6 +541,12 @@ export function getTokenizerModel() {
else if (model?.architecture?.tokenizer === 'Yi') {
return yiTokenizer;
}
else if (model?.architecture?.tokenizer === 'Gemini') {
return gemmaTokenizer;
}
else if (oai_settings.openrouter_model.includes('gpt-4o')) {
return gpt4oTokenizer;
}
else if (oai_settings.openrouter_model.includes('gpt-4')) {
return gpt4Tokenizer;
}
@ -509,7 +565,7 @@ export function getTokenizerModel() {
}
if (oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE) {
return oai_settings.google_model;
return gemmaTokenizer;
}
if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) {
@ -543,12 +599,24 @@ export function getTokenizerModel() {
if (oai_settings.groq_model.includes('mistral') || oai_settings.groq_model.includes('mixtral')) {
return mistralTokenizer;
}
if (oai_settings.groq_model.includes('gemma')) {
return gemmaTokenizer;
}
}
if (oai_settings.chat_completion_source === chat_completion_sources.ZEROONEAI) {
return yiTokenizer;
}
if (oai_settings.chat_completion_source === chat_completion_sources.BLOCKENTROPY) {
if (oai_settings.blockentropy_model.includes('llama3')) {
return llama3Tokenizer;
}
if (oai_settings.blockentropy_model.includes('miqu') || oai_settings.blockentropy_model.includes('mixtral')) {
return mistralTokenizer;
}
}
// Default to Turbo 3.5
return turboTokenizer;
}
@ -770,6 +838,7 @@ function getTextgenAPITokenizationParams(str) {
url: getTextGenServer(),
legacy_api: textgen_settings.legacy_api && (textgen_settings.type === OOBA || textgen_settings.type === APHRODITE),
vllm_model: textgen_settings.vllm_model,
aphrodite_model: textgen_settings.aphrodite_model,
};
}

View File

@ -498,9 +498,8 @@ export function restoreCaretPosition(element, position) {
}
export async function resetScrollHeight(element) {
let scrollHeight = $(element).prop('scrollHeight');
$(element).css('height', '0px');
$(element).css('height', scrollHeight + 3 + 'px');
$(element).css('height', $(element).prop('scrollHeight') + 3 + 'px');
}
/**
@ -1729,20 +1728,24 @@ export function select2ModifyOptions(element, items, { select = false, changeEve
/** @type {Select2Option[]} */
const dataItems = items.map(x => typeof x === 'string' ? { id: getSelect2OptionId(x), text: x } : x);
const existingValues = [];
const optionsToSelect = [];
const newOptions = [];
dataItems.forEach(item => {
// Set the value, creating a new option if necessary
if (element.find('option[value=\'' + item.id + '\']').length) {
if (select) existingValues.push(item.id);
if (select) optionsToSelect.push(item.id);
} else {
// Create a DOM Option and optionally pre-select by default
var newOption = new Option(item.text, item.id, select, select);
// Append it to the select
element.append(newOption);
if (select) element.trigger('change', changeEventArgs);
newOptions.push(newOption);
if (select) optionsToSelect.push(item.id);
}
if (existingValues.length) element.val(existingValues).trigger('change', changeEventArgs);
});
element.append(newOptions);
if (optionsToSelect.length) element.val(optionsToSelect).trigger('change', changeEventArgs);
}
/**
@ -1931,6 +1934,34 @@ export function getFreeName(name, list, numberFormatter = (n) => ` #${n}`) {
return `${name}${numberFormatter(counter)}`;
}
/**
* Toggles the visibility of a drawer by changing the display style of its content.
* This function skips the usual drawer animation.
*
* @param {HTMLElement} drawer - The drawer element to toggle
* @param {boolean} [expand=true] - Whether to expand or collapse the drawer
*/
export function toggleDrawer(drawer, expand = true) {
/** @type {HTMLElement} */
const icon = drawer.querySelector('.inline-drawer-icon');
/** @type {HTMLElement} */
const content = drawer.querySelector('.inline-drawer-content');
if (expand) {
icon.classList.remove('up', 'fa-circle-chevron-up');
icon.classList.add('down', 'fa-circle-chevron-down');
content.style.display = 'block';
} else {
icon.classList.remove('down', 'fa-circle-chevron-down');
icon.classList.add('up', 'fa-circle-chevron-up');
content.style.display = 'none';
}
// Set the height of "autoSetHeight" textareas within the inline-drawer to their scroll height
content.querySelectorAll('textarea.autoSetHeight').forEach(resetScrollHeight);
}
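A usage sketch, assuming standard inline drawer markup is present; the selector is illustrative:

    const drawer = document.querySelector('#WorldInfo .inline-drawer');
    if (drawer) {
        toggleDrawer(drawer, true);   // expand instantly, skipping the animation
        toggleDrawer(drawer, false);  // collapse again
    }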
export async function fetchFaFile(name) {
const style = document.createElement('style');
style.innerHTML = await (await fetch(`/css/${name}`)).text();

View File

@ -17,6 +17,7 @@ import { commonEnumProviders, enumIcons } from './slash-commands/SlashCommandCom
import { SlashCommandClosure } from './slash-commands/SlashCommandClosure.js';
import { callGenericPopup, Popup, POPUP_TYPE } from './popup.js';
import { StructuredCloneMap } from './util/StructuredCloneMap.js';
import { renderTemplateAsync } from './templates.js';
export const world_info_insertion_strategy = {
evenly: 0,
@ -72,6 +73,7 @@ export let world_info_match_whole_words = false;
export let world_info_use_group_scoring = false;
export let world_info_character_strategy = world_info_insertion_strategy.character_first;
export let world_info_budget_cap = 0;
export let world_info_max_recursion_steps = 0;
const saveWorldDebounced = debounce(async (name, data) => await _save(name, data), debounce_timeout.relaxed);
const saveSettingsDebounced = debounce(() => {
Object.assign(world_info, { globalSelect: selected_world_info });
@ -709,6 +711,7 @@ export function getWorldInfoSettings() {
world_info_character_strategy,
world_info_budget_cap,
world_info_use_group_scoring,
world_info_max_recursion_steps,
};
}
@ -795,6 +798,8 @@ export function setWorldInfoSettings(settings, data) {
world_info_budget_cap = Number(settings.world_info_budget_cap);
if (settings.world_info_use_group_scoring !== undefined)
world_info_use_group_scoring = Boolean(settings.world_info_use_group_scoring);
if (settings.world_info_max_recursion_steps !== undefined)
world_info_max_recursion_steps = Number(settings.world_info_max_recursion_steps);
// Migrate old settings
if (world_info_budget > 100) {
@ -843,6 +848,9 @@ export function setWorldInfoSettings(settings, data) {
$('#world_info_budget_cap').val(world_info_budget_cap);
$('#world_info_budget_cap_counter').val(world_info_budget_cap);
$('#world_info_max_recursion_steps').val(world_info_max_recursion_steps);
$('#world_info_max_recursion_steps_counter').val(world_info_max_recursion_steps);
world_names = data.world_names?.length ? data.world_names : [];
// Add to existing selected WI if it exists
@ -1854,28 +1862,9 @@ function displayWorldEntries(name, data, navigation = navigation_option.none, fl
worldEntriesList.find('*').off();
worldEntriesList.empty();
const keywordHeaders = `
<div id="WIEntryHeaderTitlesPC" class="flex-container wide100p spaceBetween justifyCenter textAlignCenter" style="padding:0 4.5em;">
<small class="flex1">
Title/Memo
</small>
<small style="width: calc(3.5em + 15px)">
Status
</small>
<small style="width: calc(3.5em + 30px)">
Position
</small>
<small style="width: calc(3.5em + 20px)">
Depth
</small>
<small style="width: calc(3.5em + 20px)">
Order
</small>
<small style="width: calc(3.5em + 15px)">
Trigger %
</small>
</div>`;
const blocks = page.map(entry => getWorldEntry(name, data, entry)).filter(x => x);
const keywordHeaders = await renderTemplateAsync('worldInfoKeywordHeaders');
const blocksPromises = page.map(async (entry) => await getWorldEntry(name, data, entry)).filter(x => x);
const blocks = await Promise.all(blocksPromises);
const isCustomOrder = $('#world_info_sort_order').find(':selected').data('rule') === 'custom';
if (!isCustomOrder) {
blocks.forEach(block => {
@ -2275,7 +2264,7 @@ export function parseRegexFromString(input) {
}
}
function getWorldEntry(name, data, entry) {
async function getWorldEntry(name, data, entry) {
if (!data.entries[entry.uid]) {
return;
}
@ -2317,6 +2306,9 @@ function getWorldEntry(name, data, entry) {
}
if (isFancyInput) {
// First initialize existing values as options, before initializing select2, to speed up performance
select2ModifyOptions(input, entry[entryPropName], { select: true, changeEventArgs: { skipReset: true, noSave: true } });
input.select2({
ajax: dynamicSelect2DataViaAjax(() => worldEntryKeyOptionsCache),
tags: true,
@ -2358,8 +2350,6 @@ function getWorldEntry(name, data, entry) {
input.next('span.select2-container').find('textarea')
.val(key).trigger('input');
}, { openDrawer: true });
select2ModifyOptions(input, entry[entryPropName], { select: true, changeEventArgs: { skipReset: true, noSave: true } });
}
else {
// Compatibility with mobile devices. On mobile we need a text input field, not a select option control, so we need its own event handlers
@ -2476,7 +2466,7 @@ function getWorldEntry(name, data, entry) {
if (!isMobile()) {
$(characterFilter).select2({
width: '100%',
placeholder: 'All characters will pull from this entry.',
placeholder: 'Tie this entry to specific characters or characters with specific tags',
allowClear: true,
closeOnSelect: false,
});
@ -2876,21 +2866,7 @@ function getWorldEntry(name, data, entry) {
//add UID above content box (less important doesn't need to be always visible)
template.find('.world_entry_form_uid_value').text(`(UID: ${entry.uid})`);
// disable
/*
const disableInput = template.find('input[name="disable"]');
disableInput.data("uid", entry.uid);
disableInput.on("input", async function () {
const uid = $(this).data("uid");
const value = $(this).prop("checked");
data.entries[uid].disable = value;
setOriginalDataValue(data, uid, "enabled", !data.entries[uid].disable);
await saveWorldInfo(name, data);
});
disableInput.prop("checked", entry.disable).trigger("input");
*/
//new tri-state selector for constant/normal/disabled
//new tri-state selector for constant/normal/vectorized
const entryStateSelector = template.find('select[name="entryStateSelector"]');
entryStateSelector.data('uid', entry.uid);
entryStateSelector.on('click', function (event) {
@ -2903,49 +2879,43 @@ function getWorldEntry(name, data, entry) {
switch (value) {
case 'constant':
data.entries[uid].constant = true;
data.entries[uid].disable = false;
data.entries[uid].vectorized = false;
setWIOriginalDataValue(data, uid, 'enabled', true);
setWIOriginalDataValue(data, uid, 'constant', true);
setWIOriginalDataValue(data, uid, 'extensions.vectorized', false);
template.removeClass('disabledWIEntry');
break;
case 'normal':
data.entries[uid].constant = false;
data.entries[uid].disable = false;
data.entries[uid].vectorized = false;
setWIOriginalDataValue(data, uid, 'enabled', true);
setWIOriginalDataValue(data, uid, 'constant', false);
setWIOriginalDataValue(data, uid, 'extensions.vectorized', false);
template.removeClass('disabledWIEntry');
break;
case 'vectorized':
data.entries[uid].constant = false;
data.entries[uid].disable = false;
data.entries[uid].vectorized = true;
setWIOriginalDataValue(data, uid, 'enabled', true);
setWIOriginalDataValue(data, uid, 'constant', false);
setWIOriginalDataValue(data, uid, 'extensions.vectorized', true);
template.removeClass('disabledWIEntry');
break;
case 'disabled':
data.entries[uid].constant = false;
data.entries[uid].disable = true;
data.entries[uid].vectorized = false;
setWIOriginalDataValue(data, uid, 'enabled', false);
setWIOriginalDataValue(data, uid, 'constant', false);
setWIOriginalDataValue(data, uid, 'extensions.vectorized', false);
template.addClass('disabledWIEntry');
break;
}
await saveWorldInfo(name, data);
});
const entryKillSwitch = template.find('div[name="entryKillSwitch"]');
entryKillSwitch.data('uid', entry.uid);
entryKillSwitch.on('click', async function (event) {
const uid = entry.uid;
data.entries[uid].disable = !data.entries[uid].disable;
const isActive = !data.entries[uid].disable;
setWIOriginalDataValue(data, uid, 'enabled', isActive);
template.toggleClass('disabledWIEntry', !isActive);
entryKillSwitch.toggleClass('fa-toggle-off', !isActive);
entryKillSwitch.toggleClass('fa-toggle-on', isActive);
await saveWorldInfo(name, data);
});
const entryState = function () {
if (entry.disable === true) {
return 'disabled';
} else if (entry.constant === true) {
if (entry.constant === true) {
return 'constant';
} else if (entry.vectorized === true) {
return 'vectorized';
@ -2953,6 +2923,12 @@ function getWorldEntry(name, data, entry) {
return 'normal';
}
};
const isActive = !entry.disable;
template.toggleClass('disabledWIEntry', !isActive);
entryKillSwitch.toggleClass('fa-toggle-off', !isActive);
entryKillSwitch.toggleClass('fa-toggle-on', isActive);
template
.find(`select[name="entryStateSelector"] option[value=${entryState()}]`)
.prop('selected', true)
@ -3754,6 +3730,12 @@ export async function checkWorldInfo(chat, maxContext, isDryRun) {
console.debug(`[WI] --- SEARCHING ENTRIES (on ${sortedEntries.length} entries) ---`);
while (scanState) {
//if world_info_max_recursion_steps is non-zero min activations are disabled, and vice versa
if (world_info_max_recursion_steps && world_info_max_recursion_steps <= count) {
console.debug('[WI] Search stopped by reaching max recursion steps', world_info_max_recursion_steps);
break;
}
// Track how many times the loop has run. May be useful for debugging.
count++;
@ -4793,8 +4775,13 @@ jQuery(() => {
$('#world_info_min_activations').on('input', function () {
world_info_min_activations = Number($(this).val());
$('#world_info_min_activations_counter').val($(this).val());
saveSettings();
$('#world_info_min_activations_counter').val(world_info_min_activations);
if (world_info_min_activations !== 0) {
$('#world_info_max_recursion_steps').val(0).trigger('input');
} else {
saveSettings();
}
});
$('#world_info_min_activations_depth_max').on('input', function () {
@ -4850,6 +4837,16 @@ jQuery(() => {
saveSettings();
});
$('#world_info_max_recursion_steps').on('input', function () {
world_info_max_recursion_steps = Number($(this).val());
$('#world_info_max_recursion_steps_counter').val(world_info_max_recursion_steps);
if (world_info_max_recursion_steps !== 0) {
$('#world_info_min_activations').val(0).trigger('input');
} else {
saveSettings();
}
});
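A small illustration of the mutual exclusion the two handlers enforce; the values are arbitrary:

    // A non-zero max recursion steps resets min activations to 0 ...
    $('#world_info_max_recursion_steps').val(3).trigger('input');
    // ... and a non-zero min activations resets max recursion steps to 0.
    $('#world_info_min_activations').val(2).trigger('input');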
$('#world_button').on('click', async function (event) {
const chid = $('#set_character_world').data('chid');

View File

@ -3515,6 +3515,8 @@ grammarly-extension {
.drag-handle {
cursor: grab;
/* Make the drag handle not selectable in most browsers */
user-select: none;
}
#form_rename_chat {
@ -4577,6 +4579,7 @@ a {
image-rendering: -webkit-optimize-contrast;
}
.mes_img_swipes,
.mes_img_controls {
position: absolute;
top: 0.1em;
@ -4586,9 +4589,16 @@ a {
opacity: 0;
flex-direction: row;
justify-content: space-between;
align-items: center;
padding: 1em;
}
.mes_img_swipes {
top: unset;
bottom: 0.1rem;
}
.mes_img_swipes .right_menu_button,
.mes_img_controls .right_menu_button {
filter: brightness(90%);
text-shadow: 1px 1px var(--SmartThemeShadowColor) !important;
@ -4597,16 +4607,20 @@ a {
width: 1.25em;
}
.mes_img_swipes .right_menu_button::before,
.mes_img_controls .right_menu_button::before {
/* Fix weird alignment with this font-awesome icons on focus */
position: relative;
top: 0.6125em;
}
.mes_img_swipes .right_menu_button:hover,
.mes_img_controls .right_menu_button:hover {
filter: brightness(150%);
}
.mes_img_container:hover .mes_img_swipes,
.mes_img_container:focus-within .mes_img_swipes,
.mes_img_container:hover .mes_img_controls,
.mes_img_container:focus-within .mes_img_controls {
opacity: 1;
@ -4620,6 +4634,17 @@ body:not(.caption) .mes_img_caption {
display: none;
}
.mes_img_container:not(.img_swipes) .mes_img_swipes,
body:not(.sd) .mes_img_swipes {
display: none;
}
.mes_img_swipe_counter {
font-weight: 600;
filter: drop-shadow(2px 4px 6px black);
cursor: default;
}
.img_enlarged_holder {
/* Scaling via flex-grow and object-fit only works if we have some kind of base-height set */
min-height: 120px;

245
server.js
View File

@ -43,6 +43,8 @@ const {
getConfigValue,
color,
forwardFetchResponse,
removeColorFormatting,
getSeparator,
} = require('./src/util');
const { ensureThumbnailCache } = require('./src/endpoints/thumbnails');
@ -54,9 +56,6 @@ if (process.versions && process.versions.node && process.versions.node.match(/20
if (net.setDefaultAutoSelectFamily) net.setDefaultAutoSelectFamily(false);
}
// Set default DNS resolution order to IPv4 first
dns.setDefaultResultOrder('ipv4first');
const DEFAULT_PORT = 8000;
const DEFAULT_AUTORUN = false;
const DEFAULT_LISTEN = false;
@ -66,16 +65,46 @@ const DEFAULT_ACCOUNTS = false;
const DEFAULT_CSRF_DISABLED = false;
const DEFAULT_BASIC_AUTH = false;
const DEFAULT_ENABLE_IPV6 = false;
const DEFAULT_ENABLE_IPV4 = true;
const DEFAULT_PREFER_IPV6 = false;
const DEFAULT_AVOID_LOCALHOST = false;
const DEFAULT_AUTORUN_HOSTNAME = 'auto';
const DEFAULT_AUTORUN_PORT = -1;
const cliArguments = yargs(hideBin(process.argv))
.usage('Usage: <your-start-script> <command> [options]')
.option('port', {
.option('enableIPv6', {
type: 'boolean',
default: null,
describe: `Enables IPv6.\n[config default: ${DEFAULT_ENABLE_IPV6}]`,
}).option('enableIPv4', {
type: 'boolean',
default: null,
describe: `Enables IPv4.\n[config default: ${DEFAULT_ENABLE_IPV4}]`,
}).option('port', {
type: 'number',
default: null,
describe: `Sets the port under which SillyTavern will run.\nIf not provided falls back to yaml config 'port'.\n[config default: ${DEFAULT_PORT}]`,
}).option('dnsPreferIPv6', {
type: 'boolean',
default: null,
describe: `Prefers IPv6 for DNS.\nYou should probably have this enabled if you're on an IPv6-only network.\nIf not provided falls back to yaml config 'dnsPreferIPv6'.\n[config default: ${DEFAULT_PREFER_IPV6}]`,
}).option('autorun', {
type: 'boolean',
default: null,
describe: `Automatically launch SillyTavern in the browser.\nAutorun is automatically disabled if --ssl is set to true.\nIf not provided falls back to yaml config 'autorun'.\n[config default: ${DEFAULT_AUTORUN}]`,
}).option('autorunHostname', {
type: 'string',
default: null,
describe: 'The autorun hostname, probably best left on \'auto\'.\nUse values like \'localhost\' or \'st.example.com\'',
}).option('autorunPortOverride', {
type: 'string',
default: null,
describe: 'Overrides the port used for autorun: opens your browser on this port regardless of which port the server is running on. Use -1 to use the server port',
}).option('listen', {
type: 'boolean',
default: null,
@ -108,6 +137,10 @@ const cliArguments = yargs(hideBin(process.argv))
type: 'string',
default: null,
describe: 'Root directory for data storage',
}).option('avoidLocalhost', {
type: 'boolean',
default: null,
describe: 'Avoids using \'localhost\' for autorun in auto mode.\nUse this if you don\'t have \'localhost\' in your hosts file',
}).option('basicAuthMode', {
type: 'boolean',
default: null,
@ -138,6 +171,31 @@ const enableAccounts = getConfigValue('enableUserAccounts', DEFAULT_ACCOUNTS);
const uploadsPath = path.join(dataRoot, require('./src/constants').UPLOADS_DIRECTORY);
const enableIPv6 = cliArguments.enableIPv6 ?? getConfigValue('protocol.ipv6', DEFAULT_ENABLE_IPV6);
const enableIPv4 = cliArguments.enableIPv4 ?? getConfigValue('protocol.ipv4', DEFAULT_ENABLE_IPV4);
const autorunHostname = cliArguments.autorunHostname ?? getConfigValue('autorunHostname', DEFAULT_AUTORUN_HOSTNAME);
const autorunPortOverride = cliArguments.autorunPortOverride ?? getConfigValue('autorunPortOverride', DEFAULT_AUTORUN_PORT);
const dnsPreferIPv6 = cliArguments.dnsPreferIPv6 ?? getConfigValue('dnsPreferIPv6', DEFAULT_PREFER_IPV6);
const avoidLocalhost = cliArguments.avoidLocalhost ?? getConfigValue('avoidLocalhost', DEFAULT_AVOID_LOCALHOST);
if (dnsPreferIPv6) {
// Set default DNS resolution order to IPv6 first
dns.setDefaultResultOrder('ipv6first');
console.log('Preferring IPv6 for DNS resolution');
} else {
// Set default DNS resolution order to IPv4 first
dns.setDefaultResultOrder('ipv4first');
console.log('Preferring IPv4 for DNS resolution');
}
if (!enableIPv6 && !enableIPv4) {
console.error('error: You can\'t disable all internet protocols: at least IPv6 or IPv4 must be enabled.');
process.exit(1);
}
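A sketch of how the new protocol and autorun options might be supplied on the command line instead of config.yaml; the values are illustrative:

    node server.js --enableIPv6 --enableIPv4=false --dnsPreferIPv6 --autorunHostname st.example.com --autorunPortOverride 8443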
// CORS Settings //
const CORS = cors({
origin: 'null',
@ -546,15 +604,15 @@ app.use('/api/speech', require('./src/endpoints/speech').router);
// Azure TTS
app.use('/api/azure', require('./src/endpoints/azure').router);
const tavernUrl = new URL(
const tavernUrlV6 = new URL(
(cliArguments.ssl ? 'https://' : 'http://') +
(listen ? '0.0.0.0' : '127.0.0.1') +
(listen ? '[::]' : '[::1]') +
(':' + server_port),
);
const autorunUrl = new URL(
const tavernUrl = new URL(
(cliArguments.ssl ? 'https://' : 'http://') +
('127.0.0.1') +
(listen ? '0.0.0.0' : '127.0.0.1') +
(':' + server_port),
);
@ -607,19 +665,67 @@ const preSetupTasks = async function () {
};
/**
* Tasks that need to be run after the server starts listening.
* Gets the hostname to use for autorun in the browser.
* @returns {string} The hostname to use for autorun
*/
const postSetupTasks = async function () {
function getAutorunHostname() {
if (autorunHostname === 'auto') {
if (enableIPv6 && enableIPv4) {
if (avoidLocalhost) return '[::1]';
return 'localhost';
}
if (enableIPv6) {
return '[::1]';
}
if (enableIPv4) {
return '127.0.0.1';
}
}
return autorunHostname;
}
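A few concrete resolutions of the logic above, assuming no SSL and the default port 8000 (or the autorunPortOverride port, if one is set):

    // autorunHostname: 'auto', ipv4 + ipv6 enabled, avoidLocalhost: false -> http://localhost:8000
    // autorunHostname: 'auto', only ipv6 enabled (or avoidLocalhost set)  -> http://[::1]:8000
    // autorunHostname: 'auto', only ipv4 enabled                          -> http://127.0.0.1:8000
    // autorunHostname: 'st.example.com'                                   -> http://st.example.com:8000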
/**
* Tasks that need to be run after the server starts listening.
* @param {boolean} v6Failed If the server failed to start on IPv6
* @param {boolean} v4Failed If the server failed to start on IPv4
*/
const postSetupTasks = async function (v6Failed, v4Failed) {
const autorunUrl = new URL(
(cliArguments.ssl ? 'https://' : 'http://') +
(getAutorunHostname()) +
(':') +
((autorunPortOverride >= 0) ? autorunPortOverride : server_port),
);
console.log('Launching...');
if (autorun) open(autorunUrl.toString());
setWindowTitle('SillyTavern WebServer');
console.log(color.green('SillyTavern is listening on: ' + tavernUrl));
let logListen = 'SillyTavern is listening on';
if (enableIPv6 && !v6Failed) {
logListen += color.green(' IPv6: ' + tavernUrlV6.host);
}
if (enableIPv4 && !v4Failed) {
logListen += color.green(' IPv4: ' + tavernUrl.host);
}
const goToLog = 'Go to: ' + color.blue(autorunUrl) + ' to open SillyTavern';
const plainGoToLog = removeColorFormatting(goToLog);
console.log(logListen);
console.log('\n' + getSeparator(plainGoToLog.length) + '\n');
console.log(goToLog);
console.log('\n' + getSeparator(plainGoToLog.length) + '\n');
if (listen) {
console.log('\n0.0.0.0 means SillyTavern is listening on all network interfaces (Wi-Fi, LAN, localhost). If you want to limit it only to internal localhost (127.0.0.1), change the setting in config.yaml to "listen: false". Check "access.log" file in the SillyTavern directory if you want to inspect incoming connections.\n');
console.log('[::] or 0.0.0.0 means SillyTavern is listening on all network interfaces (Wi-Fi, LAN, localhost). If you want to limit it only to internal localhost ([::1] or 127.0.0.1), change the setting in config.yaml to "listen: false". Check "access.log" file in the SillyTavern directory if you want to inspect incoming connections.\n');
}
if (basicAuthMode) {
@@ -674,6 +780,100 @@ function logSecurityAlert(message) {
process.exit(1);
}
/**
* Handles the case where the server failed to start on one or both protocols.
* @param {boolean} v6Failed If the server failed to start on IPv6
* @param {boolean} v4Failed If the server failed to start on IPv4
*/
function handleServerListenFail(v6Failed, v4Failed) {
if (v6Failed && !enableIPv4) {
console.error(color.red('fatal error: Failed to start server on IPv6, and IPv4 is disabled'));
process.exit(1);
}
if (v4Failed && !enableIPv6) {
console.error(color.red('fatal error: Failed to start server on IPv4, and IPv6 is disabled'));
process.exit(1);
}
if (v6Failed && v4Failed) {
console.error(color.red('fatal error: Failed to start server on both IPv6 and IPv4'));
process.exit(1);
}
}
/**
* Creates an HTTPS server.
* @param {URL} url The URL to listen on
* @returns {Promise<void>} A promise that resolves when the server is listening
* @throws {Error} If the server fails to start
*/
function createHttpsServer(url) {
return new Promise((resolve, reject) => {
const server = https.createServer(
{
cert: fs.readFileSync(cliArguments.certPath),
key: fs.readFileSync(cliArguments.keyPath),
}, app);
server.on('error', reject);
server.on('listening', resolve);
server.listen(url.port || 443, url.hostname);
});
}
/**
* Creates an HTTP server.
* @param {URL} url The URL to listen on
* @returns {Promise<void>} A promise that resolves when the server is listening
* @throws {Error} If the server fails to start
*/
function createHttpServer(url) {
return new Promise((resolve, reject) => {
const server = http.createServer(app);
server.on('error', reject);
server.on('listening', resolve);
server.listen(url.port || 80, url.hostname);
});
}
async function startHTTPorHTTPS() {
let v6Failed = false;
let v4Failed = false;
const createFunc = cliArguments.ssl ? createHttpsServer : createHttpServer;
if (enableIPv6) {
try {
await createFunc(tavernUrlV6);
} catch (error) {
console.error('non-fatal error: failed to start server on IPv6');
console.error(error);
v6Failed = true;
}
}
if (enableIPv4) {
try {
await createFunc(tavernUrl);
} catch (error) {
console.error('non-fatal error: failed to start server on IPv4');
console.error(error);
v4Failed = true;
}
}
return [v6Failed, v4Failed];
}
async function startServer() {
const [v6Failed, v4Failed] = await startHTTPorHTTPS();
handleServerListenFail(v6Failed, v4Failed);
postSetupTasks(v6Failed, v4Failed);
}
async function verifySecuritySettings() {
// Skip all security checks as listen is set to false
if (!listen) {
@@ -707,23 +907,4 @@ userModule.initUserStorage(dataRoot)
.then(userModule.migrateUserData)
.then(verifySecuritySettings)
.then(preSetupTasks)
.finally(() => {
if (cliArguments.ssl) {
https.createServer(
{
cert: fs.readFileSync(cliArguments.certPath),
key: fs.readFileSync(cliArguments.keyPath),
}, app)
.listen(
Number(tavernUrl.port) || 443,
tavernUrl.hostname,
postSetupTasks,
);
} else {
http.createServer(app).listen(
Number(tavernUrl.port) || 80,
tavernUrl.hostname,
postSetupTasks,
);
}
});
.finally(startServer);
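For reference, here is a standalone sketch (not part of this commit) of the dual-stack startup pattern introduced above: attempt the IPv6 and IPv4 binds independently, remember which ones failed, and only abort when no protocol is left listening. The Express app, port, and helper names are placeholders.

// Illustrative sketch only, not part of the commit. Mirrors the
// startHTTPorHTTPS/handleServerListenFail flow above with hypothetical names.
const http = require('http');
const express = require('express');

const app = express();
const PORT = 8000;

// Try to bind one address; resolve true on success, false on failure.
function listenOn(hostname) {
    return new Promise((resolve) => {
        const server = http.createServer(app);
        server.on('error', (error) => {
            console.error(`non-fatal: could not bind ${hostname}:`, error.code);
            resolve(false);
        });
        server.listen(PORT, hostname, () => resolve(true));
    });
}

(async () => {
    // On hosts where '::' already binds dual-stack, the IPv4 bind may fail with
    // EADDRINUSE; that is tolerated, just like the v6Failed/v4Failed flags above.
    const v6Ok = await listenOn('::');
    const v4Ok = await listenOn('0.0.0.0');
    if (!v6Ok && !v4Ok) {
        console.error('fatal: could not bind any protocol');
        process.exit(1);
    }
})();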

View File

@@ -195,6 +195,7 @@ const CHAT_COMPLETION_SOURCES = {
PERPLEXITY: 'perplexity',
GROQ: 'groq',
ZEROONEAI: '01ai',
BLOCKENTROPY: 'blockentropy',
};
/**

View File

@@ -28,7 +28,7 @@ router.post('/caption-image', jsonParser, async (request, response) => {
],
},
],
max_tokens: 800,
max_tokens: 4096,
};
console.log('Multimodal captioning request', body);

View File

@@ -18,6 +18,7 @@ const API_PERPLEXITY = 'https://api.perplexity.ai';
const API_GROQ = 'https://api.groq.com/openai/v1';
const API_MAKERSUITE = 'https://generativelanguage.googleapis.com';
const API_01AI = 'https://api.01.ai/v1';
const API_BLOCKENTROPY = 'https://api.blockentropy.ai/v1';
/**
* Applies a post-processing step to the generated messages.
@@ -104,6 +105,7 @@ async function sendClaudeRequest(request, response) {
const apiUrl = new URL(request.body.reverse_proxy || API_CLAUDE).toString();
const apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(request.user.directories, SECRET_KEYS.CLAUDE);
const divider = '-'.repeat(process.stdout.columns);
const enableSystemPromptCache = getConfigValue('claude.enableSystemPromptCache', false);
if (!apiKey) {
console.log(color.red(`Claude API key is missing.\n${divider}`));
@@ -117,8 +119,8 @@ async function sendClaudeRequest(request, response) {
controller.abort();
});
const additionalHeaders = {};
let use_system_prompt = (request.body.model.startsWith('claude-2') || request.body.model.startsWith('claude-3')) && request.body.claude_use_sysprompt;
let converted_prompt = convertClaudeMessages(request.body.messages, request.body.assistant_prefill, use_system_prompt, request.body.human_sysprompt_message, request.body.char_name, request.body.user_name);
const useSystemPrompt = (request.body.model.startsWith('claude-2') || request.body.model.startsWith('claude-3')) && request.body.claude_use_sysprompt;
const convertedPrompt = convertClaudeMessages(request.body.messages, request.body.assistant_prefill, useSystemPrompt, request.body.human_sysprompt_message, request.body.char_name, request.body.user_name);
// Add custom stop sequences
const stopSequences = [];
if (Array.isArray(request.body.stop)) {
@@ -126,7 +128,7 @@ async function sendClaudeRequest(request, response) {
}
const requestBody = {
messages: converted_prompt.messages,
messages: convertedPrompt.messages,
model: request.body.model,
max_tokens: request.body.max_tokens,
stop_sequences: stopSequences,
@@ -135,13 +137,15 @@ async function sendClaudeRequest(request, response) {
top_k: request.body.top_k,
stream: request.body.stream,
};
if (use_system_prompt) {
requestBody.system = converted_prompt.systemPrompt;
if (useSystemPrompt) {
requestBody.system = enableSystemPromptCache
? [{ type: 'text', text: convertedPrompt.systemPrompt, cache_control: { type: 'ephemeral' } }]
: convertedPrompt.systemPrompt;
}
if (Array.isArray(request.body.tools) && request.body.tools.length > 0) {
// Claude doesn't do prefills on function calls, and doesn't allow empty messages
if (converted_prompt.messages.length && converted_prompt.messages[converted_prompt.messages.length - 1].role === 'assistant') {
converted_prompt.messages.push({ role: 'user', content: '.' });
if (convertedPrompt.messages.length && convertedPrompt.messages[convertedPrompt.messages.length - 1].role === 'assistant') {
convertedPrompt.messages.push({ role: 'user', content: '.' });
}
additionalHeaders['anthropic-beta'] = 'tools-2024-05-16';
requestBody.tool_choice = { type: request.body.tool_choice === 'required' ? 'any' : 'auto' };
@@ -150,6 +154,9 @@ async function sendClaudeRequest(request, response) {
.map(tool => tool.function)
.map(fn => ({ name: fn.name, description: fn.description, input_schema: fn.parameters }));
}
if (enableSystemPromptCache) {
additionalHeaders['anthropic-beta'] = 'prompt-caching-2024-07-31';
}
console.log('Claude request:', requestBody);
const generateResponse = await fetch(apiUrl + '/messages', {
@@ -252,7 +259,7 @@ async function sendMakerSuiteRequest(request, response) {
const apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(request.user.directories, SECRET_KEYS.MAKERSUITE);
if (!request.body.reverse_proxy && !apiKey) {
console.log('MakerSuite API key is missing.');
console.log('Google AI Studio API key is missing.');
return response.status(400).send({ error: true });
}
@@ -319,7 +326,7 @@ async function sendMakerSuiteRequest(request, response) {
}
const body = isGemini ? getGeminiBody() : getBisonBody();
console.log('MakerSuite request:', body);
console.log('Google AI Studio request:', body);
try {
const controller = new AbortController();
@@ -355,7 +362,7 @@ async function sendMakerSuiteRequest(request, response) {
}
} else {
if (!generateResponse.ok) {
console.log(`MakerSuite API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`);
console.log(`Google AI Studio API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`);
return response.status(generateResponse.status).send({ error: true });
}
@@ -363,7 +370,7 @@ async function sendMakerSuiteRequest(request, response) {
const candidates = generateResponseJson?.candidates;
if (!candidates || candidates.length === 0) {
let message = 'MakerSuite API returned no candidate';
let message = 'Google AI Studio API returned no candidate';
console.log(message, generateResponseJson);
if (generateResponseJson?.promptFeedback?.blockReason) {
message += `\nPrompt was blocked due to : ${generateResponseJson.promptFeedback.blockReason}`;
@@ -374,19 +381,19 @@ async function sendMakerSuiteRequest(request, response) {
const responseContent = candidates[0].content ?? candidates[0].output;
const responseText = typeof responseContent === 'string' ? responseContent : responseContent?.parts?.[0]?.text;
if (!responseText) {
let message = 'MakerSuite Candidate text empty';
let message = 'Google AI Studio Candidate text empty';
console.log(message, generateResponseJson);
return response.send({ error: { message } });
}
console.log('MakerSuite response:', responseText);
console.log('Google AI Studio response:', responseText);
// Wrap it back to OAI format
const reply = { choices: [{ 'message': { 'content': responseText } }] };
return response.send(reply);
}
} catch (error) {
console.log('Error communicating with MakerSuite API: ', error);
console.log('Error communicating with Google AI Studio API: ', error);
if (!response.headersSent) {
return response.status(500).send({ error: true });
}
@@ -675,6 +682,10 @@ router.post('/status', jsonParser, async function (request, response_getstatus_o
api_url = API_01AI;
api_key_openai = readSecret(request.user.directories, SECRET_KEYS.ZEROONEAI);
headers = {};
} else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.BLOCKENTROPY) {
api_url = API_BLOCKENTROPY;
api_key_openai = readSecret(request.user.directories, SECRET_KEYS.BLOCKENTROPY);
headers = {};
} else {
console.log('This chat completion source is not supported yet.');
return response_getstatus_openai.status(400).send({ error: true });
@@ -941,6 +952,11 @@ router.post('/generate', jsonParser, function (request, response) {
apiKey = readSecret(request.user.directories, SECRET_KEYS.ZEROONEAI);
headers = {};
bodyParams = {};
} else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.BLOCKENTROPY) {
apiUrl = API_BLOCKENTROPY;
apiKey = readSecret(request.user.directories, SECRET_KEYS.BLOCKENTROPY);
headers = {};
bodyParams = {};
} else {
console.log('This chat completion source is not supported yet.');
return response.status(400).send({ error: true });
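As an aside, the caching branch above produces a request roughly shaped like the sketch below (illustrative only, not part of the commit; the endpoint URL, version headers, model name, and prompt text are placeholders based on Anthropic's published Messages API and may differ from what setAdditionalHeaders actually sends).

// Illustrative sketch only, not part of the commit.
const fetch = require('node-fetch').default;

async function sendWithCachedSystemPrompt(apiKey) {
    const requestBody = {
        model: 'claude-3-5-sonnet-20240620',   // placeholder model name
        max_tokens: 1024,
        // With claude.enableSystemPromptCache, the system prompt is sent as an
        // array with a single text block marked as an ephemeral cache entry.
        system: [
            { type: 'text', text: 'You are a helpful assistant.', cache_control: { type: 'ephemeral' } },
        ],
        messages: [{ role: 'user', content: 'Hello!' }],
    };

    const response = await fetch('https://api.anthropic.com/v1/messages', {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'x-api-key': apiKey,
            'anthropic-version': '2023-06-01',
            // Opt-in beta header added by the diff when prompt caching is enabled.
            'anthropic-beta': 'prompt-caching-2024-07-31',
        },
        body: JSON.stringify(requestBody),
    });
    return response.json();
}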

View File

@@ -5,7 +5,7 @@ const Readable = require('stream').Readable;
const { jsonParser } = require('../../express-common');
const { TEXTGEN_TYPES, TOGETHERAI_KEYS, OLLAMA_KEYS, INFERMATICAI_KEYS, OPENROUTER_KEYS, VLLM_KEYS, DREAMGEN_KEYS, FEATHERLESS_KEYS } = require('../../constants');
const { forwardFetchResponse, trimV1 } = require('../../util');
const { forwardFetchResponse, trimV1, getConfigValue } = require('../../util');
const { setAdditionalHeaders } = require('../../additional-headers');
const router = express.Router();
@@ -325,11 +326,12 @@ router.post('/generate', jsonParser, async function (request, response) {
}
if (request.body.api_type === TEXTGEN_TYPES.OLLAMA) {
const keepAlive = getConfigValue('ollama.keepAlive', -1);
args.body = JSON.stringify({
model: request.body.model,
prompt: request.body.prompt,
stream: request.body.stream ?? false,
keep_alive: -1,
keep_alive: keepAlive,
raw: true,
options: _.pickBy(request.body, (_, key) => OLLAMA_KEYS.includes(key)),
});
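The keep_alive value forwarded above now comes from config.yaml via getConfigValue('ollama.keepAlive', -1): a negative value asks Ollama to keep the model loaded indefinitely, 0 unloads it right after the request, and a positive number is treated as seconds. A minimal sketch (not part of the commit; the config object, helper, and model name are placeholders):

// Illustrative sketch only, not part of the commit.
// Stand-in for getConfigValue('ollama.keepAlive', -1).
function readKeepAlive(config) {
    return config?.ollama?.keepAlive ?? -1;
}

const config = { ollama: { keepAlive: 300 } };   // keep the model loaded for 5 minutes

const ollamaBody = JSON.stringify({
    model: 'llama3',                  // placeholder model name
    prompt: 'Hello',
    stream: false,
    keep_alive: readKeepAlive(config),
    raw: true,
});

console.log(ollamaBody);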

View File

@@ -44,7 +44,7 @@ router.post('/caption-image', jsonParser, async (request, response) => {
if (!result.ok) {
const error = await result.json();
console.log(`MakerSuite API returned error: ${result.status} ${result.statusText}`, error);
console.log(`Google AI Studio API returned error: ${result.status} ${result.statusText}`, error);
return response.status(result.status).send({ error: true });
}

View File

@@ -82,7 +82,7 @@ router.post('/list/:folder', (request, response) => {
}
try {
const images = getImages(directoryPath);
const images = getImages(directoryPath, 'date');
return response.send(images);
} catch (error) {
console.error(error);

View File

@@ -67,7 +67,6 @@ router.post('/caption-image', jsonParser, async (request, response) => {
],
},
],
max_tokens: 500,
...bodyParams,
};
@@ -283,4 +282,48 @@ router.post('/generate-image', jsonParser, async (request, response) => {
}
});
const custom = express.Router();
custom.post('/generate-voice', jsonParser, async (request, response) => {
try {
const key = readSecret(request.user.directories, SECRET_KEYS.CUSTOM_OPENAI_TTS);
const { input, provider_endpoint, response_format, voice, speed, model } = request.body;
if (!provider_endpoint) {
console.log('No OpenAI-compatible TTS provider endpoint provided');
return response.sendStatus(400);
}
const result = await fetch(provider_endpoint, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${key ?? ''}`,
},
body: JSON.stringify({
input: input ?? '',
response_format: response_format ?? 'mp3',
voice: voice ?? 'alloy',
speed: speed ?? 1,
model: model ?? 'tts-1',
}),
});
if (!result.ok) {
const text = await result.text();
console.log('OpenAI-compatible TTS request failed', result.statusText, text);
return response.status(500).send(text);
}
const buffer = await result.arrayBuffer();
response.setHeader('Content-Type', 'audio/mpeg');
return response.send(Buffer.from(buffer));
} catch (error) {
console.error('OpenAI TTS generation failed', error);
response.status(500).send('Internal server error');
}
});
router.use('/custom', custom);
module.exports = { router };
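For reference, this is roughly the upstream call the /generate-voice proxy above makes once it has a provider endpoint (an illustrative sketch, not part of the commit; the endpoint URL is a placeholder, and the defaults mirror the fallbacks in the handler):

// Illustrative sketch only, not part of the commit.
const fetch = require('node-fetch').default;

// Hypothetical OpenAI-compatible TTS endpoint; SillyTavern takes this from
// the provider_endpoint field of the incoming request.
const PROVIDER_ENDPOINT = 'https://tts.example.com/v1/audio/speech';

async function generateVoice(apiKey, text) {
    const result = await fetch(PROVIDER_ENDPOINT, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            Authorization: `Bearer ${apiKey ?? ''}`,
        },
        // Same defaults the route falls back to when fields are omitted.
        body: JSON.stringify({
            input: text ?? '',
            response_format: 'mp3',
            voice: 'alloy',
            speed: 1,
            model: 'tts-1',
        }),
    });

    if (!result.ok) {
        throw new Error(`TTS request failed: ${result.status} ${result.statusText}`);
    }

    // The proxy returns these bytes to the client as audio/mpeg.
    return Buffer.from(await result.arrayBuffer());
}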

View File

@@ -44,6 +44,8 @@ const SECRET_KEYS = {
ZEROONEAI: 'api_key_01ai',
HUGGINGFACE: 'api_key_huggingface',
STABILITY: 'api_key_stability',
BLOCKENTROPY: 'api_key_blockentropy',
CUSTOM_OPENAI_TTS: 'api_key_custom_openai_tts',
};
// These are the keys that are safe to expose, even if allowKeysExposure is false

View File

@@ -9,6 +9,7 @@ const { jsonParser } = require('../express-common');
const { getAllUserHandles, getUserDirectories } = require('../users');
const ENABLE_EXTENSIONS = getConfigValue('enableExtensions', true);
const ENABLE_EXTENSIONS_AUTO_UPDATE = getConfigValue('enableExtensionsAutoUpdate', true);
const ENABLE_ACCOUNTS = getConfigValue('enableUserAccounts', false);
// 10 minutes
@@ -268,6 +269,7 @@ router.post('/get', jsonParser, (request, response) => {
instruct,
context,
enable_extensions: ENABLE_EXTENSIONS,
enable_extensions_auto_update: ENABLE_EXTENSIONS_AUTO_UPDATE,
enable_accounts: ENABLE_ACCOUNTS,
});
});

View File

@@ -908,10 +908,135 @@ stability.post('/generate', jsonParser, async (request, response) => {
}
});
const blockentropy = express.Router();
blockentropy.post('/models', jsonParser, async (request, response) => {
try {
const key = readSecret(request.user.directories, SECRET_KEYS.BLOCKENTROPY);
if (!key) {
console.log('Block Entropy key not found.');
return response.sendStatus(400);
}
const modelsResponse = await fetch('https://api.blockentropy.ai/sdapi/v1/sd-models', {
method: 'GET',
headers: {
'Authorization': `Bearer ${key}`,
},
});
if (!modelsResponse.ok) {
console.log('Block Entropy returned an error.');
return response.sendStatus(500);
}
const data = await modelsResponse.json();
if (!Array.isArray(data)) {
console.log('Block Entropy returned invalid data.');
return response.sendStatus(500);
}
const models = data.map(x => ({ value: x.name, text: x.name }));
return response.send(models);
} catch (error) {
console.log(error);
return response.sendStatus(500);
}
});
blockentropy.post('/generate', jsonParser, async (request, response) => {
try {
const key = readSecret(request.user.directories, SECRET_KEYS.BLOCKENTROPY);
if (!key) {
console.log('Block Entropy key not found.');
return response.sendStatus(400);
}
console.log('Block Entropy request:', request.body);
const result = await fetch('https://api.blockentropy.ai/sdapi/v1/txt2img', {
method: 'POST',
body: JSON.stringify({
prompt: request.body.prompt,
negative_prompt: request.body.negative_prompt,
model: request.body.model,
steps: request.body.steps,
width: request.body.width,
height: request.body.height,
// Random seed if negative.
seed: request.body.seed >= 0 ? request.body.seed : Math.floor(Math.random() * 10_000_000),
}),
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${key}`,
},
});
if (!result.ok) {
console.log('Block Entropy returned an error.');
return response.sendStatus(500);
}
const data = await result.json();
console.log('Block Entropy response:', data);
return response.send(data);
} catch (error) {
console.log(error);
return response.sendStatus(500);
}
});
const huggingface = express.Router();
huggingface.post('/generate', jsonParser, async (request, response) => {
try {
const key = readSecret(request.user.directories, SECRET_KEYS.HUGGINGFACE);
if (!key) {
console.log('Hugging Face key not found.');
return response.sendStatus(400);
}
console.log('Hugging Face request:', request.body);
const result = await fetch(`https://api-inference.huggingface.co/models/${request.body.model}`, {
method: 'POST',
body: JSON.stringify({
inputs: request.body.prompt,
}),
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${key}`,
},
});
if (!result.ok) {
console.log('Hugging Face returned an error.');
return response.sendStatus(500);
}
const buffer = await result.buffer();
return response.send({
image: buffer.toString('base64'),
});
} catch (error) {
console.log(error);
return response.sendStatus(500);
}
});
router.use('/comfy', comfy);
router.use('/together', together);
router.use('/drawthings', drawthings);
router.use('/pollinations', pollinations);
router.use('/stability', stability);
router.use('/blockentropy', blockentropy);
router.use('/huggingface', huggingface);
module.exports = { router };

View File

@@ -143,6 +143,7 @@ const spp_nerd = new SentencePieceTokenizer('src/tokenizers/nerdstash.model');
const spp_nerd_v2 = new SentencePieceTokenizer('src/tokenizers/nerdstash_v2.model');
const spp_mistral = new SentencePieceTokenizer('src/tokenizers/mistral.model');
const spp_yi = new SentencePieceTokenizer('src/tokenizers/yi.model');
const spp_gemma = new SentencePieceTokenizer('src/tokenizers/gemma.model');
const claude_tokenizer = new WebTokenizer('src/tokenizers/claude.json');
const llama3_tokenizer = new WebTokenizer('src/tokenizers/llama3.json');
@@ -152,6 +153,7 @@ const sentencepieceTokenizers = [
'nerdstash_v2',
'mistral',
'yi',
'gemma',
];
/**
@@ -180,6 +182,10 @@ function getSentencepiceTokenizer(model) {
return spp_yi;
}
if (model.includes('gemma')) {
return spp_gemma;
}
return null;
}
@@ -268,6 +274,10 @@ function getTokenizerModel(requestModel) {
return 'gpt-4o';
}
if (requestModel.includes('chatgpt-4o-latest')) {
return 'gpt-4o';
}
if (requestModel.includes('gpt-4-32k')) {
return 'gpt-4-32k';
}
@@ -308,8 +318,8 @@ function getTokenizerModel(requestModel) {
return 'yi';
}
if (requestModel.includes('gemini')) {
return 'gpt-4o';
if (requestModel.includes('gemma') || requestModel.includes('gemini')) {
return 'gemma';
}
// default
@@ -579,6 +589,7 @@ router.post('/nerdstash/encode', jsonParser, createSentencepieceEncodingHandler(
router.post('/nerdstash_v2/encode', jsonParser, createSentencepieceEncodingHandler(spp_nerd_v2));
router.post('/mistral/encode', jsonParser, createSentencepieceEncodingHandler(spp_mistral));
router.post('/yi/encode', jsonParser, createSentencepieceEncodingHandler(spp_yi));
router.post('/gemma/encode', jsonParser, createSentencepieceEncodingHandler(spp_gemma));
router.post('/gpt2/encode', jsonParser, createTiktokenEncodingHandler('gpt2'));
router.post('/claude/encode', jsonParser, createWebTokenizerEncodingHandler(claude_tokenizer));
router.post('/llama3/encode', jsonParser, createWebTokenizerEncodingHandler(llama3_tokenizer));
@@ -587,6 +598,7 @@ router.post('/nerdstash/decode', jsonParser, createSentencepieceDecodingHandler(
router.post('/nerdstash_v2/decode', jsonParser, createSentencepieceDecodingHandler(spp_nerd_v2));
router.post('/mistral/decode', jsonParser, createSentencepieceDecodingHandler(spp_mistral));
router.post('/yi/decode', jsonParser, createSentencepieceDecodingHandler(spp_yi));
router.post('/gemma/decode', jsonParser, createSentencepieceDecodingHandler(spp_gemma));
router.post('/gpt2/decode', jsonParser, createTiktokenDecodingHandler('gpt2'));
router.post('/claude/decode', jsonParser, createWebTokenizerDecodingHandler(claude_tokenizer));
router.post('/llama3/decode', jsonParser, createWebTokenizerDecodingHandler(llama3_tokenizer));
@@ -620,6 +632,11 @@ router.post('/openai/encode', jsonParser, async function (req, res) {
return handler(req, res);
}
if (queryModel.includes('gemma') || queryModel.includes('gemini')) {
const handler = createSentencepieceEncodingHandler(spp_gemma);
return handler(req, res);
}
const model = getTokenizerModel(queryModel);
const handler = createTiktokenEncodingHandler(model);
return handler(req, res);
@@ -658,6 +675,11 @@ router.post('/openai/decode', jsonParser, async function (req, res) {
return handler(req, res);
}
if (queryModel.includes('gemma') || queryModel.includes('gemini')) {
const handler = createSentencepieceDecodingHandler(spp_gemma);
return handler(req, res);
}
const model = getTokenizerModel(queryModel);
const handler = createTiktokenDecodingHandler(model);
return handler(req, res);
@@ -704,6 +726,11 @@ router.post('/openai/count', jsonParser, async function (req, res) {
return res.send({ 'token_count': num_tokens });
}
if (model === 'gemma' || model === 'gemini') {
num_tokens = await countSentencepieceArrayTokens(spp_gemma, req.body);
return res.send({ 'token_count': num_tokens });
}
const tokensPerName = queryModel.includes('gpt-3.5-turbo-0301') ? -1 : 1;
const tokensPerMessage = queryModel.includes('gpt-3.5-turbo-0301') ? 4 : 3;
const tokensPadding = 3;
@@ -785,6 +812,7 @@ router.post('/remote/textgenerationwebui/encode', jsonParser, async function (re
const baseUrl = String(request.body.url);
const legacyApi = Boolean(request.body.legacy_api);
const vllmModel = String(request.body.vllm_model) || '';
const aphroditeModel = String(request.body.aphrodite_model) || '';
try {
const args = {
@@ -820,7 +848,7 @@ router.post('/remote/textgenerationwebui/encode', jsonParser, async function (re
break;
case TEXTGEN_TYPES.APHRODITE:
url += '/v1/tokenize';
args.body = JSON.stringify({ 'prompt': text });
args.body = JSON.stringify({ 'model': aphroditeModel, 'prompt': text });
break;
default:
url += '/v1/internal/encode';

View File

@@ -1,6 +1,7 @@
const fetch = require('node-fetch').default;
const https = require('https');
const express = require('express');
const iconv = require('iconv-lite');
const { readSecret, SECRET_KEYS } = require('./secrets');
const { getConfigValue, uuidv4 } = require('../util');
const { jsonParser } = require('../express-common');
@@ -80,16 +81,18 @@ router.post('/google', jsonParser, async (request, response) => {
const url = generateRequestUrl(text, { to: lang });
https.get(url, (resp) => {
let data = '';
const data = [];
resp.on('data', (chunk) => {
data += chunk;
data.push(chunk);
});
resp.on('end', () => {
try {
const result = normaliseResponse(JSON.parse(data));
const decodedData = iconv.decode(Buffer.concat(data), 'utf-8');
const result = normaliseResponse(JSON.parse(decodedData));
console.log('Translated text: ' + result.text);
response.setHeader('Content-Type', 'text/plain; charset=utf-8');
return response.send(result.text);
} catch (error) {
console.log('Translation error', error);
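The reason for switching to Buffer chunks above, sketched in isolation: accumulating chunks as strings decodes each chunk on its own and can split a multi-byte UTF-8 character at a chunk boundary, while concatenating the raw Buffers and decoding once keeps it intact. An illustrative sketch, not part of the commit:

// Illustrative sketch only, not part of the commit.
const iconv = require('iconv-lite');

// Simulate an HTTP response delivering a multi-byte character ("é", 0xC3 0xA9)
// split across two chunks.
const chunks = [Buffer.from([0xC3]), Buffer.from([0xA9])];

// Naive string accumulation decodes each chunk separately and mangles the character.
const broken = chunks.map(chunk => chunk.toString('utf-8')).join('');

// Collecting buffers and decoding once preserves it.
const intact = iconv.decode(Buffer.concat(chunks), 'utf-8');

console.log(broken);  // two replacement characters
console.log(intact);  // "é"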

BIN
src/tokenizers/gemma.model Normal file

Binary file not shown.

View File

@@ -1,6 +1,7 @@
import { pipeline, env, RawImage, Pipeline } from 'sillytavern-transformers';
import { getConfigValue } from './util.js';
import path from 'path';
import fs from 'fs';
configureTransformers();
@@ -34,7 +35,7 @@ const tasks = {
defaultModel: 'Cohee/fooocus_expansion-onnx',
pipeline: null,
configField: 'extras.promptExpansionModel',
quantized: true,
quantized: false,
},
'automatic-speech-recognition': {
defaultModel: 'Xenova/whisper-small',
@@ -48,7 +49,7 @@ const tasks = {
configField: 'extras.textToSpeechModel',
quantized: false,
},
}
};
/**
* Gets a RawImage object from a base64-encoded image.
@@ -85,6 +86,36 @@ function getModelForTask(task) {
}
}
async function migrateCacheToDataDir() {
const oldCacheDir = path.join(process.cwd(), 'cache');
const newCacheDir = path.join(global.DATA_ROOT, '_cache');
if (!fs.existsSync(newCacheDir)) {
fs.mkdirSync(newCacheDir, { recursive: true });
}
if (fs.existsSync(oldCacheDir) && fs.statSync(oldCacheDir).isDirectory()) {
const files = fs.readdirSync(oldCacheDir);
if (files.length === 0) {
return;
}
console.log('Migrating model cache files to data directory. Please wait...');
for (const file of files) {
try {
const oldPath = path.join(oldCacheDir, file);
const newPath = path.join(newCacheDir, file);
fs.cpSync(oldPath, newPath, { recursive: true, force: true });
fs.rmSync(oldPath, { recursive: true, force: true });
} catch (error) {
console.warn('Failed to migrate cache file. The model will be re-downloaded.', error);
}
}
}
}
/**
* Gets the transformers.js pipeline for a given task.
* @param {import('sillytavern-transformers').PipelineType} task The task to get the pipeline for
@@ -92,6 +123,8 @@ function getModelForTask(task) {
* @returns {Promise<Pipeline>} Pipeline for the task
*/
async function getPipeline(task, forceModel = '') {
await migrateCacheToDataDir();
if (tasks[task].pipeline) {
if (forceModel === '' || tasks[task].currentModel === forceModel) {
return tasks[task].pipeline;
@@ -100,11 +133,11 @@ async function getPipeline(task, forceModel = '') {
await tasks[task].pipeline.dispose();
}
const cache_dir = path.join(process.cwd(), 'cache');
const cacheDir = path.join(global.DATA_ROOT, '_cache');
const model = forceModel || getModelForTask(task);
const localOnly = getConfigValue('extras.disableAutoDownload', false);
console.log('Initializing transformers.js pipeline for task', task, 'with model', model);
const instance = await pipeline(task, model, { cache_dir, quantized: tasks[task].quantized ?? true, local_files_only: localOnly });
const instance = await pipeline(task, model, { cache_dir: cacheDir, quantized: tasks[task].quantized ?? true, local_files_only: localOnly });
tasks[task].pipeline = instance;
tasks[task].currentModel = model;
return instance;

View File

@@ -19,12 +19,6 @@ const AVATAR_PREFIX = 'avatar:';
const ENABLE_ACCOUNTS = getConfigValue('enableUserAccounts', false);
const ANON_CSRF_SECRET = crypto.randomBytes(64).toString('base64');
/**
* The root directory for user data.
* @type {string}
*/
let DATA_ROOT = './data';
/**
* Cache for user directories.
* @type {Map<string, UserDirectoryList>}
@@ -138,7 +132,7 @@ async function migrateUserData() {
console.log();
console.log(color.magenta('Preparing to migrate user data...'));
console.log(`All public data will be moved to the ${DATA_ROOT} directory.`);
console.log(`All public data will be moved to the ${global.DATA_ROOT} directory.`);
console.log('This process may take a while depending on the amount of data to move.');
console.log(`Backups will be placed in the ${PUBLIC_DIRECTORIES.backups} directory.`);
console.log(`The process will start in ${TIMEOUT} seconds. Press Ctrl+C to cancel.`);
@@ -352,11 +346,11 @@ function toAvatarKey(handle) {
* @returns {Promise<void>}
*/
async function initUserStorage(dataRoot) {
DATA_ROOT = dataRoot;
console.log('Using data root:', color.green(DATA_ROOT));
global.DATA_ROOT = dataRoot;
console.log('Using data root:', color.green(global.DATA_ROOT));
console.log();
await storage.init({
dir: path.join(DATA_ROOT, '_storage'),
dir: path.join(global.DATA_ROOT, '_storage'),
ttl: false, // Never expire
});
@@ -457,7 +451,7 @@ function getUserDirectories(handle) {
const directories = structuredClone(USER_DIRECTORY_TEMPLATE);
for (const key in directories) {
directories[key] = path.join(DATA_ROOT, handle, USER_DIRECTORY_TEMPLATE[key]);
directories[key] = path.join(global.DATA_ROOT, handle, USER_DIRECTORY_TEMPLATE[key]);
}
DIRECTORIES_CACHE.set(handle, directories);
return directories;

View File

@@ -382,14 +382,31 @@ function removeOldBackups(directory, prefix) {
}
}
function getImages(path) {
/**
* Get a list of images in a directory.
* @param {string} directoryPath Path to the directory containing the images
* @param {'name' | 'date'} sortBy Sort images by name or date
* @returns {string[]} List of image file names
*/
function getImages(directoryPath, sortBy = 'name') {
function getSortFunction() {
switch (sortBy) {
case 'name':
return Intl.Collator().compare;
case 'date':
return (a, b) => fs.statSync(path.join(directoryPath, a)).mtimeMs - fs.statSync(path.join(directoryPath, b)).mtimeMs;
default:
return (_a, _b) => 0;
}
}
return fs
.readdirSync(path)
.readdirSync(directoryPath)
.filter(file => {
const type = mime.lookup(file);
return type && type.startsWith('image/');
})
.sort(Intl.Collator().compare);
.sort(getSortFunction());
}
/**
@@ -610,6 +627,25 @@ class Cache {
}
}
/**
* Removes color formatting from a text string.
* @param {string} text Text with color formatting
* @returns {string} Text without color formatting
*/
function removeColorFormatting(text) {
// ANSI escape codes for colors are usually in the format \x1b[<codes>m
return text.replace(/\x1b\[\d{1,2}(;\d{1,2})*m/g, '');
}
/**
* Gets a separator string repeated n times.
* @param {number} n Number of times to repeat the separator
* @returns {string} Separator string
*/
function getSeparator(n) {
return '='.repeat(n);
}
module.exports = {
getConfig,
getConfigValue,
@@ -637,4 +673,6 @@ module.exports = {
trimV1,
Cache,
makeHttp2Request,
removeColorFormatting,
getSeparator,
};
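A small standalone illustration of how the two new helpers are meant to be combined (mirroring the startup banner earlier in the commit): strip the ANSI escape codes to measure the visible length of a colored string, then size the separator rule to match. This is a sketch, not part of the commit; it assumes it is run from the repository root so that ./src/util resolves, and the escape codes are written by hand instead of using a color library.

// Illustrative sketch only, not part of the commit.
const { removeColorFormatting, getSeparator } = require('./src/util');

// A message with ANSI blue color codes, as a color library would emit them.
const colored = 'Go to: \x1b[34mhttp://localhost:8000\x1b[39m to open SillyTavern';

// Strip the escape codes to get the visible width, then build a matching rule.
const visible = removeColorFormatting(colored);
const separator = getSeparator(visible.length);

console.log(separator);
console.log(colored);
console.log(separator);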

View File

@@ -23,8 +23,8 @@ async function getMakerSuiteVector(text, directories) {
const key = readSecret(directories, SECRET_KEYS.MAKERSUITE);
if (!key) {
console.log('No MakerSuite key found');
throw new Error('No MakerSuite key found');
console.log('No Google AI Studio key found');
throw new Error('No Google AI Studio key found');
}
const response = await fetch(`https://generativelanguage.googleapis.com/v1beta/models/embedding-gecko-001:embedText?key=${key}`, {
@@ -39,8 +39,8 @@ async function getMakerSuiteVector(text, directories) {
if (!response.ok) {
const text = await response.text();
console.log('MakerSuite request failed', response.statusText, text);
throw new Error('MakerSuite request failed');
console.log('Google AI Studio request failed', response.statusText, text);
throw new Error('Google AI Studio request failed');
}
const data = await response.json();

View File

@@ -1653,10 +1653,9 @@
"license": "MIT"
},
"node_modules/axios": {
"version": "1.7.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.2.tgz",
"integrity": "sha512-2A8QhOMrbomlDuiLeK9XibIBzuHeRcqqNOHp0Cyp5EoJ1IFDh+XZH3A6BkXtv0K4gFGCI0Y4BM7B1wOEi0Rmgw==",
"license": "MIT",
"version": "1.7.4",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz",
"integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==",
"dependencies": {
"follow-redirects": "^1.15.6",
"form-data": "^4.0.0",
@@ -4514,10 +4513,9 @@
}
},
"node_modules/micromatch": {
"version": "4.0.7",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz",
"integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==",
"license": "MIT",
"version": "4.0.8",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
"integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
"dependencies": {
"braces": "^3.0.3",
"picomatch": "^2.3.1"