Compare commits

..

286 Commits

Author SHA1 Message Date
Cohee
74e5e0e4c0 Merge pull request #1404 from SillyTavern/staging
Staging
2023-11-26 20:38:50 +02:00
Cohee
3eeb137416 Fix persona switch input trigger 2023-11-26 19:56:19 +02:00
Cohee
f04c277f03 Add optional {{mesExamples}} to story string 2023-11-26 19:47:23 +02:00
Cohee
e587f208be Add resolution match on load 2023-11-26 18:19:37 +02:00
deffcolony
aaeaa643e3 resolution presets for image generation extension (#1394)
+ New drawer with resolution presets at image generation extension

---------

Co-authored-by: Cohee <18619528+Cohee1207@users.noreply.github.com>
2023-11-26 17:55:49 +02:00
Cohee
9c01a849cb Add buttons command 2023-11-26 17:05:55 +02:00
Cohee
fb08552d46 Add instruct mode sequence macros 2023-11-26 16:37:05 +02:00
Cohee
3bc91f10ec Fix command aliases 2023-11-26 15:47:11 +02:00
Cohee
19f758a0fb #670 Add square avatars 2023-11-26 15:27:54 +02:00
Cohee
e6c96553d0 Add text trimming commands 2023-11-26 13:55:22 +02:00
Cohee
7b3f2a8986 Fix readme config reference 2023-11-26 13:19:40 +02:00
Cohee
87108421b3 Add default config values 2023-11-26 13:13:44 +02:00
Cohee
809a55b2fd Merge branch 'config-yaml' into staging 2023-11-26 13:08:06 +02:00
Cohee
c328d6f04a Add QR auto-exec on app startup 2023-11-26 02:12:31 +02:00
Cohee
9587a704c5 Fix docstrings 2023-11-26 01:52:41 +02:00
Cohee
283d49a6ee Add empty return value to /while 2023-11-26 01:49:37 +02:00
Cohee
c259c0a72a Skip hidden messages from /message command 2023-11-26 01:15:19 +02:00
Cohee
c6aea00e27 Resolve ephemeral stop strings variables 2023-11-26 00:56:55 +02:00
Cohee
50322ed8b0 Don't show auto-update toast if no extensions installed 2023-11-26 00:52:00 +02:00
Cohee
0648da8d05 Docker fix 2023-11-26 00:41:28 +02:00
Cohee
a7024a1d34 Migrate to config.yaml 2023-11-25 23:45:33 +02:00
RossAscends
df15a00430 resolve roll&random before parsing macro var commands 2023-11-26 06:27:13 +09:00
RossAscends
3ec692e76c fix /world unsetting function 2023-11-26 05:26:41 +09:00
Cohee
0bbaeeaedd Revert to get/set/add order for variable macro 2023-11-25 22:02:40 +02:00
Cohee
b24d4f2340 Add opt-in CORS bypass endpoint 2023-11-25 21:56:57 +02:00
Cohee
0410540066 Process variable macros line by line 2023-11-25 20:52:17 +02:00
Cohee
a5c3e22833 #1055 Add new random syntax to docs 2023-11-25 20:41:46 +02:00
Cohee
0d9068f11e Don't replace {{addvar}} macros with the execution result 2023-11-25 20:33:07 +02:00
Cohee
67fa9c9595 Allow dice rolls in {{random}} 2023-11-25 20:11:47 +02:00
RossAscends
a8dc4dc810 Merge branch 'staging' of https://github.com/Cohee1207/SillyTavern into staging 2023-11-26 03:08:05 +09:00
RossAscends
9645034b09 reverse compatibility for old random method 2023-11-26 03:08:03 +09:00
Cohee
67174c8cf8 Add functions to delete local and global variables 2023-11-25 19:53:00 +02:00
RossAscends
7264e3fe83 Merge branch 'staging' of https://github.com/Cohee1207/SillyTavern into staging 2023-11-26 01:28:35 +09:00
RossAscends
2bed9fde70 {{random}} split on :: to allow empty items & commas in items 2023-11-26 01:28:33 +09:00
Cohee
b8d7b0922d Fix evaluation order of named args 2023-11-25 18:18:57 +02:00
Cohee
d862005c1c Revert "Replace macros in named args of boolean evaluation"
This reverts commit 6894b7ef72.
2023-11-25 18:16:53 +02:00
Cohee
6894b7ef72 Replace macros in named args of boolean evaluation 2023-11-25 18:12:28 +02:00
Cohee
06ade803fa Concatenate strings in /addvar 2023-11-25 17:45:40 +02:00
RossAscends
389c2b5435 force firstMes {{user}} update on persona switch 2023-11-25 23:40:27 +09:00
Cohee
2c822f79f7 Merge pull request #1397 from aisu-wata0/claude_system_order_fix 2023-11-24 23:38:02 +02:00
Aisu Wata
a6898365d1 Claude system message order fix 2023-11-24 17:58:20 -03:00
Cohee
8e49ecce49 Merge pull request #1395 from daswer123/staging 2023-11-24 20:07:51 +02:00
Cohee
851a00630a Add /popup command 2023-11-24 19:50:49 +02:00
Danil Boldyrev
fbc72085f8 Hindi Support for XTTS 2023-11-24 20:16:49 +03:00
Cohee
4fd68e5be7 Skill issue 2023-11-24 19:10:09 +02:00
Cohee
a178bdc3b0 Fix ephemeral stopping strings flush 2023-11-24 19:06:31 +02:00
Cohee
2c8e855385 Resolve variables in /fuzzy 2023-11-24 17:41:49 +02:00
Cohee
adb3badcc1 Add /messages and /setinput commands 2023-11-24 17:12:59 +02:00
Cohee
c9b3ccc585 Add STscript procedure calls with /run command 2023-11-24 15:58:00 +02:00
Cohee
dd17c2483f Add lock=on/off to /gen and /genraw commands 2023-11-24 15:18:49 +02:00
Cohee
8e16f28827 Fix variable cast 2023-11-24 14:53:12 +02:00
Cohee
d81371c2b7 Fix variables casting 2023-11-24 14:44:11 +02:00
Cohee
bcf73e0e55 Add auto-execute on opening chat option to quick
reply settings
2023-11-24 14:02:04 +02:00
Cohee
f1d375c2ba Add hidden/invisible/auto-exec only QR buttons 2023-11-24 13:50:42 +02:00
Cohee
720da5649b Don't auto-exec on disable QR 2023-11-24 13:32:27 +02:00
Cohee
ad8709842b STscript improvements (see below)
Add /while loop
Add escaping of macros in sub-commands
Add /input prompt
2023-11-24 12:49:14 +02:00
RossAscends
55607ee847 remove HumiBlur from QR chain menus 2023-11-24 17:45:13 +09:00
RossAscends
0b9555234e sort ChatCompletion presets by name 2023-11-24 17:15:39 +09:00
Cohee
d3ea5c081d Add /pass and /fuzzy commands 2023-11-24 02:21:50 +02:00
Cohee
74fbc88d7d Move macros replacement to script execution time 2023-11-24 01:56:43 +02:00
Cohee
461e8d7929 Update the tooltip on the vertical ellipsis button 2023-11-24 01:39:39 +02:00
Cohee
e593dd4dbd Auto-executable QR 2023-11-24 01:32:02 +02:00
Cohee
7841f3d91f Merge pull request #1392 from LenAnderson/qr-context
Add context menus for Quick Replies
2023-11-24 01:04:24 +02:00
Cohee
c4e1fff1bc Respect # limit for ephemeral stop strings 2023-11-24 00:54:23 +02:00
Cohee
863554fea6 Add ephemeral stop strings to /genraw 2023-11-24 00:51:27 +02:00
Cohee
c2e3bfa06d /genraw instruct=off 2023-11-24 00:36:35 +02:00
Cohee
c50ed4bf6a STscript improvements (see below)
/abort command, {{pipe}} macro in named args, subcommand batch escaping with backslash, string literals and rules for /if, else clause for /if
2023-11-24 00:18:07 +02:00
Cohee
3594c4aac7 Add {{newline}} and {{pipe}} macros 2023-11-23 22:56:52 +02:00
Cohee
86819b6f4f Add /genraw command 2023-11-23 22:50:13 +02:00
Cohee
6d0982e823 Unleash the variables / STscript alpha 2023-11-23 22:36:48 +02:00
LenAnderson
ab9c654708 Merge branch 'staging' into qr-context 2023-11-23 20:35:21 +00:00
LenAnderson
cdbcd6cfb2 add a healthy dose of mobile copium 2023-11-23 20:34:20 +00:00
Cohee
3328df6076 Update readme.md 2023-11-23 21:15:22 +02:00
Cohee
6d4484c4d0 Merge pull request #1390 from Huge/patch-1
Update readme.md with node version check
2023-11-23 21:13:45 +02:00
Cohee
e48cd0a49d Fix version number + provide LTS guidance 2023-11-23 21:13:20 +02:00
Cohee
044cceba4d Merge pull request #1387 from aikitoria/add-manifest
Add manifest.json for Chrome Android / Add to Home Screen flow
2023-11-23 20:56:43 +02:00
Cohee
58eae43cb0 Merge branch 'release' into staging 2023-11-23 20:55:13 +02:00
Cohee
22e17cd681 Pass image type to thumbnail creator 2023-11-23 20:50:08 +02:00
LenAnderson
81f135fa7c use client coords not screen coords 2023-11-23 17:42:31 +00:00
LenAnderson
af2b108730 add context menu editor 2023-11-23 17:42:19 +00:00
LenAnderson
5e4dc388eb Merge branch 'staging' into qr-context 2023-11-23 15:57:35 +00:00
LenAnderson
d32224041a add parent placeholders 2023-11-23 15:57:14 +00:00
Huge
68370dbe30 Update readme.md with node version check
otherwise illegible error occurs
2023-11-23 14:55:32 +01:00
RossAscends
73d6801406 slashcommand /movingui to set a MUI preset 2023-11-23 22:32:47 +09:00
RossAscends
4f7c925dc6 properly round left menu borders 2023-11-23 22:10:23 +09:00
LenAnderson
e2a1892e6b Merge branch 'staging' into qr-context 2023-11-23 12:21:46 +00:00
LenAnderson
cc426e9897 add qr context menus 2023-11-23 12:21:25 +00:00
RossAscends
e0e303b339 Sortable QuickReplies 2023-11-23 19:49:15 +09:00
RossAscends
a88e2f93af Merge branch 'staging' of https://github.com/Cohee1207/SillyTavern into staging 2023-11-23 17:20:35 +09:00
RossAscends
e672a7fe99 'New Chat' in Manage Chats & showLoader when delChat-ing 2023-11-23 17:20:31 +09:00
Cohee
986ae263d3 Merge pull request #1389 from bdashore3/staging 2023-11-23 07:59:36 +02:00
kingbri
e445aeec14 Tabby: Fix model name return on error
Tabby's model API is always /v1/model/list, so return "None" if the
request fails since that means a model is most likely not loaded.

Signed-off-by: kingbri <bdashore3@proton.me>
2023-11-23 00:13:28 -05:00
Cohee
b4afb10fab Change # of beams min value 2023-11-23 02:03:41 +02:00
Cohee
45b714fb9e Don't crash server if google translate fails 2023-11-23 00:59:34 +02:00
aikitoria
10716d1101 Add manifest.json for Chrome Android 2023-11-22 19:18:00 +01:00
Cohee
35349dd8d7 Hide page overflow 2023-11-22 17:59:46 +02:00
Cohee
f802fe1797 Fix xtts separator 2023-11-22 17:47:58 +02:00
Cohee
62d57e0a1a #1386 Fix PaLM API 2023-11-22 17:37:01 +02:00
Cohee
56b63c0e02 #1386 Fix PaLM API 2023-11-22 17:36:34 +02:00
Cohee
2b54bfd1d5 Merge pull request #1380 from LenAnderson/checkbox-contrast
ensure checkboxes have contrast
2023-11-22 16:52:48 +02:00
Cohee
6ea7987a44 Merge pull request #1385 from XXpE3/staging
Optimized the Chinese translation in i18n.
2023-11-22 16:47:51 +02:00
Cohee
57f303223b Don't add extra space on non-instruct continue 2023-11-22 16:34:25 +02:00
Cohee
73eeab9ace Don't display incomplete sentences in quiet-to-loud prompts if trim is enabled 2023-11-22 16:21:43 +02:00
Cohee
61908935f5 Stop string for user-continue. Trim spaces after name2 2023-11-22 16:16:48 +02:00
XXpE3
668a149898 Optimized the Chinese translation in i18n. 2023-11-22 13:41:46 +08:00
Cohee
55af72cb17 /addswipe command 2023-11-22 01:26:17 +02:00
Cohee
63e5bc9341 Merge branch 'staging' of https://github.com/SillyTavern/SillyTavern into staging 2023-11-22 00:58:08 +02:00
Cohee
4f7523b896 Parallelize extensions auto-update + add a toast 2023-11-22 00:58:06 +02:00
Cohee
59e558fba5 Don't execute commands on dry runs 2023-11-22 00:50:41 +02:00
RossAscends
0d84aed89c Merge branch 'staging' of https://github.com/Cohee1207/SillyTavern into staging 2023-11-22 07:48:40 +09:00
RossAscends
fe21a7c25b Add toggle for hiding muted group member sprites 2023-11-22 07:48:35 +09:00
Cohee
e5f7b0b5c7 Use explicit unnamed argument first if exists 2023-11-22 00:43:33 +02:00
Cohee
4b78ddbc8a First steps in slash command piping 2023-11-22 00:39:17 +02:00
Cohee
1b4d955aec Add swipe id validation for /delswipe 2023-11-21 23:33:20 +02:00
Cohee
284bd76589 Add /delswipe command 2023-11-21 23:28:11 +02:00
Cohee
2dc8f8f2f7 Add 5 group control commands 2023-11-21 22:35:59 +02:00
Cohee
df4ed389bf System prompt for Claude 2 2023-11-21 22:11:26 +02:00
Cohee
5f77b2f816 Add Claude 2.1 2023-11-21 20:07:37 +02:00
Cohee
1891a03b11 Merge pull request #1383 from daswer123/staging
Add support for the new TTS - XTTSv2
2023-11-21 16:26:45 +02:00
Cohee
73e081dd99 Don't use global state to build Chat Completion prompts 2023-11-21 14:38:15 +02:00
Danil Boldyrev
bcad0d4e51 add XTTS 2023-11-21 13:16:56 +03:00
LenAnderson
9f16b329c5 ensure checkboxes have contrast 2023-11-21 01:26:43 +00:00
Cohee
01b629bd49 New syntax for sendas command 2023-11-21 02:54:04 +02:00
Cohee
52d9855916 Code lint 2023-11-21 02:00:50 +02:00
Cohee
91429ce516 Merge pull request #1378 from LenAnderson/more-comfy-merge
Add more ComfyUI options
2023-11-21 01:55:00 +02:00
Cohee
ddbdceba64 Add sampler order for koboldcpp under text completions 2023-11-21 01:47:57 +02:00
Cohee
1ebfddf07e Use mistral and yi tokenizers for custom token bans 2023-11-21 01:04:27 +02:00
Cohee
9b75e49b54 Add support for Yi tokenizer 2023-11-21 00:21:58 +02:00
LenAnderson
1c725879d8 add func for loading dropdown opts for settings 2023-11-20 22:00:40 +00:00
Cohee
4222b2aa21 Add enable/disable group member commands 2023-11-20 23:49:04 +02:00
Cohee
f60e74fbd9 Fix message timer showing NaN in some cases 2023-11-20 22:53:51 +02:00
Cohee
ac4b673c5a Fix character name appending on user continue 2023-11-20 22:48:43 +02:00
LenAnderson
3d4442ab25 make api routes kebab 2023-11-20 18:27:50 +00:00
LenAnderson
8b5a56a99c use DIRECTORIES const for comfy workflow path 2023-11-20 18:26:13 +00:00
Cohee
cf853a21ad Merge pull request #1376 from bdashore3/staging
Secrets: Add find endpoint to retrieve a secret value (with conditions)
2023-11-20 19:12:24 +02:00
kingbri
6a511fdfcf Secrets: Add find endpoint
Requires the user to set allowKeysExposure to true before any calls
can work.

Signed-off-by: kingbri <bdashore3@proton.me>
2023-11-20 12:05:54 -05:00
Cohee
e81c100e13 Merge pull request #1377 from LenAnderson/fix-popups
fix all popups being large/wide after first one
2023-11-20 18:25:01 +02:00
LenAnderson
46cc04c798 add default comfy workflow 2023-11-20 15:59:38 +00:00
LenAnderson
9c41a9d2ac fix merge duplicates 2023-11-20 15:58:48 +00:00
LenAnderson
18e6b8cd7c onelinefy 2023-11-20 12:56:52 +00:00
LenAnderson
d24c74e34a update comfy generate to use saved workflows 2023-11-20 12:51:36 +00:00
LenAnderson
777d105602 fix formatting 2023-11-20 12:40:27 +00:00
LenAnderson
06ececc1a5 add managing multiple comfy workflows 2023-11-20 12:39:06 +00:00
LenAnderson
5e5c111d25 add VAE selection 2023-11-20 12:34:37 +00:00
LenAnderson
d5b9dd34b7 remove unused comfy method 2023-11-20 12:29:47 +00:00
LenAnderson
1dd1cd69ac fix all popups being large/wide after first one 2023-11-20 12:16:37 +00:00
Cohee
6ddf8291e9 Merge pull request #1374 from ThisIsPIRI/help
Corrections to /help macros, /help format
2023-11-20 10:57:12 +02:00
Cohee
dd8deab4e3 Update colab 2023-11-20 03:21:26 +02:00
Cohee
af44a63265 Update colab 2023-11-20 03:15:47 +02:00
Cohee
41db1464a2 Update colab 2023-11-20 03:12:24 +02:00
ThisIsPIRI
d87e44ff03 Corrections to /help macros, /help format 2023-11-20 08:51:37 +09:00
Cohee
47a2734ad4 Merge pull request #1352 from chrisbennight/staging
update docker registry in compose file to github from dockerhub
2023-11-20 01:06:38 +02:00
Cohee
48034eb6c9 More info for mancer models 2023-11-19 23:01:39 +02:00
Cohee
ac07c8324d Configurable chat truncation amount 2023-11-19 21:57:54 +02:00
Cohee
a02446c4cc Cancel deletion mode on switching chats 2023-11-19 21:40:23 +02:00
Cohee
a39ee32f93 Horde fire and forget delete request 2023-11-19 21:30:08 +02:00
Cohee
9dcc23825a [chore] Reformat 2023-11-19 21:17:02 +02:00
Cohee
5e6fcd28b2 Merge pull request #1371 from LenAnderson/staging
Add ComfyUI to the Stable Diffusion extension
2023-11-19 21:14:42 +02:00
Cohee
9e3072f89b Explicitly add form-data to package.json 2023-11-19 21:05:58 +02:00
Cohee
96b87641ca Add OpenAI Whisper API 2023-11-19 20:30:34 +02:00
LenAnderson
9dd1e59421 move requests to comfy into ST server 2023-11-19 18:29:41 +00:00
Cohee
1f58d8c335 Merge pull request #1373 from eltociear/patch-2
Update readme.md
2023-11-19 18:28:15 +02:00
Cohee
de456fd097 #1372 Typical P unbreak 2023-11-19 18:27:33 +02:00
Cohee
8de343295d #1372 Typical P unbreak 2023-11-19 18:26:57 +02:00
Ikko Eltociear Ashimine
80161bf138 Update readme.md
minor fix
2023-11-20 01:18:50 +09:00
Cohee
96caddfd71 Add koboldcpp as Text Completion source 2023-11-19 17:14:53 +02:00
Cohee
1dc1b926c4 Fix TTS issues with streaming 2023-11-19 16:56:12 +02:00
Cohee
b0b19edf31 Add multimodal captioning for SD prompt generation 2023-11-19 15:24:43 +02:00
Cohee
c3e5d0f6f2 Adjust scroll height on image inserting 2023-11-19 14:32:36 +02:00
LenAnderson
fdccab3069 add ComfyUI 2023-11-19 12:18:48 +00:00
Cohee
81cb43004b Fix zoomed character avatar validation 2023-11-19 02:16:30 +02:00
Cohee
57165cbe48 Fix invalid SerpApi secret id 2023-11-19 01:41:39 +02:00
Cohee
211722d67b Bump package version 2023-11-19 01:34:24 +02:00
Cohee
685bb9742e Fix update button icons 2023-11-19 01:33:54 +02:00
Cohee
53c3fc16c1 Assorted SD fixes 2023-11-19 00:40:21 +02:00
Cohee
b6936584fe Extend chat comps response limit 2023-11-18 23:39:59 +02:00
Cohee
3f5728d67a Fix TTS not playing if the last message was generated quietly 2023-11-18 23:37:11 +02:00
Cohee
ddeb42ba55 Close modal send form menus when clicking on button again + fix caption UI labels 2023-11-18 21:17:53 +02:00
Cohee
4999fbd97c Check for API key in multimodal captions 2023-11-18 20:58:04 +02:00
Cohee
7045d242e8 Add ability to attach files and images to messages 2023-11-18 19:24:55 +02:00
Cohee
73660c7bef Merge pull request #1364 from LenAnderson/staging 2023-11-18 14:16:10 +02:00
LenAnderson
e520a50de2 add autorun command line argument 2023-11-18 00:09:42 +00:00
Cohee
6f9be2eee9 Merge branch 'staging' of https://github.com/SillyTavern/SillyTavern into staging 2023-11-17 23:19:25 +02:00
Cohee
0608c0afac Add OpenRouter and Llava to captioning plugin. 2023-11-17 23:19:21 +02:00
Cohee
b28ebf46b6 Merge pull request #1361 from bdashore3/staging
Add support for TabbyAPI
2023-11-17 22:02:34 +02:00
kingbri
5f8c615981 Server: Fix model info logs
Don't use a formatted string.

Signed-off-by: kingbri <bdashore3@proton.me>
2023-11-17 15:01:13 -05:00
kingbri
f7ed574d04 Tabby: Decouple combined conditionals
Done by request.

Signed-off-by: kingbri <bdashore3@proton.me>
2023-11-17 14:51:44 -05:00
Cohee
a8c819e293 Minor spelling mistake 2023-11-17 19:32:34 +02:00
kingbri
4cfa267b1b API Tokenizer: Add support for TabbyAPI
Use Tabby's /v1/token endpoints.

Signed-off-by: kingbri <bdashore3@proton.me>
2023-11-17 01:48:03 -05:00
kingbri
f31b996cb5 Text Generation: Add TabbyAPI support
TabbyAPI is an exllamav2 only API server that aims to provide a simple
experience for loading and chatting with exl2 models.

SillyTavern currently doesn't have the ability to load and unload models,
so only add the OAI compatible completion endpoints.

The repository can be found here:
https://github.com/theroyallab/tabbyAPI

Signed-off-by: kingbri <bdashore3@proton.me>
2023-11-17 00:37:39 -05:00
Cohee
323b338cdd Add images to quiet prompts if inlining enabled 2023-11-17 01:30:32 +02:00
Cohee
4b4880bf11 Merge branch 'release' into staging 2023-11-16 18:57:09 +02:00
Cohee
5289038dbd Merge pull request #1354 from Tony-sama/staging
Correct live2d model file parsing to handle non "model3" models
2023-11-16 18:55:49 +02:00
Cohee
f4630f9808 Add lastMessage macro. Substitute params in CFG negative prompts 2023-11-16 16:58:49 +02:00
Cohee
d114ebf6fa Add default role for Message class if not set. 2023-11-16 16:20:33 +02:00
Cohee
7781dae836 Add separator to colab cell 2023-11-16 14:32:09 +02:00
Cohee
72ad2c3261 Update colab 2023-11-16 14:27:09 +02:00
Cohee
e753246373 Add NAI Diffusion 3 model 2023-11-16 02:15:28 +02:00
Cohee
9e48d807cc Add slash command to set UI theme 2023-11-16 00:57:23 +02:00
Cohee
d72c4e0e3f Rename Text Generation WebUI to Text Completion 2023-11-15 23:58:16 +02:00
Cohee
9353ae7415 Merge pull request #1357 from Touch-Night/release 2023-11-15 22:31:34 +02:00
Lu Guanghua
74dec58e0d Update i18n.json 2023-11-16 02:17:09 +08:00
Cohee
3fb26d3927 Add lazy loading of sentencepiece tokenizers 2023-11-15 19:39:55 +02:00
Cohee
9199750afe Merge pull request #1356 from nonenothingnada/staging
Bugfix: World Info - NOT operator only considers first secondary key
2023-11-15 14:11:53 +02:00
nonenothingnada
bca21ec9b6 Bugfix: World Info - NOT operator only considers first secondary key
Small fix to correct the behavior of the World Info NOT operator with a list of secondary keys to what was (hopefully) intended. Previously only the first secondary key was ever checked. Now each primary key should be checked against each secondary key with a single match invalidating the entry activation.
2023-11-15 12:11:09 +01:00
RossAscends
42c73c8658 block mobile swipe gesture under fullscreen panels 2023-11-15 19:20:05 +09:00
RossAscends
88863262da remove console logs 2023-11-15 18:59:39 +09:00
RossAscends
d4a2502ec0 make zensliders ctx respect unlock state 2023-11-15 18:58:47 +09:00
Cohee
5136b70882 #1355 Update summary settings button to make it more visible 2023-11-15 02:16:42 +02:00
Cohee
3c3594c52f Add backup/restore for Personas 2023-11-15 02:09:40 +02:00
Cohee
3143356523 Skill issue 2023-11-15 01:16:31 +02:00
Cohee
5b5e42361a Fix chat backups saving with incorrect file extension 2023-11-15 01:06:27 +02:00
Cohee
22161c2264 Add backup/restore for tags 2023-11-15 00:59:44 +02:00
Cohee
9bef9f4332 Fix delete message without checkboxes 2023-11-15 00:27:46 +02:00
Cohee
f24aae546c Filter WI entries by tags 2023-11-14 23:54:08 +02:00
Cohee
dcf913336b Add macros for character's main and jailbreak prompts 2023-11-14 22:54:16 +02:00
Cohee
7be808c2ff Disable position select for system prompts 2023-11-14 22:41:47 +02:00
Cohee
314aca3f2c Allow disabling system marker prompts 2023-11-14 22:27:07 +02:00
Cohee
4277aac974 Don't prompt to create persona if replacing an image 2023-11-14 22:07:32 +02:00
Cohee
ea583e0ff5 Add fuzzy search to /bg command 2023-11-14 21:43:08 +02:00
Cohee
abb8b0f0cc Update hide / unhide commands to accept range 2023-11-14 21:37:37 +02:00
Cohee
c6ac4459a3 Move image inlining toggle. GPT-4V via OpenRouter 2023-11-14 21:19:39 +02:00
Cohee
b559187722 Autoset height of QR slots. Revert textarea padding 2023-11-14 12:48:55 +02:00
Cohee
50f3def2eb Decrease icon size and text padding 2023-11-14 11:36:57 +02:00
RossAscends
2f20c8e6da Merge branch 'staging' of https://github.com/Cohee1207/SillyTavern into staging 2023-11-14 15:53:28 +09:00
RossAscends
3f4a62d22c ext button to left, stack buttons on mobile 2023-11-14 15:53:26 +09:00
Cohee
1f736a051e Merge branch 'release' into staging 2023-11-14 01:34:04 +02:00
Cohee
c0a8186d37 Add assets and update symbolic links in Dockerfile and entrypoint script 2023-11-14 01:30:47 +02:00
Cohee
fc17f42f93 Change flag hint for ooba 2023-11-14 00:36:34 +02:00
Cohee
b6fb624c99 Change flag hint for ooba 2023-11-14 00:36:04 +02:00
Cohee
4e9b952116 Merge pull request #1354 from Tony-sama/staging
Correct live2d model file parsing to handle non "model3" models
2023-11-14 00:29:43 +02:00
Tony Ribeiro
a261c163a5 merging change to live2d assets listing 2023-11-13 23:22:19 +01:00
Tony Ribeiro
9169938448 Fix listing of live2d model file for non-model3 type models. 2023-11-13 23:20:36 +01:00
Cohee
5fe8f70eb1 #1345 Add API endpoint for web search interaction 2023-11-14 00:16:41 +02:00
Cohee
61764a9a21 Change mancer base URL via debug menu 2023-11-13 11:13:39 +02:00
RossAscends
52c07e0895 setup Aphrodite-specific API flag handling 2023-11-13 16:36:01 +09:00
Cohee
7bf62b3dad Merge pull request #1351 from SillyTavern/staging
Staging
2023-11-13 01:52:46 +02:00
Chris Bennight
690dc328c5 update docker registry in compose file to github from dockerhub 2023-11-12 18:52:34 -05:00
Cohee
25f1afa628 Fix lab mode class 2023-11-13 01:49:36 +02:00
Cohee
e29bcde1d3 Localization hotfix 2023-11-13 01:45:46 +02:00
Cohee
8ff4599e8a Merge branch 'release' into staging 2023-11-12 23:29:13 +02:00
Cohee
6c02a12e88 Proper manual input debounce 2023-11-12 23:28:34 +02:00
Cohee
8fd5a5886b #1347 Localization hotfix 2023-11-12 23:20:32 +02:00
Cohee
31d6c97e70 #1347 Localization hotfix 2023-11-12 23:20:14 +02:00
Cohee
d3e5f6ebc0 #1343 Move bypass check up 2023-11-12 23:08:24 +02:00
Cohee
aeac56c95d Reformat assets.js code 2023-11-12 23:02:07 +02:00
Cohee
2aaaa71d85 Merge branch 'staging' of https://github.com/SillyTavern/SillyTavern into staging 2023-11-12 22:38:55 +02:00
Cohee
500a1dc4c6 Merge pull request #1349 from Tony-sama/staging
update live2d assets detection to allow for subfolder organisation.
2023-11-12 22:38:49 +02:00
Cohee
0d4e5c31e2 Code clean-up 2023-11-12 22:35:17 +02:00
Cohee
9a1d1594d6 Fix formatting in openai.js 2023-11-12 22:14:35 +02:00
Cohee
cd440f6539 Merge pull request #1350 from artisticMink/feature/openrouter-grouping-and-sorting
Feature/openrouter grouping and sorting
2023-11-12 22:13:40 +02:00
Cohee
5bcd49b7ca More concise UI texts, fix button spam clicks 2023-11-12 22:12:16 +02:00
Cohee
2092f849f7 #1348 Show a full screen loader when bulk deleting characters 2023-11-12 21:58:43 +02:00
artisticMink
cc0b4e8174 Access oai_settings instead of dom 2023-11-12 20:55:29 +01:00
Cohee
6e9c6a14f7 Update index.html 2023-11-12 20:35:53 +02:00
Tony Ribeiro
f082420fc7 Use cross os separator for live2d assets path 2023-11-12 19:25:13 +01:00
Tony Ribeiro
c5ea3cfce7 normalize path of live2d assets 2023-11-12 19:21:01 +01:00
Tony Ribeiro
e7e4f75c86 fix new live2d asset listing for character folder. 2023-11-12 19:08:29 +01:00
artisticMink
3bbbf0d8e4 Put openrouter model sorting in drawer
Renames 'Infinity'k tokens to 'Free'
2023-11-12 19:02:41 +01:00
Tony Ribeiro
7f8994c1fd update live2d assets detection to allow for subfolder organisation. 2023-11-12 18:56:01 +01:00
artisticMink
cb2644cdea Add sorting for openrouter models
Alphabetically (default), price, context size
2023-11-12 18:27:56 +01:00
Cohee
dd12cacd16 Fix range validation error 2023-11-12 17:27:40 +02:00
RossAscends
4558f856b5 Merge branch 'staging' of https://github.com/Cohee1207/SillyTavern into staging 2023-11-12 23:57:54 +09:00
RossAscends
5e28d6f651 'off' labels for kobold, typable zenslider handles 2023-11-12 23:57:51 +09:00
artisticMink
a16e34bcef Add optional toggle for grouping openrouter models
By vendor
2023-11-12 15:15:30 +01:00
Cohee
822d9d72ea Force persona creation on avatar uploads. Don't show cropper for images if never resize is enabled. 2023-11-12 15:47:52 +02:00
Cohee
adc533070d Throttle silly animals to one change per minute 2023-11-12 13:26:38 +02:00
Cohee
7afe9e6481 #1343 Add status check bypass 2023-11-12 13:23:46 +02:00
Cohee
efe2a06976 Add null checks for OR pricing 2023-11-12 13:08:22 +02:00
Cohee
4c0b3fb7ae Add null checks for OR pricing 2023-11-12 13:07:57 +02:00
RossAscends
86caffb1c6 Ooba ZenSlider QoL: "Off" for disabled val sampler 2023-11-12 18:22:21 +09:00
RossAscends
583de0d0e7 Merge branch 'staging' of https://github.com/Cohee1207/SillyTavern into staging 2023-11-12 17:23:33 +09:00
RossAscends
986eef9830 fix zenslider and labmode compatibility check 2023-11-12 17:23:29 +09:00
Cohee
35c5d4e528 SD interactive mode fixes 2023-11-12 02:35:37 +02:00
Cohee
6f061adc1e Add OpenAI TTS provider 2023-11-12 02:28:03 +02:00
Cohee
a42c1fc581 Hide MinP under a feature flag. Send Miro to Horde 2023-11-12 01:12:14 +02:00
Cohee
59f7147271 Merge pull request #1342 from ThisIsPIRI/maxcontext
Fix context size counter when size is unlocked
2023-11-12 00:27:24 +02:00
Cohee
879502c1e7 Only allow inlining if OAI is the selected API 2023-11-12 00:13:30 +02:00
Cohee
2c4f53e7b5 Add native GPT-4V image inlining 2023-11-12 00:09:48 +02:00
ThisIsPIRI
ab5b0cb1db Fix context size counter when size is unlocked 2023-11-12 06:32:04 +09:00
Cohee
5d34c8aef5 Merge branch 'staging' of https://github.com/SillyTavern/SillyTavern into staging 2023-11-11 21:53:15 +02:00
Cohee
10264367aa WI editor visual QoL 2023-11-11 21:53:11 +02:00
Cohee
25b549b034 WI custom order (slight return) 2023-11-11 20:16:57 +02:00
Cohee
30f723d9fc Bump package version 2023-11-11 18:26:20 +02:00
Cohee
f79eaa4c8b Merge branch 'staging' of https://github.com/SillyTavern/SillyTavern into staging 2023-11-11 18:25:47 +02:00
Cohee
91a1cc81a0 #1242 Add aux field selector 2023-11-11 18:25:43 +02:00
84 changed files with 7019 additions and 1541 deletions

View File

@@ -170,7 +170,7 @@ SillyTavern 会将 API 密钥保存在目录中的 `secrets.json` 文件内。
如果要想通过点击 API 输入框旁边的按钮来查看密钥,请按照以下设置:
1. 打开 `config.conf` 文件,将里面的 `allowKeysExposure` 设置为 `true`
1. 打开 `config.yaml` 文件,将里面的 `allowKeysExposure` 设置为 `true`
2. 然后重启 SillyTavern 服务。
## 远程访问
@@ -207,7 +207,7 @@ SillyTavern 会将 API 密钥保存在目录中的 `secrets.json` 文件内。
然后,文件中设置的 IP 就可以访问 SillyTavern 了。
*注意:"config.conf" 文件内也有一个 "whitelist" 设置,你可以用同样的方法设置它,但如果 "whitelist.txt" 文件存在,这个设置将被忽略。
*注意:"config.yaml" 文件内也有一个 "whitelist" 设置,你可以用同样的方法设置它,但如果 "whitelist.txt" 文件存在,这个设置将被忽略。
### 2.获取 SillyTavern 服务的 IP 地址
@@ -233,19 +233,19 @@ SillyTavern 会将 API 密钥保存在目录中的 `secrets.json` 文件内。
### 向所有 IP 开放您的 SillyTavern 服务
我们不建议这样做,但您可以打开 `config.conf` 并将里面的 `whitelist` 设置改为 `false`
我们不建议这样做,但您可以打开 `config.yaml` 并将里面的 `whitelistMode` 设置改为 `false`
你必须删除或重命名SillyTavern 文件夹中的 `whitelist.txt` 文件(如果有的话)。
这通常是不安全的做法,所以我们要求在这样做时必须设置用户名和密码。
用户名和密码在`config.conf`文件中设置。
用户名和密码在`config.yaml`文件中设置。
重启 SillyTavern 服务后,只要知道用户名和密码,任何设备都可以访问。
### 还是无法访问?
*`config.conf` 文件中的端口创建一条入站/出站防火墙规则。切勿将此误认为是路由器上的端口转发,否则,有人可能会发现你的聊天隐私,那就大错特错了。
*`config.yaml` 文件中的端口创建一条入站/出站防火墙规则。切勿将此误认为是路由器上的端口转发,否则,有人可能会发现你的聊天隐私,那就大错特错了。
* 在 "设置" > "网络和 Internet" > "以太网" 中启用 "专用网络" 配置。这对 Windows 11 非常重要,否则即使添加了上述防火墙规则也无法连接。
### 性能问题?

18
.github/readme.md vendored
View File

@@ -162,8 +162,10 @@ Installing via ZIP download (discouraged)
### Linux
1. Run the `start.sh` script.
2. Enjoy.
1. Ensure you have Node.js v18 or higher (the latest [LTS version](https://nodejs.org/en/download/) is recommended) installed by running `node -v`.
Alternatively, use the [Node Version Manager](https://github.com/nvm-sh/nvm#installing-and-updating) script to quickly and easily manage your Node installations.
2. Run the `start.sh` script.
3. Enjoy.
## API keys management
@@ -173,7 +175,7 @@ By default, they will not be exposed to a frontend after you enter them and relo
In order to enable viewing your keys by clicking a button in the API block:
1. Set the value of `allowKeysExposure` to `true` in `config.conf` file.
1. Set the value of `allowKeysExposure` to `true` in `config.yaml` file.
2. Restart the SillyTavern server.
## Remote connections
@@ -211,7 +213,7 @@ CIDR masks are also accepted (eg. 10.0.0.0/24).
Now devices which have the IP specified in the file will be able to connect.
*Note: `config.conf` also has a `whitelist` array, which you can use in the same way, but this array will be ignored if `whitelist.txt` exists.*
*Note: `config.yaml` also has a `whitelist` array, which you can use in the same way, but this array will be ignored if `whitelist.txt` exists.*
### 2. Getting the IP for the ST host machine
@@ -223,7 +225,7 @@ If the ST-hosting device is on the same wifi network, you will use the ST-host's
If you (or someone else) want to connect to your hosted ST while not being on the same network, you will need the public IP of your ST-hosting device.
* While using the ST-hosting device, access [this page](https://whatismyipaddress.com/) and look for for `IPv4`. This is what you would use to connect from the remote device.
* While using the ST-hosting device, access [this page](https://whatismyipaddress.com/) and look for `IPv4`. This is what you would use to connect from the remote device.
### 3. Connect the remote device to the ST host machine
@@ -237,19 +239,19 @@ Use http:// NOT https://
### Opening your ST to all IPs
We do not recommend doing this, but you can open `config.conf` and change `whitelist` to `false`.
We do not recommend doing this, but you can open `config.yaml` and change `whitelistMode` to `false`.
You must remove (or rename) `whitelist.txt` in the SillyTavern base install folder if it exists.
This is usually an insecure practice, so we require you to set a username and password when you do this.
The username and password are set in `config.conf`.
The username and password are set in `config.yaml`.
After restarting your ST server, any device will be able to connect to it, regardless of their IP as long as they know the username and password.
### Still Unable To Connect?
* Create an inbound/outbound firewall rule for the port found in `config.conf`. Do NOT mistake this for port-forwarding on your router, otherwise, someone could find your chat logs and that's a big no-no.
* Create an inbound/outbound firewall rule for the port found in `config.yaml`. Do NOT mistake this for port-forwarding on your router, otherwise, someone could find your chat logs and that's a big no-no.
* Enable the Private Network profile type in Settings > Network and Internet > Ethernet. This is VERY important for Windows 11, otherwise, you would be unable to connect even with the aforementioned firewall rules.
## Performance issues?

2
.gitignore vendored
View File

@@ -20,6 +20,8 @@ public/stats.json
/uploads/
*.jsonl
/config.conf
/config.yaml
/config.conf.bak
/docker/config
.DS_Store
public/settings.json

View File

@@ -23,18 +23,19 @@ COPY . ./
# Copy default chats, characters and user avatars to <folder>.default folder
RUN \
IFS="," RESOURCES="characters,chats,groups,group chats,User Avatars,worlds,OpenAI Settings,NovelAI Settings,KoboldAI Settings,TextGen Settings" && \
IFS="," RESOURCES="assets,backgrounds,user,context,instruct,QuickReplies,movingUI,themes,characters,chats,groups,group chats,User Avatars,worlds,OpenAI Settings,NovelAI Settings,KoboldAI Settings,TextGen Settings" && \
\
echo "*** Store default $RESOURCES in <folder>.default ***" && \
for R in $RESOURCES; do mv "public/$R" "public/$R.default"; done && \
for R in $RESOURCES; do mv "public/$R" "public/$R.default"; done || true && \
\
echo "*** Create symbolic links to config directory ***" && \
for R in $RESOURCES; do ln -s "../config/$R" "public/$R"; done && \
# rm "config.conf" "public/settings.json" "public/css/bg_load.css" && \
ln -s "./config/config.conf" "config.conf" && \
ln -s "../config/settings.json" "public/settings.json" && \
ln -s "../../config/bg_load.css" "public/css/bg_load.css" && \
mkdir "config"
for R in $RESOURCES; do ln -s "../config/$R" "public/$R"; done || true && \
\
rm -f "config.yaml" "public/settings.json" "public/css/bg_load.css" || true && \
ln -s "./config/config.yaml" "config.yaml" || true && \
ln -s "../config/settings.json" "public/settings.json" || true && \
ln -s "../../config/bg_load.css" "public/css/bg_load.css" || true && \
mkdir "config" || true
# Cleanup unnecessary files
RUN \

View File

@@ -4,7 +4,7 @@ echo WARNING: Cloudflare Tunnel!
echo ========================================================================================================================
echo This script downloads and runs the latest cloudflared.exe from Cloudflare to set up an HTTPS tunnel to your SillyTavern!
echo Using the randomly generated temporary tunnel URL, anyone can access your SillyTavern over the Internet while the tunnel
echo is active. Keep the URL safe and secure your SillyTavern installation by setting a username and password in config.conf!
echo is active. Keep the URL safe and secure your SillyTavern installation by setting a username and password in config.yaml!
echo.
echo See https://docs.sillytavern.app/usage/remoteconnections/ for more details about how to secure your SillyTavern install.
echo.

View File

@@ -34,43 +34,56 @@
"source": [
"#@markdown (RECOMMENDED) Generates an API key for you to use with the API\n",
"secure = False #@param {type:\"boolean\"}\n",
"#@markdown Enables hosting of extensions backend for SillyTavern Extras\n",
"use_cpu = False #@param {type:\"boolean\"}\n",
"#@markdown Allows to run SillyTavern Extras on CPU (use if you're out of daily GPU allowance)\n",
"use_sd_cpu = False #@param {type:\"boolean\"}\n",
"use_cpu = False #@param {type:\"boolean\"}\n",
"#@markdown Allows to run Stable Diffusion pipeline on CPU (slow!)\n",
"extras_enable_captioning = True #@param {type:\"boolean\"}\n",
"use_sd_cpu = False #@param {type:\"boolean\"}\n",
"#@markdown ***\n",
"#@markdown Loads the image captioning module\n",
"Captions_Model = \"Salesforce/blip-image-captioning-large\" #@param [ \"Salesforce/blip-image-captioning-large\", \"Salesforce/blip-image-captioning-base\" ]\n",
"extras_enable_caption = True #@param {type:\"boolean\"}\n",
"captioning_model = \"Salesforce/blip-image-captioning-large\" #@param [ \"Salesforce/blip-image-captioning-large\", \"Salesforce/blip-image-captioning-base\" ]\n",
"#@markdown * Salesforce/blip-image-captioning-large - good base model\n",
"#@markdown * Salesforce/blip-image-captioning-base - slightly faster but less accurate\n",
"extras_enable_emotions = True #@param {type:\"boolean\"}\n",
"#@markdown ***\n",
"#@markdown Loads the sentiment classification model\n",
"Emotions_Model = \"nateraw/bert-base-uncased-emotion\" #@param [\"nateraw/bert-base-uncased-emotion\", \"joeddav/distilbert-base-uncased-go-emotions-student\"]\n",
"extras_enable_classify = True #@param {type:\"boolean\"}\n",
"classification_model = \"nateraw/bert-base-uncased-emotion\" #@param [\"nateraw/bert-base-uncased-emotion\", \"joeddav/distilbert-base-uncased-go-emotions-student\"]\n",
"#@markdown * nateraw/bert-base-uncased-emotion = 6 supported emotions<br>\n",
"#@markdown * joeddav/distilbert-base-uncased-go-emotions-student = 28 supported emotions\n",
"extras_enable_memory = True #@param {type:\"boolean\"}\n",
"#@markdown ***\n",
"#@markdown Loads the story summarization module\n",
"Memory_Model = \"slauw87/bart_summarisation\" #@param [ \"slauw87/bart_summarisation\", \"Qiliang/bart-large-cnn-samsum-ChatGPT_v3\", \"Qiliang/bart-large-cnn-samsum-ElectrifAi_v10\", \"distilbart-xsum-12-3\" ]\n",
"extras_enable_summarize = True #@param {type:\"boolean\"}\n",
"summarization_model = \"slauw87/bart_summarisation\" #@param [ \"slauw87/bart_summarisation\", \"Qiliang/bart-large-cnn-samsum-ChatGPT_v3\", \"Qiliang/bart-large-cnn-samsum-ElectrifAi_v10\", \"distilbart-xsum-12-3\" ]\n",
"#@markdown * slauw87/bart_summarisation - general purpose summarization model\n",
"#@markdown * Qiliang/bart-large-cnn-samsum-ChatGPT_v3 - summarization model optimized for chats\n",
"#@markdown * Qiliang/bart-large-cnn-samsum-ElectrifAi_v10 - nice results so far, but still being evaluated\n",
"#@markdown * distilbart-xsum-12-3 - faster, but pretty basic alternative\n",
"extras_enable_silero_tts = True #@param {type:\"boolean\"}\n",
"#@markdown ***\n",
"#@markdown Enables Silero text-to-speech module\n",
"extras_enable_edge_tts = True #@param {type:\"boolean\"}\n",
"extras_enable_silero_tts = True #@param {type:\"boolean\"}\n",
"#@markdown Enables Microsoft Edge text-to-speech module\n",
"extras_enable_sd = True #@param {type:\"boolean\"}\n",
"extras_enable_edge_tts = True #@param {type:\"boolean\"}\n",
"#@markdown Enables RVC module\n",
"extras_enable_rvc = False #@param {type:\"boolean\"}\n",
"#@markdown ***\n",
"#@markdown Enables Whisper speech recognition module\n",
"extras_enable_whisper_stt = True #@param {type:\"boolean\"}\n",
"whisper_model = \"base.en\" #@param [ \"tiny.en\", \"base.en\", \"small.en\", \"medium.en\", \"tiny\", \"base\", \"small\", \"medium\", \"large\" ]\n",
"#@markdown There are five model sizes, four with English-only versions, offering speed and accuracy tradeoffs.\n",
"#@markdown The .en models for English-only applications tend to perform better, especially for the tiny.en and base.en models.\n",
"#@markdown ***\n",
"#@markdown Enables SD picture generation\n",
"SD_Model = \"ckpt/anything-v4.5-vae-swapped\" #@param [ \"ckpt/anything-v4.5-vae-swapped\", \"hakurei/waifu-diffusion\", \"philz1337/clarity\", \"prompthero/openjourney\", \"ckpt/sd15\", \"stabilityai/stable-diffusion-2-1-base\" ]\n",
"extras_enable_sd = True #@param {type:\"boolean\"}\n",
"sd_model = \"ckpt/anything-v4.5-vae-swapped\" #@param [ \"ckpt/anything-v4.5-vae-swapped\", \"hakurei/waifu-diffusion\", \"philz1337/clarity\", \"prompthero/openjourney\", \"ckpt/sd15\", \"stabilityai/stable-diffusion-2-1-base\" ]\n",
"#@markdown * ckpt/anything-v4.5-vae-swapped - anime style model\n",
"#@markdown * hakurei/waifu-diffusion - anime style model\n",
"#@markdown * philz1337/clarity - realistic style model\n",
"#@markdown * prompthero/openjourney - midjourney style model\n",
"#@markdown * ckpt/sd15 - base SD 1.5\n",
"#@markdown * stabilityai/stable-diffusion-2-1-base - base SD 2.1\n",
"extras_enable_chromadb = True #@param {type:\"boolean\"}\n",
"#@markdown ***\n",
"#@markdown Enables ChromaDB module\n",
"extras_enable_chromadb = True #@param {type:\"boolean\"}\n",
"\n",
"import subprocess\n",
"import secrets\n",
@@ -86,28 +99,36 @@
"if secure:\n",
" params.append('--secure')\n",
"params.append('--share')\n",
"ExtrasModules = []\n",
"modules = []\n",
"\n",
"if (extras_enable_captioning):\n",
" ExtrasModules.append('caption')\n",
"if (extras_enable_memory):\n",
" ExtrasModules.append('summarize')\n",
"if (extras_enable_emotions):\n",
" ExtrasModules.append('classify')\n",
"if (extras_enable_sd):\n",
" ExtrasModules.append('sd')\n",
"if (extras_enable_silero_tts):\n",
" ExtrasModules.append('silero-tts')\n",
"if extras_enable_caption:\n",
" modules.append('caption')\n",
"if extras_enable_summarize:\n",
" modules.append('summarize')\n",
"if extras_enable_classify:\n",
" modules.append('classify')\n",
"if extras_enable_sd:\n",
" modules.append('sd')\n",
"if extras_enable_silero_tts:\n",
" modules.append('silero-tts')\n",
"if extras_enable_edge_tts:\n",
" ExtrasModules.append('edge-tts')\n",
"if (extras_enable_chromadb):\n",
" ExtrasModules.append('chromadb')\n",
" modules.append('edge-tts')\n",
"if extras_enable_chromadb:\n",
" modules.append('chromadb')\n",
"if extras_enable_whisper_stt:\n",
" modules.append('whisper-stt')\n",
" params.append(f'--stt-whisper-model-path={whisper_model}')\n",
"if extras_enable_rvc:\n",
" modules.append('rvc')\n",
" params.append('--max-content-length=2000')\n",
" params.append('--rvc-save-file')\n",
"\n",
"params.append(f'--classification-model={Emotions_Model}')\n",
"params.append(f'--summarization-model={Memory_Model}')\n",
"params.append(f'--captioning-model={Captions_Model}')\n",
"params.append(f'--sd-model={SD_Model}')\n",
"params.append(f'--enable-modules={\",\".join(ExtrasModules)}')\n",
"\n",
"params.append(f'--classification-model={classification_model}')\n",
"params.append(f'--summarization-model={summarization_model}')\n",
"params.append(f'--captioning-model={captioning_model}')\n",
"params.append(f'--sd-model={sd_model}')\n",
"params.append(f'--enable-modules={\",\".join(modules)}')\n",
"\n",
"\n",
"%cd /\n",
@@ -115,23 +136,14 @@
"%cd /SillyTavern-extras\n",
"!git clone https://github.com/Cohee1207/tts_samples\n",
"!npm install -g localtunnel\n",
"!pip install -r requirements-complete.txt\n",
"!pip install tensorflow==2.14\n",
"!pip install colorama\n",
"!pip install Flask-Cors\n",
"!pip install Flask-Compress\n",
"!pip install transformers\n",
"!pip install Flask_Cloudflared\n",
"!pip install webuiapi\n",
"!pip install diffusers\n",
"!pip install accelerate\n",
"!pip install silero_api_server\n",
"!pip install edge_tts\n",
"!pip install chromadb\n",
"!pip install sentence_transformers\n",
"%pip install -r requirements.txt\n",
"!wget https://github.com/cloudflare/cloudflared/releases/download/2023.5.0/cloudflared-linux-amd64 -O /tmp/cloudflared-linux-amd64\n",
"!chmod +x /tmp/cloudflared-linux-amd64\n",
"\n",
"if extras_enable_rvc:\n",
" print(\"Installing RVC requirements\")\n",
" !pip install -r requirements-rvc.txt\n",
"\n",
"# Generate a random API key\n",
"api_key = secrets.token_hex(5)\n",
"\n",

View File

@@ -1,56 +0,0 @@
const port = 8000;
const whitelist = ['127.0.0.1']; //Example for add several IP in whitelist: ['127.0.0.1', '192.168.0.10']
const whitelistMode = true; //Disabling enabling the ip whitelist mode. true/false
const basicAuthMode = false; //Toggle basic authentication for endpoints.
const basicAuthUser = {username: "user", password: "password"}; //Login credentials when basicAuthMode is true.
const disableThumbnails = false; //Disables the generation of thumbnails, opting to use the raw images instead
const autorun = true; //Autorun in the browser. true/false
const enableExtensions = true; //Enables support for TavernAI-extras project
const listen = true; // If true, Can be access from other device or PC. otherwise can be access only from hosting machine.
const allowKeysExposure = false; // If true, private API keys could be fetched to the frontend.
const skipContentCheck = false; // If true, no new default content will be delivered to you.
const thumbnailsQuality = 95; // Quality of thumbnails. 0-100
const disableChatBackup = false; // Disables the backup of chat logs to the /backups folder
// If true, Allows insecure settings for listen, whitelist, and authentication.
// Change this setting only on "trusted networks". Do not change this value unless you are aware of the issues that can arise from changing this setting and configuring a insecure setting.
const securityOverride = false;
// Additional settings for extra modules / extensions
const extras = {
// Disables auto-download of models from the HuggingFace Hub.
// You will need to manually download the models and put them into the /cache folder.
disableAutoDownload: false,
// Text classification model for sentiment analysis. HuggingFace ID of a model in ONNX format.
classificationModel: 'Cohee/distilbert-base-uncased-go-emotions-onnx',
// Image captioning model. HuggingFace ID of a model in ONNX format.
captioningModel: 'Xenova/vit-gpt2-image-captioning',
// Feature extraction model. HuggingFace ID of a model in ONNX format.
embeddingModel: 'Xenova/all-mpnet-base-v2',
// GPT-2 text generation model. HuggingFace ID of a model in ONNX format.
promptExpansionModel: 'Cohee/fooocus_expansion-onnx',
};
// Request overrides for additional headers
// Format is an array of objects:
// { hosts: [ "<url>" ], headers: { <header>: "<value>" } }
const requestOverrides = [];
module.exports = {
port,
whitelist,
whitelistMode,
basicAuthMode,
basicAuthUser,
autorun,
enableExtensions,
listen,
disableThumbnails,
allowKeysExposure,
securityOverride,
skipContentCheck,
requestOverrides,
thumbnailsQuality,
extras,
disableChatBackup,
};

53
default/config.yaml Normal file
View File

@@ -0,0 +1,53 @@
# -- NETWORK CONFIGURATION --
# Listen for incoming connections
listen: true
# Server port
port: 8000
# Toggle whitelist mode
whitelistMode: true
# Whitelist of allowed IP addresses
whitelist:
- 127.0.0.1
# Toggle basic authentication for endpoints
basicAuthMode: false
# Basic authentication credentials
basicAuthUser:
username: user
password: password
# Enables CORS proxy middleware
enableCorsProxy: false
# Disable security checks - NOT RECOMMENDED
securityOverride: false
# -- ADVANCED CONFIGURATION --
# Open the browser automatically
autorun: true
# Disable thumbnail generation
disableThumbnails: false
# Thumbnail quality (0-100)
thumbnailsQuality: 95
# Allow secret keys exposure via API
allowKeysExposure: false
# Skip new default content checks
skipContentCheck: false
# Disable automatic chats backup
disableChatBackup: false
# API request overrides (for KoboldAI and Text Completion APIs)
## Format is an array of objects:
## - hosts:
## - example.com
## headers:
## Content-Type: application/json
requestOverrides: []
# -- PLUGIN CONFIGURATION --
# Enable UI extensions
enableExtensions: true
# Extension settings
extras:
# Disables automatic model download from HuggingFace
disableAutoDownload: false
# Extra models for plugins. Expects model IDs from HuggingFace model hub in ONNX format
classificationModel: Cohee/distilbert-base-uncased-go-emotions-onnx
captioningModel: Xenova/vit-gpt2-image-captioning
embeddingModel: Xenova/all-mpnet-base-v2
promptExpansionModel: Cohee/fooocus_expansion-onnx

View File

@@ -0,0 +1,86 @@
{
"3": {
"class_type": "KSampler",
"inputs": {
"cfg": "%scale%",
"denoise": 1,
"latent_image": [
"5",
0
],
"model": [
"4",
0
],
"negative": [
"7",
0
],
"positive": [
"6",
0
],
"sampler_name": "%sampler%",
"scheduler": "%scheduler%",
"seed": "%seed%",
"steps": "%steps%"
}
},
"4": {
"class_type": "CheckpointLoaderSimple",
"inputs": {
"ckpt_name": "%model%"
}
},
"5": {
"class_type": "EmptyLatentImage",
"inputs": {
"batch_size": 1,
"height": "%height%",
"width": "%width%"
}
},
"6": {
"class_type": "CLIPTextEncode",
"inputs": {
"clip": [
"4",
1
],
"text": "%prompt%"
}
},
"7": {
"class_type": "CLIPTextEncode",
"inputs": {
"clip": [
"4",
1
],
"text": "%negative_prompt%"
}
},
"8": {
"class_type": "VAEDecode",
"inputs": {
"samples": [
"3",
0
],
"vae": [
"4",
2
]
}
},
"9": {
"class_type": "SaveImage",
"inputs": {
"filename_prefix": "SillyTavern",
"images": [
"8",
0
]
}
}
}

View File

@@ -22,5 +22,9 @@
{
"filename": "user-default.png",
"type": "avatar"
},
{
"filename": "Default_Comfy_Workflow.json",
"type": "workflow"
}
]

View File

@@ -4,7 +4,7 @@ services:
build: ..
container_name: sillytavern
hostname: sillytavern
image: sillytavern/sillytavern:latest
image: ghcr.io/sillytavern/sillytavern:latest
ports:
- "8000:8000"
volumes:

View File

@@ -1,7 +1,7 @@
#!/bin/sh
# Initialize missing user files
IFS="," RESOURCES="characters,chats,groups,group chats,User Avatars,worlds,OpenAI Settings,NovelAI Settings,KoboldAI Settings,TextGen Settings"
IFS="," RESOURCES="assets,backgrounds,user,context,instruct,QuickReplies,movingUI,themes,characters,chats,groups,group chats,User Avatars,worlds,OpenAI Settings,NovelAI Settings,KoboldAI Settings,TextGen Settings"
for R in $RESOURCES; do
if [ ! -e "config/$R" ]; then
echo "Resource not found, copying from defaults: $R"
@@ -9,9 +9,9 @@ for R in $RESOURCES; do
fi
done
if [ ! -e "config/config.conf" ]; then
echo "Resource not found, copying from defaults: config.conf"
cp -r "default/config.conf" "config/config.conf"
if [ ! -e "config/config.yaml" ]; then
echo "Resource not found, copying from defaults: config.yaml"
cp -r "default/config.yaml" "config/config.yaml"
fi
if [ ! -e "config/settings.json" ]; then
@@ -24,5 +24,20 @@ if [ ! -e "config/bg_load.css" ]; then
cp -r "default/bg_load.css" "config/bg_load.css"
fi
CONFIG_FILE="config.yaml"
echo "Starting with the following config:"
cat $CONFIG_FILE
if grep -q "listen: false" $CONFIG_FILE; then
echo -e "\033[1;31mThe listen parameter is set to false. If you can't connect to the server, edit the \"docker/config/config.yaml\" file and restart the container.\033[0m"
sleep 5
fi
if grep -q "whitelistMode: true" $CONFIG_FILE; then
echo -e "\033[1;31mThe whitelistMode parameter is set to true. If you can't connect to the server, edit the \"docker/config/config.yaml\" file and restart the container.\033[0m"
sleep 5
fi
# Start the server
exec node server.js

14
package-lock.json generated
View File

@@ -1,12 +1,12 @@
{
"name": "sillytavern",
"version": "1.10.8",
"version": "1.10.10",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "sillytavern",
"version": "1.10.8",
"version": "1.10.10",
"hasInstallScript": true,
"license": "AGPL-3.0",
"dependencies": {
@@ -21,6 +21,7 @@
"csrf-csrf": "^2.2.3",
"device-detector-js": "^3.0.3",
"express": "^4.18.2",
"form-data": "^4.0.0",
"google-translate-api-browser": "^3.0.1",
"gpt3-tokenizer": "^1.1.5",
"ip-matching": "^2.1.2",
@@ -42,6 +43,7 @@
"vectra": "^0.2.2",
"write-file-atomic": "^5.0.1",
"ws": "^8.13.0",
"yaml": "^2.3.4",
"yargs": "^17.7.1",
"yauzl": "^2.10.0"
},
@@ -4387,6 +4389,14 @@
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
"dev": true
},
"node_modules/yaml": {
"version": "2.3.4",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.4.tgz",
"integrity": "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==",
"engines": {
"node": ">= 14"
}
},
"node_modules/yargs": {
"version": "17.7.2",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",

View File

@@ -3,6 +3,7 @@
"@agnai/sentencepiece-js": "^1.1.1",
"@agnai/web-tokenizers": "^0.1.3",
"@dqbd/tiktoken": "^1.0.2",
"bing-translate-api": "^2.9.1",
"command-exists": "^1.2.9",
"compression": "^1",
"cookie-parser": "^1.4.6",
@@ -10,8 +11,8 @@
"csrf-csrf": "^2.2.3",
"device-detector-js": "^3.0.3",
"express": "^4.18.2",
"form-data": "^4.0.0",
"google-translate-api-browser": "^3.0.1",
"bing-translate-api": "^2.9.1",
"gpt3-tokenizer": "^1.1.5",
"ip-matching": "^2.1.2",
"ipaddr.js": "^2.0.1",
@@ -32,6 +33,7 @@
"vectra": "^0.2.2",
"write-file-atomic": "^5.0.1",
"ws": "^8.13.0",
"yaml": "^2.3.4",
"yargs": "^17.7.1",
"yauzl": "^2.10.0"
},
@@ -50,7 +52,7 @@
"type": "git",
"url": "https://github.com/SillyTavern/SillyTavern.git"
},
"version": "1.10.8",
"version": "1.10.10",
"scripts": {
"start": "node server.js",
"start-multi": "node server.js --disableCsrf",

View File

@@ -4,6 +4,102 @@
const fs = require('fs');
const path = require('path');
const crypto = require('crypto');
const yaml = require('yaml');
const _ = require('lodash');
/**
* Colorizes console output.
*/
const color = {
byNum: (mess, fgNum) => {
mess = mess || '';
fgNum = fgNum === undefined ? 31 : fgNum;
return '\u001b[' + fgNum + 'm' + mess + '\u001b[39m';
},
black: (mess) => color.byNum(mess, 30),
red: (mess) => color.byNum(mess, 31),
green: (mess) => color.byNum(mess, 32),
yellow: (mess) => color.byNum(mess, 33),
blue: (mess) => color.byNum(mess, 34),
magenta: (mess) => color.byNum(mess, 35),
cyan: (mess) => color.byNum(mess, 36),
white: (mess) => color.byNum(mess, 37)
};
/**
* Gets all keys from an object recursively.
* @param {object} obj Object to get all keys from
* @param {string} prefix Prefix to prepend to all keys
* @returns {string[]} Array of all keys in the object
*/
function getAllKeys(obj, prefix = '') {
if (typeof obj !== 'object' || Array.isArray(obj)) {
return [];
}
return _.flatMap(Object.keys(obj), key => {
const newPrefix = prefix ? `${prefix}.${key}` : key;
if (typeof obj[key] === 'object' && !Array.isArray(obj[key])) {
return getAllKeys(obj[key], newPrefix);
} else {
return [newPrefix];
}
});
}
/**
* Converts the old config.conf file to the new config.yaml format.
*/
function convertConfig() {
if (fs.existsSync('./config.conf')) {
if (fs.existsSync('./config.yaml')) {
console.log(color.yellow('Both config.conf and config.yaml exist. Please delete config.conf manually.'));
return;
}
try {
console.log(color.blue('Converting config.conf to config.yaml. Your old config.conf will be renamed to config.conf.bak'));
const config = require(path.join(process.cwd(), './config.conf'));
fs.renameSync('./config.conf', './config.conf.bak');
fs.writeFileSync('./config.yaml', yaml.stringify(config));
console.log(color.green('Conversion successful. Please check your config.yaml and fix it if necessary.'));
} catch (error) {
console.error(color.red('FATAL: Config conversion failed. Please check your config.conf file and try again.'));
return;
}
}
}
/**
* Compares the current config.yaml with the default config.yaml and adds any missing values.
*/
function addMissingConfigValues() {
try {
const defaultConfig = yaml.parse(fs.readFileSync(path.join(process.cwd(), './default/config.yaml'), 'utf8'));
let config = yaml.parse(fs.readFileSync(path.join(process.cwd(), './config.yaml'), 'utf8'));
// Get all keys from the original config
const originalKeys = getAllKeys(config);
// Use lodash's defaultsDeep function to recursively apply default properties
config = _.defaultsDeep(config, defaultConfig);
// Get all keys from the updated config
const updatedKeys = getAllKeys(config);
// Find the keys that were added
const addedKeys = _.difference(updatedKeys, originalKeys);
if (addedKeys.length === 0) {
return;
}
console.log('Adding missing config values to config.yaml:', addedKeys);
fs.writeFileSync('./config.yaml', yaml.stringify(config));
} catch (error) {
console.error(color.red('FATAL: Could not add missing config values to config.yaml'), error);
}
}
/**
* Creates the default config files if they don't exist yet.
@@ -12,7 +108,7 @@ function createDefaultFiles() {
const files = {
settings: './public/settings.json',
bg_load: './public/css/bg_load.css',
config: './config.conf',
config: './config.yaml',
user: './public/css/user.css',
};
@@ -21,10 +117,10 @@ function createDefaultFiles() {
if (!fs.existsSync(file)) {
const defaultFilePath = path.join('./default', path.parse(file).base);
fs.copyFileSync(defaultFilePath, file);
console.log(`Created default file: ${file}`);
console.log(color.green(`Created default file: ${file}`));
}
} catch (error) {
console.error(`FATAL: Could not write default file: ${file}`, error);
console.error(color.red(`FATAL: Could not write default file: ${file}`), error);
}
}
}
@@ -73,10 +169,14 @@ function copyWasmFiles() {
}
try {
// 0. Convert config.conf to config.yaml
convertConfig();
// 1. Create default config files
createDefaultFiles();
// 2. Copy transformers WASM binaries from node_modules
copyWasmFiles();
// 3. Add missing config values
addMissingConfigValues();
} catch (error) {
console.error(error);
}

57
public/css/file-form.css Normal file
View File

@@ -0,0 +1,57 @@
.file_attached {
display: flex;
min-width: 150px;
max-width: calc(var(--sheldWidth) * 0.9);
flex-direction: row;
gap: 10px;
align-items: center;
margin: 0.25em auto;
padding: 0 0.75em;
border: 2px solid var(--SmartThemeBorderColor);
border-radius: 15px;
background-color: var(--white20a);
}
.mes_file_container {
cursor: default;
display: flex;
gap: 15px;
align-items: center;
width: fit-content;
max-width: 100%;
background-color: var(--white20a);
border: 2px solid var(--SmartThemeBorderColor);
padding: 0.5em 1em;
border-radius: 15px;
}
.mes_file_container .right_menu_button {
padding-right: 0;
}
.mes_file_container .mes_file_size,
.file_attached .file_size {
font-size: 0.9em;
color: var(--SmartThemeQuoteColor);
}
.file_attached .file_name,
.mes_file_container .mes_file_name {
flex: 1;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
#file_form {
display: flex;
width: 100%;
}
.file_modal {
width: 100%;
height: 100%;
overflow-y: auto;
display: flex;
text-align: left;
}

View File

@@ -369,6 +369,18 @@
top: unset;
bottom: unset;
}
#leftSendForm,
#rightSendForm {
width: 1.15em;
flex-wrap: wrap;
height: unset;
}
#extensionsMenuButton {
order: 1;
}
}
/*iOS specific*/
@@ -445,4 +457,4 @@
#horde_model {
height: unset;
}
}
}

View File

@@ -229,6 +229,10 @@
display: flex;
}
.flexBasis100p {
flex-basis: 100%;
}
.flexBasis50p {
flex-basis: 50%
}
@@ -263,6 +267,10 @@
flex-shrink: 1
}
.flexWrap {
flex-wrap: wrap;
}
.flexnowrap {
flex-wrap: nowrap;
}
@@ -289,6 +297,10 @@
align-content: flex-start;
}
.alignContentCenter {
align-content: center;
}
.overflowHidden {
overflow: hidden;
}
@@ -518,4 +530,4 @@ textarea:disabled {
height: 30px;
text-align: center;
padding: 5px;
}
}

View File

@@ -17,6 +17,13 @@ body.no-modelIcons .icon-svg {
display: none !important;
}
body.square-avatars .avatar,
body.square-avatars .avatar img,
body.square-avatars .hotswapAvatar,
body.square-avatars .hotswapAvatar img {
border-radius: 2px !important;
}
/*char list grid mode*/
body.charListGrid #rm_print_characters_block {
@@ -358,3 +365,11 @@ body.expandMessageActions .mes .mes_buttons .extraMesButtons {
body.expandMessageActions .mes .mes_buttons .extraMesButtonsHint {
display: none !important;
}
#openai_image_inlining:not(:checked)~#image_inlining_hint {
display: none;
}
#openai_image_inlining:checked~#image_inlining_hint {
display: block;
}

View File

@@ -12,66 +12,66 @@
"clickslidertips": "点击滑块右侧数字可手动输入",
"kobldpresets": "Kobold 预设",
"guikoboldaisettings": "KoboldAI GUI 设置",
"novelaipreserts": "NovelAI预设",
"novelaipreserts": "NovelAI 预设",
"default": "默认",
"openaipresets": "OpenAI 预设",
"text gen webio(ooba) presets": "Text generation web UI 预设",
"response legth(tokens)": "响应长度 (Toekns)",
"response legth(tokens)": "响应长度Tokens",
"select": "选择 ",
"context size(tokens)": "上下文大小 (Toekns)",
"context size(tokens)": "上下文大小Tokens",
"unlocked": "解锁",
"Only select models support context sizes greater than 4096 tokens. Increase only if you know what you're doing.": "只有在选定的模型支持大于 4096 个Toekn 时可以选择启用在启用该选项时,你应该知道自己在做什么。",
"rep.pen": "频率惩罚",
"rep.pen range": "存在惩罚",
"temperature": "温度设置",
"Encoder Rep. Pen.": "Encoder Rep. Pen.",
"No Repeat Ngram Size": "不需要重复Ngram大小",
"Only select models support context sizes greater than 4096 tokens. Increase only if you know what you're doing.": "只有在选定的模型支持大于 4096 个Token时可以选择启用在启用该选项时,你应该知道自己在做什么。",
"rep.pen": "重复惩罚",
"rep.pen range": "重复惩罚范围",
"temperature": "温度",
"Encoder Rep. Pen.": "编码器重复惩罚",
"No Repeat Ngram Size": "不重复N元语法大小",
"Min Length": "最小长度",
"OpenAI Reverse Proxy": "OpenAI API 反向代理",
"Alternative server URL (leave empty to use the default value).": "自定义 OpenAI API 的反向代理地址 (留空时使用 OpenAI 默认服务器)。",
"Remove your real OAI API Key from the API panel BEFORE typing anything into this box": "在输入内容之前,从 API 面板中删除 OpenAI API 密钥",
"We cannot provide support for problems encountered while using an unofficial OpenAI proxy": "我们无法为使用自定义 OpenAI API 反向代理时遇到的问题提供支持",
"Legacy Streaming Processing": "传统流式处理",
"Enable this if the streaming doesn't work with your proxy": "如果流式响应与您的代理不兼容,请启用此功能",
"Context Size (tokens)": "上下文大小(Tokens)",
"Max Response Length (tokens)": "最大响应长度(Tokens)",
"Temperature": "温度设置",
"Enable this if the streaming doesn't work with your proxy": "如果流式回复与您的代理不兼容,请启用此功能",
"Context Size (tokens)": "上下文大小Tokens",
"Max Response Length (tokens)": "最大响应长度Tokens",
"Temperature": "温度",
"Frequency Penalty": "频率惩罚",
"Presence Penalty": "存在惩罚",
"Top-p": "Top-p",
"Top-p": "Top P",
"Display bot response text chunks as they are generated": "显示机器人生成的响应文本块",
"Top A": "Top-a",
"Top A": "Top A",
"Typical Sampling": "典型采样",
"Tail Free Sampling": "无尾采样",
"Rep. Pen. Slope": "Rep. Pen. Slope",
"Rep. Pen. Slope": "重复惩罚梯度",
"Single-line mode": "单行模式",
"Top K": "Top-k",
"Top P": "Top-p",
"Typical P": "典型P",
"Do Sample": "样",
"Add BOS Token": "添加BOS标记",
"Add the bos_token to the beginning of prompts. Disabling this can make the replies more creative.": "在提示的开头添加bos_token标记。禁用此功能可以让回复更加创造性.",
"Ban EOS Token": "禁止EOS标记",
"Ban the eos_token. This forces the model to never end the generation prematurely": "禁止eos_token标记。这会迫使模型不会过早结束生成",
"Skip Special Tokens": "跳过特殊标记",
"Beam search": "搜索",
"Number of Beams": "光束数目",
"Top K": "Top-K",
"Top P": "Top-P",
"Typical P": "典型 P",
"Do Sample": "样本测试",
"Add BOS Token": "添加 BOS Token",
"Add the bos_token to the beginning of prompts. Disabling this can make the replies more creative.": "在提示的开头添加 bos_token禁用此功能可以让回复更加创造性",
"Ban EOS Token": "禁止 EOS Token",
"Ban the eos_token. This forces the model to never end the generation prematurely": "禁止 EOS Token这会迫使模型不会过早结束生成",
"Skip Special Tokens": "跳过特殊 Tokens",
"Beam search": "Beam 搜索",
"Number of Beams": "Beams 的数量",
"Length Penalty": "长度惩罚",
"Early Stopping": "提前终止",
"Contrastive search": "对比搜索",
"Penalty Alpha": "惩罚系数",
"Seed": "种子",
"Inserts jailbreak as a last system message.": "最后一个系统消息中插入越狱",
"Seed": "随机种子",
"Inserts jailbreak as a last system message.": "插入越狱作为最后一个系统消息",
"This tells the AI to ignore its usual content restrictions.": "这告诉人工智能忽略其通常的内容限制",
"NSFW Encouraged": "NSFW鼓励",
"Tell the AI that NSFW is allowed.": "告诉人工智能,NSFW是允许的。",
"NSFW Prioritized": "NSFW优先",
"Tell the AI that NSFW is allowed.": "告诉人工智能NSFW 是允许的。",
"NSFW Prioritized": "NSFW 优先",
"NSFW prompt text goes first in the prompt to emphasize its effect.": "NSFW 提示文本排在提示的顶部,以强调其效果",
"Streaming": "流式响应",
"Display the response bit by bit as it is generated.": "在生成响应时逐显示响应。",
"When this is off, responses will be displayed all at once when they are complete.": "关闭此选项后,响应将在完成后立即显示所有响应。",
"Streaming": "流式生成",
"Display the response bit by bit as it is generated.": "在生成响应时逐显示。",
"When this is off, responses will be displayed all at once when they are complete.": "关闭此选项后,响应将在全部完成后立即显示。",
"Generate only one line per request (KoboldAI only, ignored by KoboldCpp).": "每个请求仅生成一行(仅限 KoboldAI被 KoboldCpp 忽略)。",
"Ban the End-of-Sequence (EOS) token (with KoboldCpp, and possibly also other tokens with KoboldAI).": "禁止序列结束 (EOS) 代币(使用 KoboldCpp也可能使用 KoboldAI 禁止其他代币)。",
"Ban the End-of-Sequence (EOS) token (with KoboldCpp, and possibly also other tokens with KoboldAI).": "禁止序列结束 (EOS) token(使用 KoboldCpp也可能使用 KoboldAI 禁止其他 token)。",
"Good for story writing, but should not be used for chat and instruct mode.": "适合故事写作,但不应用于聊天和指导模式。",
"Enhance Definitions": "增强定义",
"Use OAI knowledge base to enhance definitions for public figures and known fictional characters": "使用 OpenAI 知识库增强公众人物和已知虚构人物的定义",
@@ -84,9 +84,9 @@
"Prompt that is used when the NSFW toggle is on": "NSFW 打开时使用的提示",
"Jailbreak prompt": "越狱提示",
"Prompt that is used when the Jailbreak toggle is on": "越狱开关打开时使用的提示",
"Impersonation prompt": "模拟提示",
"Prompt that is used for Impersonation function": "用于模拟功能的提示",
"Logit Bias": "对数偏",
"Impersonation prompt": "扮演提示",
"Prompt that is used for Impersonation function": "用于扮演功能的提示",
"Logit Bias": "对数偏",
"Helps to ban or reenforce the usage of certain words": "有助于禁止或加强某些单词的使用",
"View / Edit bias preset": "查看/编辑偏置预设",
"Add bias entry": "添加偏置条目",
@@ -108,8 +108,8 @@
"Register": "注册",
"For privacy reasons": "出于隐私原因,您的 API 密钥将在您重新加载页面后隐藏",
"Model": "模型",
"Hold Control / Command key to select multiple models.": "按住控制/命令键选择多个型。",
"Horde models not loaded": "按住控制/命令键选择多个型号。",
"Hold Control / Command key to select multiple models.": "按住控制/命令键选择多个型。",
"Horde models not loaded": "未加载 Horde 模型。",
"Not connected": "未连接",
"Novel API key": "NovelAI API 密钥",
"Follow": "跟随",
@@ -119,22 +119,22 @@
"Novel AI Model": "NovelAI 模型",
"No connection": "无连接",
"oobabooga/text-generation-webui": "",
"Make sure you run it with": "确保启动时包含 --extensions openai 参数",
"Make sure you run it with": "确保启动时包含 --api 参数",
"Blocking API url": "阻塞式 API 地址",
"Streaming API url": "流式传输 API 地址",
"to get your OpenAI API key.": "获取您的 OpenAI API 密钥。",
"OpenAI Model": "OpenAI模型",
"View API Usage Metrics": "查看 API 使用情况",
"Bot": "Bot",
"Connect to the API": "连接到API",
"Auto-connect to Last Server": "自动连接到最后设置的 API 服务",
"Connect to the API": "连接到 API",
"Auto-connect to Last Server": "自动连接到最后设置的 API 服务",
"View hidden API keys": "查看隐藏的 API 密钥",
"Advanced Formatting": "高级格式化",
"AutoFormat Overrides": "自动套用格式替代",
"Disable description formatting": "禁用说明格式",
"Disable personality formatting": "禁用个性化格式",
"Disable scenario formatting": "禁用方案格式",
"Disable example chats formatting": "禁用聊天格式示例",
"AutoFormat Overrides": "覆盖自动格式化",
"Disable description formatting": "禁用描述格式",
"Disable personality formatting": "禁用人设格式",
"Disable scenario formatting": "禁用场景格式",
"Disable example chats formatting": "禁用聊天示例格式",
"Disable chat start formatting": "禁用聊天开始格式",
"Custom Chat Separator": "自定义聊天分隔符",
"Instruct Mode": "指示模式",
@@ -142,7 +142,7 @@
"Wrap Sequences with Newline": "用换行符换行序列",
"Include Names": "包括名称",
"System Prompt": "系统提示",
"Instruct Mode Sequences": "指模式序列",
"Instruct Mode Sequences": "指示模式序列",
"Input Sequence": "输入序列",
"Output Sequence": "输出序列",
"First Output Sequence": "第一个输出序列",
@@ -151,22 +151,21 @@
"System Sequence Suffix": "系统序列后缀",
"Stop Sequence": "停止序列",
"Context Formatting": "上下文格式",
"Tokenizer": "分词器",
"Tokenizer": "Tokenizer",
"None / Estimated": "无/估计",
"Sentencepiece (LLaMA)": "Sentencepiece (LLaMA)",
"Token Padding": "令牌填充",
"Always add character's name to prompt": "始终将角色名称添加到提示中",
"Keep Example Messages in Prompt": "保持示例消息提示",
"Token Padding": "Token 填充",
"Always add character's name to prompt": "始终将角色名称添加到提示中",
"Keep Example Messages in Prompt": "在提示中保留示例消息",
"Remove Empty New Lines from Output": "从输出中删除空的新行",
"Disabled for all models": "对所有模型禁用",
"Automatic (based on model name)": "自动(基于型名称)",
"Enabled for all models": "所有模型启用",
"Automatic (based on model name)": "自动(基于型名称)",
"Enabled for all models": "所有模型启用",
"Anchors Order": "锚点顺序",
"Character then Style": "字符然后样式",
"Style then Character": "样式然后字符",
"Character Anchor": "角色锚点",
"Character Anchor": "字符锚点",
"Style Anchor": "样式锚点",
"World Info": "",
"Scan Depth": "扫描深度",
"depth": "深度",
"Token Budget": "Token 预算",
@@ -174,32 +173,32 @@
"Recursive scanning": "递归扫描",
"None": "没有",
"User Settings": "聊天窗口设置",
"UI Customization": "聊天窗口定制",
"UI Customization": "聊天窗口自定义",
"Avatar Style": "头像风格",
"Circle": "圆形",
"Rectangle": "长方形",
"Chat Style": "聊天式:",
"Chat Style": "聊天窗口样式:",
"Default": "默认",
"Bubbles": "气泡",
"Chat Width (PC)": "聊天窗口宽度(电脑):",
"No Blur Effect": "关糊效果",
"No Text Shadows": "关文字阴影",
"No Blur Effect": "关闭模糊效果",
"No Text Shadows": "关闭文字阴影",
"Waifu Mode": "♡ 老 婆 模 式 ♡",
"Message Timer": "消息计时器",
"Characters Hotswap": "角色热插拔",
"Movable UI Panels": "可移动的聊天窗口",
"Reset Panels": "恢复默认设置",
"UI Colors": "聊天窗口字体颜色",
"Movable UI Panels": "可移动的UI面板",
"Reset Panels": "重置面板",
"UI Colors": "UI颜色",
"Main Text": "正文",
"Italics Text": "斜体文字",
"Quote Text": "引用文字",
"Shadow Color": "阴影颜色",
"FastUI BG": "快界面 BG",
"Blur Tint": "背景颜色",
"Font Scale": "字体比例",
"FastUI BG": "快速UI背景",
"Blur Tint": "模糊色调",
"Font Scale": "字体缩放",
"Blur Strength": "模糊强度",
"Text Shadow Width": "文字阴影宽度",
"UI Theme Preset": "界面主题预设",
"UI Theme Preset": "UI主题预设",
"Power User Options": "高级用户选项",
"Swipes": "滑动",
"Background Sound Only": "仅背景声音",
@@ -208,23 +207,23 @@
"Auto-fix Markdown": "自动修复 Markdown 格式",
"Allow {{char}}: in bot messages": "允许 {{char}}:在机器人消息中",
"Allow {{user}}: in bot messages": "允许 {{user}}:在机器人消息中",
"Auto-scroll Chat": "自动滚动聊天信息",
"Auto-scroll Chat": "自动滚动聊天界面",
"Render Formulas": "渲染公式",
"Send on Enter": "按下回车键发送",
"Always disabled": "始终禁用",
"Automatic (desktop)": "自动(电脑)",
"Always enabled": "始终启用",
"Name": "用户名称",
"Your Avatar": "用户角色头像",
"Extensions API:": "扩展接口",
"Your Avatar": "用户头像",
"Extensions API:": "扩展API",
"SillyTavern-extras": "SillyTavern 扩展",
"Auto-connect": "自动连接",
"Active extensions": "活动扩展",
"Active extensions": "启用扩展",
"Extension settings": "扩展设置",
"Description": "描述",
"First message": "第一条消息",
"Group Controls": "组控",
"Group reply strategy": "组回复策略",
"Group Controls": "组控",
"Group reply strategy": "组回复策略",
"Natural order": "自然顺序",
"List order": "列表顺序",
"Allow self responses": "允许自我响应",
@@ -234,9 +233,9 @@
"text": "文本",
"Delete": "删除",
"Cancel": "取消",
"Advanced Defininitions": "- 高级定义",
"Personality summary": "性格总结",
"A brief description of the personality": "个性的简要描述",
"Advanced Defininitions": "高级定义",
"Personality summary": "人设总结",
"A brief description of the personality": "人设的简要描述",
"Scenario": "场景",
"Circumstances and context of the dialogue": "对话的情况和背景",
"Talkativeness": "回复频率",
@@ -244,17 +243,17 @@
"group chats!": "群聊!",
"Shy": "羞涩 ",
"Normal": "正常",
"Chatty": "",
"Chatty": "健谈",
"Examples of dialogue": "对话示例",
"Forms a personality more clearly": "更清晰地形成个性",
"Forms a personality more clearly": "更清晰地形成人设",
"Save": "保存",
"World Info Editor": "信息编辑器",
"New Entry": "新一行",
"World Info Editor": "世界背景编辑器",
"New Entry": "新条目",
"Export": "导出",
"Delete World": "删除文本",
"Chat History": "聊天记录",
"Group Chat Scenario Override": "群聊方案覆盖",
"All group members will use the following scenario text instead of what is specified in their character cards.": "所有组成员都将使用以下方案文本,而不是其角色卡中指定的内容。",
"Group Chat Scenario Override": "群聊场景覆盖",
"All group members will use the following scenario text instead of what is specified in their character cards.": "所有组成员都将使用以下场景文本,而不是其角色卡中指定的内容。",
"Keywords": "关键字",
"Separate with commas": "用逗号分隔",
"Secondary Required Keywords": "次要必填关键字",
@@ -266,15 +265,15 @@
"Selective": "选择",
"Before Char": "在Char之前",
"After Char": "在Char之后",
"Insertion Order": "顺序",
"Insertion Order": "插入顺序",
"Tokens:": "Tokens",
"Disable": "禁用",
"${characterName}": "${字符名称}",
"CHAR": "字符",
"${characterName}": "${角色名}",
"CHAR": "角色",
"is typing": "正在输入...",
"Back to parent chat": "返回聊天",
"Save bookmark": "保存书签",
"Convert to group": "转换为组",
"Convert to group": "转换为组",
"Start new chat": "开始新聊天",
"View past chats": "查看过去的聊天",
"Delete messages": "删除消息",
@@ -292,9 +291,9 @@
"Prompt that is used when the NSFW toggle is off": "当 NSFW 切换关闭时使用的提示",
"Advanced prompt bits": "高级提示",
"World Info format template": "世界背景格式模板",
"Wraps activated World Info entries before inserting into the prompt. Use {0} to mark a place where the content is inserted.": "在插入提示之前,激活世界背景条目的包装。使用 {0} 来标记内容插入的位置。",
"Unrestricted maximum value for the context slider": "上下文滑块的无限最大值",
"Chat Completion Source": "Chat Completion",
"Wraps activated World Info entries before inserting into the prompt. Use {0} to mark a place where the content is inserted.": "在插入提示之前,包装已激活世界背景条目。使用 {0} 来标记内容插入的位置。",
"Unrestricted maximum value for the context slider": "上下文滑块的无限最大值",
"Chat Completion Source": "聊天补全源",
"Avoid sending sensitive information to the Horde.": "避免向 Horde 发送敏感信息",
"Review the Privacy statement": "查看隐私声明",
"Learn how to contribute your idel GPU cycles to the Horde": "学习如何将闲置的显卡计算资源贡献给Horde",
@@ -308,15 +307,15 @@
"Text Gen WebUI (ooba)": "Text Gen WebUI (ooba)",
"NovelAI": "NovelAI",
"Chat Completion (OpenAI, Claude, Window/OpenRouter, Scale)": "Chat Completion (OpenAI, Claude, Window/OpenRouter, Scale)",
"OpenAI API key": "OenAI API 密钥",
"OpenAI API key": "OpenAI API 密钥",
"Trim spaces": "修剪空格",
"Trim Incomplete Sentences": "修剪不完整的句子",
"Include Newline": "包括换行",
"Non-markdown strings": "非标记字串符",
"Non-markdown strings": "非markdown字符串",
"Replace Macro in Sequences": "替换序列中的宏",
"Presets": "预设",
"Separator": "分隔符",
"Start Reply With": "开始回复",
"Start Reply With": "回复前缀",
"Show reply prefix in chat": "在聊天中显示回复前缀",
"Worlds/Lorebooks": "世界/Lorebooks",
"Active World(s)": "激活的世界",
@@ -332,40 +331,40 @@
"--- None ---": "--- 全无 ---",
"Comma seperated (ignored if empty)": "逗号分隔 (如果为空则忽略)",
"Use Probability": "使用概率",
"Exclude from recursion": "排除递归",
"Position:": "插入位置",
"Before Char Defs": "字符定义之前",
"After Char Defs": "字符定义之后",
"Exclude from recursion": "从递归中排除",
"Position:": "插入位置",
"Before Char Defs": "角色定义之前",
"After Char Defs": "角色定义之后",
"Before AN": "作者注释之前",
"After AN": "作者注释之后",
"Order:": "排序",
"Probability:": "概率",
"Delete Entry": "删除条目",
"User Message Blur Tint": "用户消息",
"AI Message Blur Tint": "AI 消息",
"Chat Style:": "聊天窗口样式",
"Chat Width (PC):": "聊天窗口宽度 (电脑)",
"User Message Blur Tint": "用户消息模糊颜色",
"AI Message Blur Tint": "AI 消息模糊颜色",
"Chat Style:": "聊天窗口样式",
"Chat Width (PC):": "聊天窗口宽度 (电脑)",
"Chat Timestamps": "聊天时间戳",
"Message IDs": "消息 ID",
"Prefer Character Card Prompt": "首选角色卡提示",
"Prefer Character Card Jailbreak": "首选角色卡越狱",
"Press Send to continue": "按下发送消息继续",
"Press Send to continue": "按下发送按钮继续",
"Log prompts to console": "将提示记录到控制台",
"Never resize avatars": "不要调整头像大小",
"Show avatar filenames": "显示头像文件名",
"Import Card Tags": "导入卡片标签",
"Confirm message deletion": "确认删除消息",
"Spoiler Free Mode": "自由剧透模式",
"Auto-swipe": "自动重新生成",
"Spoiler Free Mode": "无剧透模式",
"Auto-swipe": "自动右滑生成",
"Minimum generated message length": "消息生成的最小长度",
"Blacklisted words": "黑名单词汇",
"Blacklisted word count to swipe": "黑名单词汇计数器",
"Blacklisted word count to swipe": "自动滑动触发的累计黑名单词汇",
"Reload Chat": "重新加载聊天窗口",
"Not Connected": "未连接",
"Persona Management": "用户角色设置",
"Persona Description": "用户角色描述",
"In Story String / Chat Completion: Before Character Card": "在故事串中 / Chat Completion: 角色卡之前",
"In Story String / Chat Completion: After Character Card": "在故事串中 / Chat Completion: 角色卡之后",
"In Story String / Chat Completion: Before Character Card": "在故事字符串 / 聊天补全中: 角色卡之前",
"In Story String / Chat Completion: After Character Card": "在故事字符串 / 聊天补全中: 角色卡之后",
"Top of Author's Note": "作者注释之前",
"Bottom of Author's Note": "作者注释之后",
"How do I use this?": "用户角色设置说明",
@@ -382,8 +381,8 @@
"Oldest": "最旧",
"Favorites": "收藏",
"Recent": "最近",
"Most chats": "大多数聊天",
"Least chats": "最少聊天",
"Most chats": "聊天次数最多",
"Least chats": "聊天次数最少",
"Back": "返回",
"Prompt Overrides (For OpenAI/Claude/Scale APIs, Window/OpenRouter, and Instruct mode)": "提示覆盖适用于OpenAI/Claude/Scale APIs、Window/OpenRouter和Instruct模式",
"Insert {{original}} into either box to include the respective default prompt from system settings.": "将{{original}}插入任意一个框中,即可包含来自系统设置的默认提示。",
@@ -395,16 +394,16 @@
"Character Version": "角色版本",
"Tags to Embed": "要嵌入的标签",
"How often the character speaks in group chats!": "角色在群聊中说话的频率!",
"Important to set the character's writing style.": "重要的是设置角色的写作风格。",
"Important to set the character's writing style.": "设置角色的写作风格,它很重要。",
"ATTENTION!": "注意!",
"Samplers Order": "采样器顺序",
"Samplers will be applied in a top-down order. Use with caution.": "采样器将按从上到下的顺序应用。谨慎使用。",
"Repetition Penalty": "重复惩罚",
"Epsilon Cutoff": "Epsilon切断",
"Eta Cutoff": "Eta切断",
"Rep. Pen. Range.": "代表范围的惩罚。",
"Rep. Pen. Freq.": "代表鹰频",
"Rep. Pen. Presence": "重复惩罚出现",
"Epsilon Cutoff": "Epsilon 切断",
"Eta Cutoff": "Eta 切断",
"Rep. Pen. Range.": "重复惩罚范围",
"Rep. Pen. Freq.": "重复频率惩罚",
"Rep. Pen. Presence": "重复存在惩罚",
"Enter it in the box below:": "在下面的框中输入:",
"separate with commas w/o space between": "用逗号分隔,不要空格",
"Document": "文档",
@@ -428,9 +427,9 @@
"Any contents here will replace the default Jailbreak Prompt used for this character. (v2 spec: post_history_instructions)": "这里的任何内容都将替换用于此角色的默认越狱提示。(v2规范:post_history_instructions)",
"(Botmaker's name / Contact Info)": "(Bot制作者的名字/联系信息)",
"(If you want to track character versions)": "(如果你想跟踪角色版本)",
"(Describe the bot, give use tips, or list the chat models it has been tested on. This will be displayed in the character list.)": "(描述机器人,给出使用提示,或列出它测试过的聊天模型。这将显示在角色列表中)",
"(Describe the bot, give use tips, or list the chat models it has been tested on. This will be displayed in the character list.)": "(描述机器人,给出使用提示,或列出它测试过的聊天模型。这将显示在角色列表中)",
"(Write a comma-separated list of tags)": "(编写逗号分隔的标签列表)",
"(A brief description of the personality)": "个性的简要描述)",
"(A brief description of the personality)": "(人设的简要描述)",
"(Circumstances and context of the interaction)": "(互动的情况和上下文)",
"(Examples of chat dialog. Begin each example with START on a new line.)": "聊天对话的示例。每个示例都以新行的START开始",
"Injection text (supports parameters)": "注入文本(支持参数)",
@@ -450,8 +449,8 @@
"Export preset": "导出预设",
"Delete the preset": "删除该预设",
"Inserts jailbreak as a last system message": "将越狱插入为最后一个系统消息",
"NSFW block goes first in the resulting prompt": "在生成的提示中,首先是NSFW",
"Enables OpenAI completion streaming": "启用OpenAI完成流",
"NSFW block goes first in the resulting prompt": "在生成的提示中NSFW部分排在首位",
"Enables OpenAI completion streaming": "启用OpenAI补全流",
"Wrap user messages in quotes before sending": "发送前用引号括起用户消息",
"Restore default prompt": "恢复默认提示",
"New preset": "新预设",
@@ -460,17 +459,17 @@
"Restore default reply": "恢复默认回复",
"Restore defaul note": "恢复默认注释",
"API Connections": "API连接",
"Can help with bad responses by queueing only the approved workers. May slowdown the response time.": "通过排队已批准的工作人员来帮助处理不良响应。可能会减慢响应时间。",
"Can help with bad responses by queueing only the approved workers. May slowdown the response time.": "通过排队已批准的worker来帮助处理不良响应。可能会减慢响应时间。",
"Clear your API key": "清除你的API密钥",
"Refresh models": "刷新模型",
"Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai": "使用OAuth流程获取您的OpenRouter API令牌。您将被重定向到openrouter.ai",
"Verifies your API connection by sending a short test message. Be aware that you'll be credited for it!": "通过发送一个短测试消息验证您的API连接。请注意您会获得相应的积分",
"Verifies your API connection by sending a short test message. Be aware that you'll be credited for it!": "通过发送一个短测试消息验证您的API连接。请注意,这将会计入你的使用额度",
"Create New": "创建新的",
"Edit": "编辑",
"World Info": "世界背景",
"Locked = World Editor will stay open": "锁定=世界编辑器将保持打开状态",
"Entries can activate other entries by mentioning their keywords": "条目可以通过提及其关键字来激活其他条目",
"Lookup for the entry keys in the context will respect the case": "在上下文中查找条目将遵守大小写",
"Lookup for the entry keys in the context will respect the case": "在上下文中查找条目关键词将遵守大小写",
"If the entry key consists of only one word, it would not be matched as part of other words": "如果条目键仅包含一个词,它将不会被匹配为其他词汇的一部分",
"Open all Entries": "打开所有条目",
"Close all Entries": "关闭所有条目",
@@ -483,7 +482,7 @@
"removes blur and uses alternative background color for divs": "去除模糊并为div使用替代的背景颜色",
"If checked and the character card contains a prompt override (System Prompt), use that instead.": "如果选中并且角色卡包含提示覆盖(系统提示),请改用该选项。",
"If checked and the character card contains a jailbreak override (Post History Instruction), use that instead.": "如果选中并且角色卡包含越狱覆盖(发布历史指令),请改用该选项。",
"AI Response Formatting": "AI回复格式",
"AI Response Formatting": "AI 回复格式",
"Change Background Image": "更改背景图片",
"Extensions": "扩展",
"Click to set a new User Name": "点击设置新用户名",
@@ -493,7 +492,7 @@
"Character Management": "角色管理",
"Locked = Character Management panel will stay open": "锁定=角色管理面板将保持打开状态",
"Select/Create Characters": "选择/创建角色",
"Token counts may be inaccurate and provided just for reference.": "令牌计数可能不准确,仅供参考。",
"Token counts may be inaccurate and provided just for reference.": "Token 计数可能不准确,仅供参考。",
"Click to select a new avatar for this character": "点击选择此角色的新头像",
"Add to Favorites": "添加到收藏夹",
"Advanced Definition": "高级定义",
@@ -525,7 +524,7 @@
"Associate one or more auxillary Lorebooks with this character.": "将一个或多个辅助的 Lorebook 与这个角色关联。",
"NOTE: These choices are optional and won't be preserved on character export!": "注意:这些选择是可选的,不会在导出角色时保留!",
"Rename chat file": "重命名聊天文件",
"Export JSONL chat file": "导出JSONL聊天文件",
"Export JSONL chat file": "导出 JSONL 聊天文件",
"Download chat as plain text document": "将聊天内容下载为纯文本文档",
"Delete chat file": "删除聊天文件",
"Delete tag": "删除标签",
@@ -553,7 +552,7 @@
"Add": "添加",
"Abort request": "取消请求",
"Send a message": "发送消息",
"Ask AI to write your message for you": "让AI代替你写",
"Ask AI to write your message for you": "让 AI 代替你写消息",
"Continue the last message": "继续上一条消息",
"Bind user name to that avatar": "将用户名绑定到该头像",
"Select this as default persona for the new chats.": "将此选择为新聊天的默认角色。",

View File

@@ -8,6 +8,7 @@
<meta name="apple-mobile-web-app-capable" content="yes">
<meta name="darkreader-lock">
<meta name="robots" content="noindex, nofollow" />
<link rel="manifest" href="/manifest.json">
<link href="webfonts/NotoSans/stylesheet.css" rel="stylesheet">
<!-- fontawesome webfonts-->
<link href="css/fontawesome.css" rel="stylesheet">
@@ -121,7 +122,7 @@
<div id="clickSlidersTips" data-i18n="clickslidertips" class="toggle-description wide100p editable-slider-notification">
Click slider numbers to input manually.
</div>
<div id="labModeWarning" class="redWarningBG textAlignCenter">MAD LAB MODE ON</div>
<div id="labModeWarning" class="redWarningBG textAlignCenter displayNone">MAD LAB MODE ON</div>
<a href="https://docs.sillytavern.app/usage/common-settings/" target="_blank" title="Documentation on sampling parameters.">
<span name="samplerHelpButton" class="note-link-span topRightInset fa-solid fa-circle-question"></span>
</a>
@@ -182,7 +183,7 @@
</div>
</div>
<div id="textgenerationwebui_api-presets">
<h4 class="margin0"><span data-i18n="Text Gen WebUI presets">Text Gen WebUI presets</span></h4>
<h4 class="margin0"><span data-i18n="Text Completion presets">Text Completion presets</span></h4>
<div class="flex-container">
<select id="settings_preset_textgenerationwebui" class="flex1 text_pole" data-preset-manager-for="textgenerationwebui">
</select>
@@ -213,8 +214,8 @@
</div>
<div id="common-gen-settings-block" class="width100p">
<div id="pro-settings-block" class="flex-container gap10h5v justifyCenter">
<div id="amount_gen_block" class="alignitemscenter flex-container marginBot5 flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="response legth(tokens)">
<small>Response (tokens)</small>
<div id="amount_gen_block" class="alignitemscenter flex-container marginBot5 flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="response legth(tokens)">Response (tokens)</small>
<input class="neo-range-slider" type="range" id="amount_gen" name="volume" min="16" max="1024" step="1">
<div data-randomization-disabled="true" class="wide100p">
<input class="neo-range-input" type="number" min="16" max="1024" step="1" data-for="amount_gen" id="amount_gen_counter">
@@ -236,8 +237,8 @@
</label>
</div>
</div>
<div id="max_context_block" class="alignitemscenter flex-container marginBot5 flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="context size(tokens)">
<small>Context (tokens)</small>
<div id="max_context_block" class="alignitemscenter flex-container marginBot5 flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="context size(tokens)">Context (tokens)</small>
<input class="neo-range-slider" type="range" id="max_context" name="volume" min="512" max="8192" step="64">
<div data-randomization-disabled="true" class="wide100p">
<input class="neo-range-input" type="number" min="512" max="8192" step="64" data-for="max_context" id="max_context_counter">
@@ -245,7 +246,7 @@
<div class="flex-container alignitemscenter justifyCenter marginTop5" id="max_context_unlocked_block">
<label class="checkbox_label">
<input id="max_context_unlocked" type="checkbox" />
<small data-i18n="unlocked">Unlocked
<small><span data-i18n="unlocked">Unlocked</span>
<div id="max_context_unlocked_warning" class="fa-solid fa-circle-info opacity50p " title="Only select models support context sizes greater than 4096 tokens.&#13;Increase only if you know what you're doing."></div>
</small>
</label>
@@ -411,7 +412,7 @@
Max Response Length (tokens)
</div>
<div class="wide100p">
<input type="number" id="openai_max_tokens" name="openai_max_tokens" class="text_pole" min="50" max="1000">
<input type="number" id="openai_max_tokens" name="openai_max_tokens" class="text_pole" min="50" max="8000">
</div>
</div>
<div data-source="openrouter">
@@ -735,12 +736,12 @@
<input class="neo-range-input" type="number" min="0" max="1" step="0.01" data-for="top_p" id="top_p_counter">
</div>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="Typical Sampling">
Typical Sampling
<div class="fa-solid fa-circle-info opacity50p" title="Typical Sampling prioritizes tokens based on their deviation from the average entropy of the set.&#13;It maintains tokens whose cumulative probability is close to a predefined threshold (e.g., 0.5), emphasizing those with average information content.&#13;Set to 1.0 to disable."></div>
<small data-i18n="Typical P">
Typical P
<div class="fa-solid fa-circle-info opacity50p" title="Typical P Sampling prioritizes tokens based on their deviation from the average entropy of the set.&#13;It maintains tokens whose cumulative probability is close to a predefined threshold (e.g., 0.5), emphasizing those with average information content.&#13;Set to 1.0 to disable."></div>
</small>
<input class="neo-range-slider" type="range" id="typical" name="volume" min="0" max="1" step="0.001">
<input class="neo-range-input" type="number" min="0" max="1" step="0.001" data-for="typical" id="typical_counter">
<input class="neo-range-slider" type="range" id="typical_p" name="volume" min="0" max="1" step="0.001">
<input class="neo-range-input" type="number" min="0" max="1" step="0.001" data-for="typical_p" id="typical_p_counter">
</div>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="Min P">
@@ -867,7 +868,7 @@
<small>3</small>
</div>
<div data-id="4">
<span data-i18n="Typical Sampling">Typical P Sampling</span>
<span data-i18n="Typical P">Typical P</span>
<small>4</small>
</div>
<div data-id="5">
@@ -1077,7 +1078,7 @@
<div class="toggle_button right_menu_button"></div>
</div>
<div data-id="5">
<span data-i18n="Typical Sampling">Typical Sampling</span>
<span data-i18n="Typical P">Typical P</span>
<small>5</small>
<div class="toggle_button right_menu_button"></div>
</div>
@@ -1097,103 +1098,103 @@
</div><!-- end of novel settings-->
<div id="textgenerationwebui_api-settings">
<div class="flex-container gap10h5v justifyCenter">
<div class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="temperature">
<small>Temperature</small>
<div class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="temperature">Temperature</small>
<input class="neo-range-slider" type="range" id="temp_textgenerationwebui" name="volume" min="0.0" max="2.0" step="0.01" x-setting-id="temp">
<input class="neo-range-input" type="number" min="0.0" max="2.0" step="0.01" data-for="temp_textgenerationwebui" id="temp_counter_textgenerationwebui">
</div>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="Top K">
<small>Top K</small>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="Top K">Top K</small>
<input class="neo-range-slider" type="range" id="top_k_textgenerationwebui" name="volume" min="0" max="200" step="1">
<input class="neo-range-input" type="number" min="0" max="200" step="1" data-for="top_k_textgenerationwebui" id="top_k_counter_textgenerationwebui">
</div>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="Top P">
<small>Top P</small>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="Top P">Top P</small>
<input class="neo-range-slider" type="range" id="top_p_textgenerationwebui" name="volume" min="0" max="1" step="0.01">
<input class="neo-range-input" type="number" min="0" max="1" step="0.01" data-for="top_p_textgenerationwebui" id="top_p_counter_textgenerationwebui">
</div>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="Typical P">
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small>Typical P</small>
<input class="neo-range-slider" type="range" id="typical_p_textgenerationwebui" name="volume" min="0" max="1" step="0.01">
<input class="neo-range-input" type="number" min="0" max="1" step="0.01" data-for="typical_p_textgenerationwebui" id="typical_p_counter_textgenerationwebui">
</div>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="Min P">
<small>Min P</small>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="Min P">Min P</small>
<input class="neo-range-slider" type="range" id="min_p_textgenerationwebui" name="volume" min="0" max="1" step="0.01">
<input class="neo-range-input" type="number" min="0" max="1" step="0.05" data-for="min_p_textgenerationwebui" id="min_p_counter_textgenerationwebui">
</div>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="Top A">
<small>Top A</small>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="Top A">Top A</small>
<input class="neo-range-slider" type="range" id="top_a_textgenerationwebui" name="volume" min="0" max="1" step="0.01">
<input class="neo-range-input" type="number" min="0" max="1" step="0.01" data-for="top_a_textgenerationwebui" id="top_a_counter_textgenerationwebui">
</div>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="Tail Free Sampling">
<small>Tail Free Sampling</small>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="Tail Free Sampling">Tail Free Sampling</small>
<input class="neo-range-slider" type="range" id="tfs_textgenerationwebui" name="volume" min="0" max="1" step="0.01">
<input class="neo-range-input" type="number" min="0" max="1" step="0.01" data-for="tfs_textgenerationwebui" id="tfs_counter_textgenerationwebui">
</div>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="Epsilon Cutoff">
<small>Epsilon Cutoff</small>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="Epsilon Cutoff">Epsilon Cutoff</small>
<input class="neo-range-slider" type="range" id="epsilon_cutoff_textgenerationwebui" name="volume" min="0" max="9" step="0.01">
<input class="neo-range-input" type="number" min="0" max="9" step="0.01" data-for="epsilon_cutoff_textgenerationwebui" id="epsilon_cutoff_counter_textgenerationwebui">
</div>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="Eta Cutoff">
<small>Eta Cutoff</small>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="Eta Cutoff">Eta Cutoff</small>
<input class="neo-range-slider" type="range" id="eta_cutoff_textgenerationwebui" name="volume" min="0" max="20" step="0.01">
<input class="neo-range-input" type="number" min="0" max="20" step="0.01" data-for="eta_cutoff_textgenerationwebui" id="eta_cutoff_counter_textgenerationwebui">
</div>
<div class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="rep.pen">
<small>Repetition Penalty</small>
<div class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="rep.pen">Repetition Penalty</small>
<input class="neo-range-slider" type="range" id="rep_pen_textgenerationwebui" name="volume" min="1" max="1.5" step="0.01">
<input class="neo-range-input" type="number" min="1" max="1.5" step="0.01" data-for="rep_pen_textgenerationwebui" id="rep_pen_counter_textgenerationwebui">
</div>
<div data-forAphro=False class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="rep.pen range">
<small>Repetition Penalty Range</small>
<div data-forAphro=False class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="rep.pen range">Repetition Penalty Range</small>
<input class="neo-range-slider" type="range" id="rep_pen_range_textgenerationwebui" name="volume" min="0" max="4096" step="1">
<input class="neo-range-input" type="number" min="0" max="4096" step="1" data-for="rep_pen_range_textgenerationwebui" id="rep_pen_range_counter_textgenerationwebui">
</div>
<div data-forAphro=False data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="Encoder Rep. Pen.">
<small>Encoder Penalty</small>
<div data-forAphro=False data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="Encoder Rep. Pen.">Encoder Penalty</small>
<input class="neo-range-slider" type="range" id="encoder_rep_pen_textgenerationwebui" name="volume" min="0.8" max="1.5" step="0.01" />
<input class="neo-range-input" type="number" min="0.8" max="1.5" step="0.01" data-for="encoder_rep_pen_textgenerationwebui" id="encoder_rep_pen_counter_textgenerationwebui">
</div>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="Frequency Penalty">
<small>Frequency Penalty</small>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="Frequency Penalty">Frequency Penalty</small>
<input class="neo-range-slider" type="range" id="freq_pen_textgenerationwebui" name="volume" min="-2" max="2" step="0.01" />
<input class="neo-range-input" type="number" data-for="freq_pen_textgenerationwebui" min="-2" max="2" step="0.01" id="freq_pen_counter_textgenerationwebui">
</div>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="Presence Penalty">
<small>Presence Penalty</small>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="Presence Penalty">Presence Penalty</small>
<input class="neo-range-slider" type="range" id="presence_pen_textgenerationwebui" name="volume" min="-2" max="2" step="0.01" />
<input class="neo-range-input" type="number" min="-2" max="2" step="0.01" data-for="presence_pen_textgenerationwebui" id="presence_pen_counter_textgenerationwebui">
</div>
<div data-forAphro=False data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="No Repeat Ngram Size">
<small>No Repeat Ngram Size</small>
<div data-forAphro=False data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="No Repeat Ngram Size">No Repeat Ngram Size</small>
<input class="neo-range-slider" type="range" id="no_repeat_ngram_size_textgenerationwebui" name="volume" min="0" max="20" step="1">
<input class="neo-range-input" type="number" min="0" max="20" step="1" data-for="no_repeat_ngram_size_textgenerationwebui" id="no_repeat_ngram_size_counter_textgenerationwebui">
</div>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="Min Length">
<small>Min Length</small>
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="Min Length">Min Length</small>
<input class="neo-range-slider" type="range" id="min_length_textgenerationwebui" name="volume" min="0" max="2000" step="1" />
<input class="neo-range-input" type="number" min="0" max="2000" step="1" data-for="min_length_textgenerationwebui" id="min_length_counter_textgenerationwebui">
</div>
<!--
<div data-forAphro=True class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="Responses">
<div data-tg-type="aphrodite" class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0" data-i18n="Responses">
<small>Responses</small>
<input class="neo-range-slider" type="range" id="n_aphrodite_textgenerationwebui" name="volume" min="1" max="5" step="1">
<input class="neo-range-input" type="number" min="1" max="5" step="1" data-for="n_aphrodite" id="n_aphrodite_counter_textgenerationwebui">
</div>
<div data-forAphro=True class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0 displayNone" data-i18n="Keep # Best Responses">
<div data-tg-type="aphrodite" class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0 displayNone" data-i18n="Keep # Best Responses">
<small>Keep # Best Responses</small>
<input class="neo-range-slider" type="range" id="best_of_aphrodite_textgenerationwebui" name="volume" min="1" max="5" step="1">
<input class="neo-range-input" type="number" min="1" max="5" step="1" data-for="best_of_aphrodite" id="best_of_aphrodite_counter_textgenerationwebui">
</div>
<div data-forAphro=True class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0 displayNone" data-i18n="Logit Probabilities">
<div data-tg-type="aphrodite" class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0 displayNone" data-i18n="Logit Probabilities">
<small>Logit Probabilities</small>
<input class="neo-range-slider" type="range" id="log_probs_aphrodite_textgenerationwebui" name="volume" min="0" max="5" step="1">
<input class="neo-range-input" type="number" min="0" max="5" step="1" data-for="log_probs_aphrodite" id="log_probs_aphrodite_counter_textgenerationwebui">
</div>
<div data-forAphro=True class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0 displayNone" data-i18n="Prompt Logit Probabilities">
<div data-tg-type="aphrodite" class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0 displayNone" data-i18n="Prompt Logit Probabilities">
<small>Prompt Logit Probabilities</small>
<input class="neo-range-slider" type="range" id="prompt_log_probs_aphrodite_textgenerationwebui" name="volume" min="0" max="5" step="1">
<input class="neo-range-input" type="number" min="0" max="5" step="1" data-for="prompt_log_probs_aphrodite" id="prompt_log_probs_aphrodite_counter_textgenerationwebui">
@@ -1204,18 +1205,18 @@
<div class=" fa-solid fa-circle-info opacity50p " title="Mode=1 is only for llama.cpp&#13;More helpful tips coming soon."></div>
</h4>
<div class="flex-container flexFlowRow gap10px flexShrink">
<div class="alignitemscenter flex-container marginBot5 flexFlowColumn flexGrow flexShrink gap0" data-i18n="Mirostat Mode">
<small>Mode</small>
<div class="alignitemscenter flex-container marginBot5 flexFlowColumn flexGrow flexShrink gap0">
<small data-i18n="Mirostat Mode">Mode</small>
<input class="neo-range-slider" type="range" id="mirostat_mode_textgenerationwebui" name="volume" min="0" max="2" step="1" />
<input class="neo-range-input" type="number" min="0" max="2" step="1" data-for="mirostat_mode_textgenerationwebui" id="mirostat_mode_counter_textgenerationwebui">
</div>
<div class="alignitemscenter flex-container marginBot5 flexFlowColumn flexGrow flexShrink gap0" data-i18n="Mirostat Tau">
<small>Tau</small>
<div class="alignitemscenter flex-container marginBot5 flexFlowColumn flexGrow flexShrink gap0">
<small data-i18n="Mirostat Tau">Tau</small>
<input class="neo-range-slider" type="range" id="mirostat_tau_textgenerationwebui" name="volume" min="0" max="20" step="0.01" />
<input class="neo-range-input" type="number" min="0" max="20" step="0.01" data-for="mirostat_tau_textgenerationwebui" id="mirostat_tau_counter_textgenerationwebui">
</div>
<div class="alignitemscenter flex-container marginBot5 flexFlowColumn flexGrow flexShrink gap0" data-i18n="Mirostat Eta">
<small>Eta</small>
<div class="alignitemscenter flex-container marginBot5 flexFlowColumn flexGrow flexShrink gap0">
<small data-i18n="Mirostat Eta">Eta</small>
<input class="neo-range-slider" type="range" id="mirostat_eta_textgenerationwebui" name="volume" min="0" max="1" step="0.01" />
<input class="neo-range-input" type="number" min="0" max="1" step="0.01" data-for="mirostat_eta_textgenerationwebui" id="mirostat_eta_counter_textgenerationwebui">
</div>
@@ -1226,10 +1227,10 @@
<div class=" fa-solid fa-circle-info opacity50p " title="Helpful tip coming soon."></div>
</h4>
<div class="flex-container flexFlowRow alignitemscenter gap10px flexShrink">
<div class="alignitemscenter flex-container marginBot5 flexFlowColumn flexGrow flexShrink gap0" data-i18n="Number of Beams">
<small># of Beams</small>
<input class="neo-range-slider" type="range" id="num_beams_textgenerationwebui" name="volume" min="1" max="20" step="1" />
<input class="neo-range-input" type="number" min="1" max="20" step="1" data-for="num_beams_textgenerationwebui" id="num_beams_counter_textgenerationwebui">
<div class="alignitemscenter flex-container marginBot5 flexFlowColumn flexGrow flexShrink gap0">
<small data-i18n="Number of Beams"># of Beams</small>
<input class="neo-range-slider" type="range" id="num_beams_textgenerationwebui" name="volume" min="0" max="20" step="1" />
<input class="neo-range-input" type="number" min="0" max="20" step="1" data-for="num_beams_textgenerationwebui" id="num_beams_counter_textgenerationwebui">
</div>
<div class="alignitemscenter flex-container marginBot5 flexFlowColumn flexGrow flexShrink gap0">
<small data-i18n="Length Penalty">Length Penalty</small>
@@ -1244,7 +1245,7 @@
</div>
</div>
</div>
<div data-newbie-hidden name="contrastiveSearchBlock" class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<div data-forAphro=False data-newbie-hidden name="contrastiveSearchBlock" class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<h4 class="textAlignCenter" data-i18n="Contrastive search">Contrast Search
<div class=" fa-solid fa-circle-info opacity50p " title="Helpful tip coming soon."></div>
</h4>
@@ -1272,14 +1273,14 @@
<div class="fa-solid fa-circle-info opacity50p " data-i18n="Ban the eos_token. This forces the model to never end the generation prematurely" title="Ban the eos_token. This forces the model to never end the generation prematurely."></div>
</small>
</label>
<!--
<label data-forAphro=True class="checkbox_label" for="ignore_eos_token_aphrodite_textgenerationwebui">
<input type="checkbox" id="ignore_eos_token_aphrodite_textgenerationwebui" />
<small data-i18n="Ignore EOS Token">Ignore EOS Token
<div class="fa-solid fa-circle-info opacity50p " data-i18n="Ignore the EOS Token even if it generates." title="Ignore the EOS Token even if it generates."></div>
</small>
</label>
-->
<label data-tg-type="aphrodite" class="checkbox_label" for="ignore_eos_token_aphrodite_textgenerationwebui">
<input type="checkbox" id="ignore_eos_token_aphrodite_textgenerationwebui" />
<small data-i18n="Ignore EOS Token">Ignore EOS Token
<div class="fa-solid fa-circle-info opacity50p " data-i18n="Ignore the EOS Token even if it generates." title="Ignore the EOS Token even if it generates."></div>
</small>
</label>
<label class="checkbox_label flexGrow flexShrink" for="skip_special_tokens_textgenerationwebui">
<input type="checkbox" id="skip_special_tokens_textgenerationwebui" />
<small data-i18n="Skip Special Tokens">Skip Special Tokens</small>
@@ -1290,15 +1291,15 @@
<div class="fa-solid fa-circle-info opacity50p " data-i18n="Use the temperature sampler last." title="Use the temperature sampler last."></div>
</small>
</label>
<!--
<label data-forAphro=True class="checkbox_label" for="spaces_between_special_tokens_aphrodite_textgenerationwebui">
<input type="checkbox" id="spaces_between_special_tokens_aphrodite_textgenerationwebui" />
<small data-i18n="Spaces Between Special Tokens">Spaces Between Special Tokens</small>
</label>
-->
<label data-tg-type="aphrodite" class="checkbox_label" for="spaces_between_special_tokens_aphrodite_textgenerationwebui">
<input type="checkbox" id="spaces_between_special_tokens_aphrodite_textgenerationwebui" />
<small data-i18n="Spaces Between Special Tokens">Spaces Between Special Tokens</small>
</label>
</div>
</div>
<div data-newbie-hidden class="flex-container flexFlowColumn alignitemscenter flexBasis48p flexGrow flexShrink gap0">
<div data-forAphro=False data-newbie-hidden class="flex-container flexFlowColumn alignitemscenter flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="Seed" class="textAlignCenter">Seed</small>
<input type="number" id="seed_textgenerationwebui" class="text_pole textAlignCenter" min="-1" value="-1" maxlength="100" />
</div>
@@ -1306,23 +1307,23 @@
<hr data-newbie-hidden class="width100p">
<h4 class="range-block-title justifyCenter">
<span data-i18n="Banned Tokens">Banned Tokens</span>
<div class="margin5 fa-solid fa-circle-info opacity50p " title="LLaMA models only.&#13;Sequences you don't want to appear in the output.&#13;One per line. Text or [token ids].&#13;Most tokens have a leading space."></div>
<div class="margin5 fa-solid fa-circle-info opacity50p " title="LLaMA / Mistral / Yi models only. Make sure to select an appropriate tokenizer first.&#13;Sequences you don't want to appear in the output.&#13;One per line. Text or [token ids].&#13;Most tokens have a leading space. Use token counter if unsure."></div>
</h4>
<div class="wide100p">
<textarea id="banned_tokens_textgenerationwebui" class="text_pole textarea_compact" name="banned_tokens_textgenerationwebui" rows="3" placeholder="Example:&#10;some text&#10;[42, 69, 1337]"></textarea>
</div>
</div>
<div class="wide100p">
<div data-forAphro=False class="wide100p">
<hr data-newbie-hidden class="width100p">
<h4 data-i18n="CFG" class="textAlignCenter">CFG
<div class="margin5 fa-solid fa-circle-info opacity50p " title="Helpful tip coming soon."></div>
</h4>
<div data-newbie-hidden data-forAphro=False class="alignitemscenter flex-container flexFlowColumn flexShrink gap0">
<div data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexShrink gap0">
<small>Scale</small>
<input class="neo-range-slider" type="range" id="guidance_scale_textgenerationwebui" name="volume" min="0.1" max="4" step="0.05">
<input class="neo-range-input" type="number" min="0.1" max="4" step="0.05" data-for="guidance_scale_textgenerationwebui" id="guidance_scale_counter_textgenerationwebui">
</div>
<div data-newbie-hidden data-forAphro=False class="range-block">
<div data-newbie-hidden class="range-block">
<div class="range-block-title justifyLeft">
<span data-i18n="Negative Prompt">Negative Prompt</span>
<small>
@@ -1345,6 +1346,49 @@
</h4>
<textarea id="grammar_string_textgenerationwebui" rows="4" class="text_pole textarea_compact monospace" placeholder="Type in the desired custom grammar"></textarea>
</div>
<div data-newbie-hidden data-tg-type="koboldcpp" class="range-block flexFlowColumn">
<hr class="wide100p">
<div class="range-block-title">
<span data-i18n="Samplers Order">Samplers Order</span>
</div>
<div class="toggle-description widthUnset" data-i18n="Samplers will be applied in a top-down order. Use with caution.">
Samplers will be applied in a top-down order.
Use with caution.
</div>
<div id="koboldcpp_order" class="prompt_order">
<div data-id="0">
<span data-i18n="Top K">Top K</span>
<small>0</small>
</div>
<div data-id="1">
<span data-i18n="Top A">Top A</span>
<small>1</small>
</div>
<div data-id="2">
<span data-i18n="Top P">Top P & Min P</span>
<small>2</small>
</div>
<div data-id="3">
<span data-i18n="Tail Free Sampling">Tail Free Sampling</span>
<small>3</small>
</div>
<div data-id="4">
<span data-i18n="Typical P">Typical P</span>
<small>4</small>
</div>
<div data-id="5">
<span data-i18n="Temperature">Temperature</span>
<small>5</small>
</div>
<div data-id="6">
<span data-i18n="Repetition Penalty">Repetition Penalty</span>
<small>6</small>
</div>
</div>
<div id="koboldcpp_default_order" class="menu_button menu_button_icon">
<span data-i18n="Load default order">Load default order</span>
</div>
</div>
</div>
</div><!-- end of textgen settings-->
<div id="openai_settings">
@@ -1383,6 +1427,17 @@
</span>
</div>
</div>
<div class="range-block" data-source="openai,openrouter">
<label for="openai_image_inlining" class="checkbox_label flexWrap widthFreeExpand">
<input id="openai_image_inlining" type="checkbox" />
<span data-i18n="Send inline images">Send inline images</span>
<div id="image_inlining_hint" class="flexBasis100p toggle-description justifyLeft">
Sends images in prompts if the model supports it (e.g. GPT-4V or Llava 13B).
Use the <code><i class="fa-solid fa-paperclip"></i></code> action on any message or the
<code><i class="fa-solid fa-wand-magic-sparkles"></i></code> menu to attach an image file to the chat.
</div>
</label>
</div>
<div class="range-block" data-source="ai21">
<label for="use_ai21_tokenizer" title="Use AI21 Tokenizer" class="checkbox_label widthFreeExpand">
<input id="use_ai21_tokenizer" type="checkbox" /><span data-i18n="Use AI21 Tokenizer">Use AI21 Tokenizer</span>
@@ -1456,10 +1511,10 @@
<div class="flex-container flexFlowColumn">
<div id="main-API-selector-block">
<select id="main_api">
<option value="kobold"><span data-i18n="KoboldAI">KoboldAI</span></option>
<option value="kobold"><span data-i18n="KoboldAI">KoboldAI Classic</span></option>
<option value="koboldhorde"><span data-i18n="KoboldAI Horde">KoboldAI Horde</span></option>
<option value="textgenerationwebui"><span data-i18n="Text Gen WebUI">Text Gen WebUI (ooba, Mancer, Aphrodite)</span></option>
<option value="novel"><span data-i18n="NovelAI">NovelAI</span></option>
<option value="textgenerationwebui"><span data-i18n="Text Completion">Text Completion (ooba, Mancer, Aphrodite, TabbyAPI, KoboldCpp)</span></option>
<option value="openai"><span data-i18n="Chat Completion (OpenAI, Claude, Window/OpenRouter, Scale, AI21)">Chat Completion (OpenAI, Claude, Window, OpenRouter, Scale, AI21, PaLM)</span></option>
</select>
</div>
@@ -1588,6 +1643,8 @@
<option value="ooba">Default (oobabooga)</option>
<option value="mancer">Mancer</option>
<option value="aphrodite">Aphrodite</option>
<option value="tabby">TabbyAPI</option>
<option value="koboldcpp">KoboldCpp</option>
</select>
</div>
<div data-tg-type="mancer" class="flex-container flexFlowColumn">
@@ -1618,7 +1675,7 @@
oobabooga/text-generation-webui
</a>
<span data-i18n="Make sure you run it with">
Make sure you run it with <tt>--extensions openai</tt> flag
Make sure you run it with <tt>--api</tt> flag
</span>
</div>
<div class="flex1">
@@ -1648,8 +1705,41 @@
<input id="aphrodite_api_url_text" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" data-server-history="aphrodite">
</div>
</div>
<div data-tg-type="tabby">
<div class="flex-container flexFlowColumn">
<a href="https://github.com/theroyallab/tabbyAPI" target="_blank">
theroyallab/tabbyAPI
</a>
</div>
<h4 data-i18n="Tabby API key">Tabby API key</h4>
<div class="flex-container">
<input id="api_key_tabby" name="api_key_tabby" class="text_pole flex1 wide100p" maxlength="500" size="35" type="text" autocomplete="off">
<div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_tabby">
</div>
</div>
<div data-for="api_key_tabby" class="neutral_warning" data-i18n="For privacy reasons, your API key will be hidden after you reload the page.">
For privacy reasons, your API key will be hidden after you reload the page.
</div>
<div class="flex1">
<h4 data-i18n="API url">API URL</h4>
<small data-i18n="Example: http://127.0.0.1:5000">Example: http://127.0.0.1:5000</small>
<input id="tabby_api_url_text" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" data-server-history="tabby">
</div>
</div>
<div data-tg-type="koboldcpp">
<div class="flex-container flexFlowColumn">
<a href="https://github.com/LostRuins/koboldcpp" target="_blank">
LostRuins/koboldcpp
</a>
</div>
<div class="flex1">
<h4 data-i18n="API url">API URL</h4>
<small data-i18n="Example: http://127.0.0.1:5001">Example: http://127.0.0.1:5001</small>
<input id="koboldcpp_api_url_text" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" data-server-history="koboldcpp">
</div>
</div>
<div class="flex-container">
<div id="api_button_textgenerationwebui" class="api_button menu_button" type="submit" data-i18n="Connect" data-server-connect="ooba_blocking,aphrodite">Connect</div>
<div id="api_button_textgenerationwebui" class="api_button menu_button" type="submit" data-i18n="Connect" data-server-connect="ooba_blocking,aphrodite,tabby,koboldcpp">Connect</div>
<div class="api_loading menu_button" data-i18n="Cancel">Cancel</div>
</div>
<label class="checkbox_label margin-bot-10px" for="legacy_api_textgenerationwebui">
@@ -1723,6 +1813,7 @@
</optgroup>
<optgroup label="GPT-4">
<option value="gpt-4">gpt-4</option>
<option value="gpt-4-vision-preview">gpt-4-vision-preview</option>
<option value="gpt-4-1106-preview">gpt-4-1106-preview</option>
<option value="gpt-4-0613">gpt-4-0613</option>
<option value="gpt-4-0314">gpt-4-0314</option>
@@ -1741,6 +1832,10 @@
<optgroup id="openai_external_category" label="External">
</optgroup>
</select>
<label for="openai_bypass_status_check" class="checkbox_label">
<input id="openai_bypass_status_check" type="checkbox" />
<span data-i18n="Bypass API status check">Bypass API status check</span>
</label>
<label for="openai_show_external_models" class="checkbox_label">
<input id="openai_show_external_models" type="checkbox" />
<span data-i18n="Show External models (provided by API)">Show "External" models (provided by API)</span>
@@ -1775,6 +1870,7 @@
<option value="claude-instant-v1-100k">claude-instant-v1-100k</option>
</optgroup>
<optgroup label="Sub-versions">
<option value="claude-2.1">claude-2.1</option>
<option value="claude-2.0">claude-2.0</option>
<option value="claude-v1.3">claude-v1.3</option>
<option value="claude-v1.3-100k">claude-v1.3-100k</option>
@@ -1815,6 +1911,36 @@
<option data-i18n="Connect to the API">-- Connect to the API --</option>
</select>
</div>
<div class="marginTopBot5">
<div class="inline-drawer wide100p">
<div class="inline-drawer-toggle inline-drawer-header">
<b data-i18n="Model Order">OpenRouter Model Sorting</b>
<div class="fa-solid fa-circle-chevron-down inline-drawer-icon down"></div>
</div>
<div class="inline-drawer-content m-b-1">
<div class="marginTopBot5">
<label for="openrouter_sort_models" class="checkbox_label">
<select id="openrouter_sort_models">
<option data-i18n="Alphabetically" value="alphabetically">Alphabetically</option>
<option data-i18n="Price" value="pricing.prompt">Price (cheapest)</option>
<option data-i18n="Context Size" value="context_length">Context Size</option>
</select>
</label>
</div>
<div class="marginTopBot5">
<label for="openrouter_group_models" class="checkbox_label">
<input id="openrouter_group_models" type="checkbox" />
<span data-i18n="Group by vendors">Group by vendors</span>
</label>
<div class="toggle-description justifyLeft wide100p">
<span data-i18n="Group by vendors Description">
Put OpenAI models in one group, Anthropic models in other group, etc. Can be combined with sorting.
</span>
</div>
</div>
</div>
</div>
</div>
<div class="marginTopBot5">
<label for="openrouter_use_fallback" class="checkbox_label">
<input id="openrouter_use_fallback" type="checkbox" />
@@ -1916,7 +2042,7 @@
<select id="model_palm_select" class="displayNone"></select>
</form>
<div class="flex-container flex">
<div id="api_button_openai" class="menu_button menu_button_icon" type="submit" data-i18n="Connect">Connect</div>
<div id="api_button_openai" class="api_button menu_button menu_button_icon" type="submit" data-i18n="Connect">Connect</div>
<div class="api_loading menu_button" data-i18n="Cancel">Cancel</div>
<div data-source="openrouter" id="openrouter_authorize" class="menu_button menu_button_icon" title="Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai" data-i18n="[title]Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai">Authorize</div>
<div id="test_api_button" class="menu_button menu_button_icon" title="Verifies your API connection by sending a short test message. Be aware that you'll be credited for it!" data-i18n="[title]Verifies your API connection by sending a short test message. Be aware that you'll be credited for it!">Test Message</div>
@@ -2079,9 +2205,6 @@
<label>
<small data-i18n="System Prompt">System Prompt</small>
</label>
<div class="prompt_overridden">
Overridden by the Character Definitions.
</div>
<textarea id="instruct_system_prompt" class="text_pole textarea_compact autoSetHeight" rows="1"></textarea>
</div>
<div data-newbie-hidden class="inline-drawer wide100p flexFlowColumn">
@@ -2228,6 +2351,7 @@
<option value="4">NerdStash (NovelAI Clio)</option>
<option value="5">NerdStash v2 (NovelAI Kayra)</option>
<option value="7">Mistral</option>
<option value="8">Yi</option>
<option value="6">API (WebUI / koboldcpp)</option>
</select>
</div>
@@ -2502,6 +2626,7 @@
<input type="search" class="text_pole textarea_compact" data-i18n="[placeholder]Search..." id="world_info_search" placeholder="Search...">
<select id="world_info_sort_order" class="margin0">
<option data-rule="priority" value="0">Priority</option>
<option data-rule="custom" value="13">Custom</option>
<option data-order="asc" data-field="comment" value="1">Title A-Z</option>
<option data-order="desc" data-field="comment" value="2">Title Z-A</option>
<option data-order="asc" data-field="content" data-rule="length" value="3">Tokens ↗</option>
@@ -2574,6 +2699,7 @@
<span data-i18n="Avatar Style">Avatars:</span>
<select id="avatar_style" class="widthNatural flex1 margin0">
<option value="0" data-i18n="Circle">Circle</option>
<option value="2" data-i18n="Square">Square</option>
<option value="1" data-i18n="Rectangle">Rectangle</option>
</select>
</div>
@@ -2677,10 +2803,23 @@
</div>
</div>
</div>
<div id="chat-truncation-block" class="range-block">
<div class="range-block-title" data-i18n="Chat Truncation">
Chat Truncation <small>(0 = unlimited)</small>
</div>
<div class="range-block-range-and-counter">
<div class="range-block-range">
<input type="range" id="chat_truncation" name="chat_truncation" min="0" max="1000" step="25">
</div>
<div class="range-block-counter">
<input type="number" min="0" max="1000" step="1" data-for="chat_truncation" id="chat_truncation_counter">
</div>
</div>
</div>
</div>
</div>
</div>
<div name="UserSettingsSecondColumn" id="UI-Customization" class="flex-container flexFlowColumn wide100p">
<div name="UserSettingsSecondColumn" id="UI-Customization" class="flex-container flexFlowColumn wide100p flexNoGap">
<div name="themeToggles">
<h4 data-i18n="Theme Toggles">Theme Toggles</h4>
<label data-newbie-hidden for="fast_ui_mode" class="checkbox_label" title="removes blur from window backgrounds" data-i18n="[title]removes blur from window backgrounds">
@@ -2737,6 +2876,13 @@
</label>
</div>
<h4><span data-i18n="Miscellaneous">Miscellaneous</span></h4>
<div title="If set in the advanced character definitions, this field will be displayed in the characters list.">
<label for="aux_field" data-i18n="Aux List Field">Aux List Field</label>
<select id="aux_field">
<option data-i18n="Character Version" value="character_version">Character Version</option>
<option data-i18n="Created by" value="creator">Created by</option>
</select>
</div>
<div>
<label for="play_message_sound" class="checkbox_label">
<input id="play_message_sound" type="checkbox" />
@@ -3042,8 +3188,33 @@
</div>
<div class="drawer-content closedDrawer">
<div class="flex-container wide100p alignitemscenter spaceBetween">
<h3 class="margin0"><span data-i18n="Persona Management">Persona Management</span></h3>
<a href="https://docs.sillytavern.app/usage/core-concepts/personas/" target="_blank" data-i18n="How do I use this?">How do I use this?</a>
<div class="flex-container alignItemsBaseline wide100p">
<div class="flex1 flex-container alignItemsBaseline">
<h3 class="margin0" data-i18n="Persona Management">Persona Management</h3>
<a href="https://docs.sillytavern.app/usage/core-concepts/personas/" target="_blank" data-i18n="How do I use this?">
<span class="fa-solid fa-circle-question note-link-span"></span>
</a>
</div>
<div class="flex-container">
<div class="menu_button menu_button_icon user_stats_button" title="Click for stats!">
<i class="fa-solid fa-ranking-star"></i>
<span data-i18n="Usage Stats">Usage Stats</span>
</div>
<div id="personas_backup" class="menu_button menu_button_icon" title="Backup your personas to a file">
<i class="fa-solid fa-file-export"></i>
<span data-i18n="Backup">Backup</span>
</div>
<div id="personas_restore" class="menu_button menu_button_icon" title="Restore your personas from a file">
<i class="fa-solid fa-file-import"></i>
<span data-i18n="Restore">Restore</span>
</div>
<div id="create_dummy_persona" class="menu_button menu_button_icon" title="Create a dummy persona" data-i18n="[title]Create a dummy persona">
<i class="fa-solid fa-person-circle-question fa-fw"></i>
<span data-i18n="Create">Create</span>
</div>
<input id="personas_restore_input" type="file" accept=".json" hidden>
</div>
</div>
<div id="persona-management-block" class="flex-container wide100p">
<div class="flex1">
<h4 data-i18n="Name">Name</h4>
@@ -3083,13 +3254,6 @@
<div class="flex1">
<h4 class="title_restorable">
<span data-i18n="Your Persona">Your Persona</span>
<div class="menu_button menu_button_icon user_stats_button" title="Click for stats!">
<i class="fa-solid fa-ranking-star"></i><span data-i18n="Usage Stats">Usage Stats</span>
</div>
<div id="create_dummy_persona" class="menu_button menu_button_icon" title="Create a dummy persona" data-i18n="[title]Create a dummy persona">
<i class="fa-solid fa-person-circle-question fa-fw"></i>
<span data-i18n="Create">Create</span>
</div>
</h4>
<div id="user_avatar_block">
<div class="avatar_upload">+</div>
@@ -3348,6 +3512,10 @@
<input id="rm_group_automode" type="checkbox" />
<span data-i18n="Auto Mode">Auto Mode</span>
</label>
<label id="rm_group_hidemutedsprites_label" class="checkbox_label whitespacenowrap">
<input id="rm_group_hidemutedsprites" type="checkbox" />
<span data-i18n="Hide Muted Member Sprites">Hide Muted Member Sprites</span>
</label>
</div>
</div>
</div>
@@ -3623,6 +3791,7 @@
<span id="ChatHistoryCharName"></span><span data-i18n="Chat History">Chat History</span>
<a href="https://docs.sillytavern.app/usage/core-concepts/chatfilemanagement/#chat-import" class="notes-link" target="_blank"><span class="fa-solid fa-circle-question note-link-span"></span></a>
</div>
<div class="fa-solid fa-plus menu_button" title="New Chat" id="newChatFromManageScreenButton"></div>
<input type="text" id="select_chat_search" class="menu_button" placeholder="Search..." autocomplete="off">
<div id="select_chat_cross" class="opacity50p hoverglow fa-solid fa-circle-xmark fontsize120p" alt="Close Past Chat Popup"></div>
</div>
@@ -3752,7 +3921,7 @@
<form class="world_entry_form wi-card-entry">
<div class="inline-drawer wide100p">
<div class="inline-drawer-toggle inline-drawer-header gap5px padding0">
<!-- <span class="drag-handle">&#9776;</span> -->
<span class="drag-handle">&#9776;</span>
<div class="gap5px world_entry_thin_controls wide100p alignitemscenter">
<div class="fa-fw fa-solid fa-circle-chevron-down inline-drawer-icon down"></div>
<span class="flex-container alignitemscenter wide100p">
@@ -3854,7 +4023,7 @@
</span>
</small>
</label>
<textarea class="text_pole" name="content" rows="4" data-i18n="[placeholder]What this keyword should mean to the AI, sent verbatim" placeholder="What this keyword should mean to the AI, sent verbatim"></textarea>
<textarea class="text_pole" name="content" rows="8" data-i18n="[placeholder]What this keyword should mean to the AI, sent verbatim" placeholder="What this keyword should mean to the AI, sent verbatim"></textarea>
</div>
</div>
<div class="world_entry_thin_controls commentContainer">
@@ -4049,6 +4218,7 @@
<div title="Prompt" class="mes_prompt fa-solid fa-square-poll-horizontal " data-i18n="[title]Prompt"></div>
<div title="Exclude message from prompts" class="mes_hide fa-solid fa-eye" data-i18n="[title]Exclude message from prompts"></div>
<div title="Include message in prompts" class="mes_unhide fa-solid fa-eye-slash" data-i18n="[title]Include message in prompts"></div>
<div title="Embed file or image" class="mes_embed fa-solid fa-paperclip" data-i18n="[title]Embed file or image"></div>
<div title="Create bookmark" class="mes_create_bookmark fa-regular fa-solid fa-book-bookmark" data-i18n="[title]Create Bookmark"></div>
<div title="Create branch" class="mes_create_branch fa-regular fa-code-branch" data-i18n="[title]Create Branch"></div>
<div title="Copy" class="mes_copy fa-solid fa-copy " data-i18n="[title]Copy"></div>
@@ -4228,6 +4398,15 @@
<div id="typing_indicator_template" class="template_element">
<div class="typing_indicator"><span class="typing_indicator_name">CHAR</span> is typing</div>
</div>
<div id="message_file_template" class="template_element">
<div class="mes_file_container">
<div class="fa-lg fa-solid fa-file-alt mes_file_icon"></div>
<div class="mes_file_name"></div>
<div class="mes_file_size"></div>
<div class="right_menu_button mes_file_open fa-solid fa-magnifying-glass" title="View contents" data-i18n="[title]View contents"></div>
<div class="right_menu_button mes_file_delete fa-solid fa-trash-can" title="Remove the file" data-i18n="[title]Remove the file"></div>
</div>
</div>
<div id="movingDivs">
<div id="floatingPrompt" class="drawer-content flexGap5">
<div class="panelControlBar flex-container">
@@ -4515,17 +4694,33 @@
<div id="dialogue_del_mes_ok" class="menu_button">Delete</div>
<div id="dialogue_del_mes_cancel" class="menu_button">Cancel</div>
</div>
<form id="send_form" class="no-connection">
<div id="options_button" class="fa-solid fa-bars"></div>
<textarea id="send_textarea" data-i18n="[placeholder]Not connected to API!" placeholder="Not connected to API!" name="text"></textarea>
<div id="send_but_sheld">
<div id="mes_stop" title="Abort request" class="mes_stop" data-i18n="[title]Abort request">
<i class="fa-solid fa-circle-stop"></i>
<div id="send_form" class="no-connection">
<form id="file_form" class="wide100p displayNone">
<div class="file_attached">
<input id="file_form_input" type="file" hidden>
<input id="embed_file_input" type="file" hidden>
<i class="fa-solid fa-file-alt"></i>
<span class="file_name">File Name</span>
<span class="file_size">File Size</span>
<button id="file_form_reset" type="reset" class="menu_button" title="Remove the file" data-i18n="[title]Remove the file">
<i class="fa fa-times"></i>
</button>
</div>
</form>
<div id="nonQRFormItems">
<div id="leftSendForm" class="alignContentCenter">
<div id="options_button" class="fa-solid fa-bars"></div>
</div>
<textarea id="send_textarea" data-i18n="[placeholder]Not connected to API!" placeholder="Not connected to API!" name="text"></textarea>
<div id="rightSendForm" class="alignContentCenter">
<div id="mes_stop" title="Abort request" class="mes_stop" data-i18n="[title]Abort request">
<i class="fa-solid fa-circle-stop"></i>
</div>
<div id="mes_continue" class="fa-fw fa-solid fa-arrow-right displayNone" title="Continue the last message" data-i18n="[title]Continue the last message"></div>
<div id="send_but" class="fa-solid fa-paper-plane displayNone" title="Send a message" data-i18n="[title]Send a message"></div>
</div>
<div id="mes_continue" class="fa-fw fa-solid fa-arrow-right displayNone" title="Continue the last message" data-i18n="[title]Continue the last message"></div>
<div id="send_but" class="fa-solid fa-paper-plane displayNone" title="Send a message" data-i18n="[title]Send a message"></div>
</div>
</form>
</div>
</div>
</div>
<!-- popups live outside sheld to avoid blur conflicts -->

30
public/manifest.json Normal file
View File

@@ -0,0 +1,30 @@
{
"name": "SillyTavern",
"short_name": "SillyTavern",
"start_url": "/",
"display": "standalone",
"theme_color": "#202124",
"background_color": "#202124",
"icons": [
{
"src": "img/apple-icon-57x57.png",
"sizes": "57x57",
"type": "image/png"
},
{
"src": "img/apple-icon-72x72.png",
"sizes": "72x72",
"type": "image/png"
},
{
"src": "img/apple-icon-114x114.png",
"sizes": "114x114",
"type": "image/png"
},
{
"src": "img/apple-icon-144x144.png",
"sizes": "144x144",
"type": "image/png"
}
]
}

File diff suppressed because it is too large Load Diff

View File

@@ -13,6 +13,7 @@ import {
} from "../script.js";
import { favsToHotswap } from "./RossAscends-mods.js";
import { hideLoader, showLoader } from "./loader.js";
import { convertCharacterToPersona } from "./personas.js";
import { createTagInput, getTagKeyForCharacter, tag_map } from "./tags.js";
@@ -614,9 +615,12 @@ class BulkEditOverlay {
const deleteChats = document.getElementById('del_char_checkbox').checked ?? false;
showLoader();
toastr.info("We're deleting your characters, please wait...", 'Working on it');
Promise.all(this.selectedCharacters.map(async characterId => CharacterContextMenu.delete(characterId, deleteChats)))
.then(() => getCharacters())
.then(() => this.browseState())
.finally(() => hideLoader());
}
);
}

View File

@@ -179,6 +179,13 @@ class PromptCollection {
}
function PromptManagerModule() {
this.systemPrompts = [
'main',
'nsfw',
'jailbreak',
'enhanceDefinitions',
];
this.configuration = {
version: 1,
prefix: '',
@@ -398,6 +405,10 @@ PromptManagerModule.prototype.init = function (moduleConfiguration, serviceSetti
document.getElementById(this.configuration.prefix + 'prompt_manager_popup_entry_form_injection_position').value = prompt.injection_position ?? 0;
document.getElementById(this.configuration.prefix + 'prompt_manager_popup_entry_form_injection_depth').value = prompt.injection_depth ?? DEFAULT_DEPTH;
document.getElementById(this.configuration.prefix + 'prompt_manager_depth_block').style.visibility = prompt.injection_position === INJECTION_POSITION.ABSOLUTE ? 'visible' : 'hidden';
if (!this.systemPrompts.includes(promptId)) {
document.getElementById(this.configuration.prefix + 'prompt_manager_popup_entry_form_injection_position').removeAttribute('disabled');
}
}
// Append prompt to selected character
@@ -721,6 +732,12 @@ PromptManagerModule.prototype.getTokenHandler = function () {
return this.tokenHandler;
}
/**
 * Checks whether a prompt is toggled off in the active character's prompt order.
 * @param {string} identifier - Prompt identifier to look up.
 * @returns {boolean} True when an order entry exists and is disabled, false otherwise.
 */
PromptManagerModule.prototype.isPromptDisabledForActiveCharacter = function (identifier) {
    const orderEntry = this.getPromptOrderEntry(this.activeCharacter, identifier);
    // No order entry means the prompt is not disabled.
    return orderEntry ? !orderEntry.enabled : false;
};
/**
* Add a prompt to the current character's prompt list.
* @param {object} prompt - The prompt to be added.
@@ -859,7 +876,8 @@ PromptManagerModule.prototype.isPromptEditAllowed = function (prompt) {
* @returns {boolean} True if the prompt can be deleted, false otherwise.
*/
/**
 * Decides whether a prompt's enabled state may be toggled by the user.
 * Marker prompts are not toggleable, except for a fixed allow-list of markers;
 * everything else is toggleable unless the configuration forbids it.
 * @param {object} prompt - Prompt to check.
 * @returns {boolean} True if the prompt can be toggled, false otherwise.
 */
PromptManagerModule.prototype.isPromptToggleAllowed = function (prompt) {
    // Markers that are always allowed to be toggled despite being markers.
    const forceTogglePrompts = ['charDescription', 'charPersonality', 'scenario', 'personaDescription', 'worldInfoBefore', 'worldInfoAfter'];
    if (prompt.marker && !forceTogglePrompts.includes(prompt.identifier)) {
        return false;
    }
    return !this.configuration.toggleDisabled.includes(prompt.identifier);
};
/**
@@ -1114,6 +1132,11 @@ PromptManagerModule.prototype.loadPromptIntoEditForm = function (prompt) {
injectionPositionField.value = prompt.injection_position ?? INJECTION_POSITION.RELATIVE;
injectionDepthField.value = prompt.injection_depth ?? DEFAULT_DEPTH;
injectionDepthBlock.style.visibility = prompt.injection_position === INJECTION_POSITION.ABSOLUTE ? 'visible' : 'hidden';
injectionPositionField.removeAttribute('disabled');
if (this.systemPrompts.includes(prompt.identifier)) {
injectionPositionField.setAttribute('disabled', 'disabled');
}
const resetPromptButton = document.getElementById(this.configuration.prefix + 'prompt_manager_popup_entry_form_reset');
if (true === prompt.system_prompt) {
@@ -1198,6 +1221,7 @@ PromptManagerModule.prototype.clearEditForm = function () {
roleField.selectedIndex = 0;
promptField.value = '';
injectionPositionField.selectedIndex = 0;
injectionPositionField.removeAttribute('disabled');
injectionDepthField.value = DEFAULT_DEPTH;
injectionDepthBlock.style.visibility = 'unset';

View File

@@ -906,7 +906,7 @@ export function initRossMods() {
if (power_user.gestures === false) {
return
}
if ($(".mes_edit_buttons, #character_popup, #dialogue_popup, #WorldInfo").is(":visible")) {
if ($(".mes_edit_buttons, .drawer-content, #character_popup, #dialogue_popup, #WorldInfo, #right-nav-panel, #left-nav-panel, #select_chat_popup, #floatingPrompt").is(":visible")) {
return
}
var SwipeButR = $('.swipe_right:last');
@@ -921,7 +921,7 @@ export function initRossMods() {
if (power_user.gestures === false) {
return
}
if ($(".mes_edit_buttons, #character_popup, #dialogue_popup, #WorldInfo").is(":visible")) {
if ($(".mes_edit_buttons, .drawer-content, #character_popup, #dialogue_popup, #WorldInfo, #right-nav-panel, #left-nav-panel, #select_chat_popup, #floatingPrompt").is(":visible")) {
return
}
var SwipeButL = $('.swipe_left:last');

View File

@@ -1,12 +1,21 @@
// Move chat functions here from script.js (eventually)
import {
addCopyToCodeBlocks,
appendMediaToMessage,
callPopup,
chat,
eventSource,
event_types,
getCurrentChatId,
hideSwipeButtons,
saveChatConditional,
name2,
saveChatDebounced,
showSwipeButtons,
} from "../script.js";
import { getBase64Async, humanFileSize, saveBase64AsFile } from "./utils.js";
const fileSizeLimit = 1024 * 1024 * 1; // 1 MB
/**
* Mark message as hidden (system message).
@@ -30,7 +39,7 @@ export async function hideChatMessage(messageId, messageBlock) {
hideSwipeButtons();
showSwipeButtons();
await saveChatConditional();
saveChatDebounced();
}
/**
@@ -55,19 +64,225 @@ export async function unhideChatMessage(messageId, messageBlock) {
hideSwipeButtons();
showSwipeButtons();
await saveChatConditional();
saveChatDebounced();
}
jQuery(function() {
$(document).on('click', '.mes_hide', async function() {
/**
* Adds a file attachment to the message.
* @param {object} message Message object
* @returns {Promise<void>}
*/
/**
 * Adds a file attachment to the message.
 * Images are saved server-side and linked as inline images; any other file is
 * embedded as text under `message.extra.file` (text, size, name).
 * @param {object} message Message object to receive the attachment.
 * @param {string} [inputId='file_form_input'] DOM id of the file input to read.
 * @returns {Promise<void>}
 */
export async function populateFileAttachment(message, inputId = 'file_form_input') {
    try {
        if (!message) return;
        if (!message.extra) message.extra = {};

        const input = document.getElementById(inputId);
        if (!(input instanceof HTMLInputElement)) return;

        const file = input.files[0];
        if (!file) return;

        if (file.type.startsWith('image/')) {
            // Strip the data-URL prefix and persist the image on the server.
            const dataUrl = await getBase64Async(file);
            const base64Data = dataUrl.split(',')[1];
            const extension = file.type.split('/')[1];
            message.extra.image = await saveBase64AsFile(base64Data, name2, file.name, extension);
            message.extra.inline_image = true;
        } else {
            // Non-image files are embedded as plain text.
            message.extra.file = {
                text: await file.text(),
                size: file.size,
                name: file.name,
            };
        }
    } catch (error) {
        console.error('Could not upload file', error);
    } finally {
        // Always clear the form so the same file can be re-selected later.
        $('#file_form').trigger('reset');
    }
}
/**
* Validates file to make sure it is not binary or not image.
* @param {File} file File object
* @returns {Promise<boolean>} True if file is valid, false otherwise.
*/
/**
 * Validates a file before attaching: rejects oversized non-image files and
 * binary (non-text) content.
 * @param {File} file File object to validate.
 * @returns {Promise<boolean>} True if the file can be attached, false otherwise.
 */
async function validateFile(file) {
    const fileText = await file.text();
    const isImage = file.type.startsWith('image/');
    // A file counts as binary when it is non-empty and consists entirely of
    // control / non-ASCII characters. Note `+` rather than `*`: with `*` the
    // empty string matches, so an empty text file was misclassified as binary
    // and rejected.
    const isBinary = /^[\x00-\x08\x0E-\x1F\x7F-\xFF]+$/.test(fileText);

    if (!isImage && file.size > fileSizeLimit) {
        toastr.error(`File is too big. Maximum size is ${humanFileSize(fileSizeLimit)}.`);
        return false;
    }

    // Binary data cannot be embedded as message text.
    if (isBinary && !isImage) {
        toastr.error('Binary files are not supported. Select a text file or image.');
        return false;
    }

    return true;
}
/**
 * Checks whether the chat input form currently has a file selected for attachment.
 * @returns {boolean} True when a file is pending, false otherwise.
 */
export function hasPendingFileAttachment() {
    const input = document.getElementById('file_form_input');
    if (!(input instanceof HTMLInputElement)) return false;
    return Boolean(input.files[0]);
}
/**
* Displays file information in the message sending form.
* @returns {Promise<void>}
*/
/**
 * Displays file information in the message-sending form after a file is chosen.
 * Invalid selections reset the form; valid ones show name/size and reveal it.
 * @returns {Promise<void>}
 */
async function onFileAttach() {
    const input = document.getElementById('file_form_input');
    if (!(input instanceof HTMLInputElement)) return;

    const file = input.files[0];
    if (!file) return;

    if (!await validateFile(file)) {
        // Discard an invalid (oversized / binary) selection.
        $('#file_form').trigger('reset');
        return;
    }

    const form = $('#file_form');
    form.find('.file_name').text(file.name);
    form.find('.file_size').text(humanFileSize(file.size));
    form.removeClass('displayNone');

    // Reset the form when the chat changes so attachments don't leak across chats.
    eventSource.once(event_types.CHAT_CHANGED, () => {
        $('#file_form').trigger('reset');
    });
}
/**
* Deletes file from message.
* @param {number} messageId Message ID
*/
/**
 * Deletes the file attachment from a message after user confirmation.
 * @param {number} messageId Message ID.
 */
async function deleteMessageFile(messageId) {
    const confirmed = await callPopup('Are you sure you want to delete this file?', 'confirm');
    if (!confirmed) {
        console.debug('Delete file cancelled');
        return;
    }

    const message = chat[messageId];
    if (!message?.extra?.file) {
        console.debug('Message has no file');
        return;
    }

    // Remove from the chat data, the DOM, and persist the change.
    delete message.extra.file;
    $(`.mes[mesid="${messageId}"] .mes_file_container`).remove();
    saveChatDebounced();
}
/**
* Opens file from message in a modal.
* @param {number} messageId Message ID
*/
/**
 * Opens the text content of a message's file attachment in a popup modal.
 * @param {number} messageId Message ID.
 */
async function viewMessageFile(messageId) {
    const fileText = chat[messageId]?.extra?.file?.text;
    if (!fileText) {
        console.debug('Message has no file or it is empty');
        return;
    }

    const modal = $('<div><pre><code></code></pre></div>');
    modal.addClass('file_modal');
    // Use .text() so the file contents are never interpreted as HTML.
    modal.find('code').addClass('txt').text(fileText);
    addCopyToCodeBlocks(modal);
    callPopup(modal, 'text');
}
/**
* Inserts a file embed into the message.
* @param {number} messageId
* @param {JQuery<HTMLElement>} messageBlock
* @returns {Promise<void>}
*/
/**
 * Inserts a file embed into an existing message via a hidden file input.
 * @param {number} messageId Message ID to attach the file to.
 * @param {JQuery<HTMLElement>} messageBlock The message's DOM block.
 * @returns {Promise<void>}
 */
function embedMessageFile(messageId, messageBlock) {
    const message = chat[messageId];

    if (!message) {
        console.warn('Failed to find message with id', messageId);
        return;
    }

    // Re-bind the change handler each time so `message`/`messageBlock` are
    // captured fresh for this click, then programmatically open the picker.
    $('#embed_file_input')
        .off('change')
        .on('change', parseAndUploadEmbed)
        .trigger('click');

    async function parseAndUploadEmbed(e) {
        const file = e.target.files[0];
        if (!file) return;

        const isValid = await validateFile(file);

        if (!isValid) {
            // Invalid file: clear the attachment form and bail out.
            $('#file_form').trigger('reset');
            return;
        }

        // Attach the file to the message, refresh its DOM, and persist the chat.
        await populateFileAttachment(message, 'embed_file_input');
        appendMediaToMessage(message, messageBlock);
        saveChatDebounced();
    }
}
// DOM event wiring for message hide/unhide and file attachment features.
// NOTE: a stale duplicate `'.mes_unhide'` handler-open line (pre-reformat diff
// residue) was removed here; it left the block with unbalanced braces.
jQuery(function () {
    // Hide / unhide messages from their per-message action buttons.
    $(document).on('click', '.mes_hide', async function () {
        const messageBlock = $(this).closest('.mes');
        const messageId = Number(messageBlock.attr('mesid'));
        await hideChatMessage(messageId, messageBlock);
    });

    $(document).on('click', '.mes_unhide', async function () {
        const messageBlock = $(this).closest('.mes');
        const messageId = Number(messageBlock.attr('mesid'));
        await unhideChatMessage(messageId, messageBlock);
    });

    // File attachment actions on individual messages.
    $(document).on('click', '.mes_file_delete', async function () {
        const messageBlock = $(this).closest('.mes');
        const messageId = Number(messageBlock.attr('mesid'));
        await deleteMessageFile(messageId);
    });

    $(document).on('click', '.mes_file_open', async function () {
        const messageBlock = $(this).closest('.mes');
        const messageId = Number(messageBlock.attr('mesid'));
        await viewMessageFile(messageId);
    });

    // Do not change. #attachFile is added by extension.
    $(document).on('click', '#attachFile', function () {
        $('#file_form_input').trigger('click');
    });

    $(document).on('click', '.mes_embed', function () {
        const messageBlock = $(this).closest('.mes');
        const messageId = Number(messageBlock.attr('mesid'));
        embedMessageFile(messageId, messageBlock);
    });

    // Input form wiring: show file info on selection, hide the form on reset.
    $('#file_form_input').on('change', onFileAttach);
    $('#file_form').on('reset', function () {
        $('#file_form').addClass('displayNone');
    });
});

View File

@@ -1,4 +1,4 @@
import { callPopup, eventSource, event_types, saveSettings, saveSettingsDebounced, getRequestHeaders, substituteParams, renderTemplate } from "../script.js";
import { callPopup, eventSource, event_types, saveSettings, saveSettingsDebounced, getRequestHeaders, substituteParams, renderTemplate, animation_duration } from "../script.js";
import { hideLoader, showLoader } from "./loader.js";
import { isSubsetOf } from "./utils.js";
export {
@@ -103,7 +103,7 @@ class ModuleWorkerWrapper {
}
// Called by the extension
async update() {
async update(...args) {
// Don't touch me I'm busy...
if (this.isBusy) {
return;
@@ -112,7 +112,7 @@ class ModuleWorkerWrapper {
// I'm free. Let's update!
try {
this.isBusy = true;
await this.callback();
await this.callback(...args);
}
finally {
this.isBusy = false;
@@ -347,27 +347,28 @@ function addExtensionsButtonAndMenu() {
$(document.body).append(extensionsMenuHTML);
$('#send_but_sheld').prepend(buttonHTML);
$('#leftSendForm').prepend(buttonHTML);
const button = $('#extensionsMenuButton');
const dropdown = $('#extensionsMenu');
//dropdown.hide();
let popper = Popper.createPopper(button.get(0), dropdown.get(0), {
placement: 'top-end',
placement: 'top-start',
});
$(button).on('click', function () {
popper.update()
dropdown.fadeIn(250);
if (!dropdown.is(':visible')) {
dropdown.fadeIn(animation_duration);
}
});
$("html").on('touchstart mousedown', function (e) {
let clickTarget = $(e.target);
if (dropdown.is(':visible')
&& clickTarget.closest(button).length == 0
&& clickTarget.closest(dropdown).length == 0) {
$(dropdown).fadeOut(250);
const clickTarget = $(e.target);
const noCloseTargets = ['#sd_gen'];
if (dropdown.is(':visible') && !noCloseTargets.some(id => clickTarget.closest(id).length > 0)) {
$(dropdown).fadeOut(animation_duration);
}
});
}
@@ -511,8 +512,8 @@ async function generateExtensionHtml(name, manifest, isActive, isDisabled, isExt
isUpToDate = data.isUpToDate;
displayVersion = ` (${branch}-${commitHash.substring(0, 7)})`;
updateButton = isUpToDate ?
`<span class="update-button"><button class="btn_update menu_button" data-name="${name.replace('third-party', '')}" title="Up to date"><i class="fa-solid fa-code-commit"></i></button></span>` :
`<span class="update-button"><button class="btn_update menu_button" data-name="${name.replace('third-party', '')}" title="Update available"><i class="fa-solid fa-download"></i></button></span>`;
`<span class="update-button"><button class="btn_update menu_button" data-name="${name.replace('third-party', '')}" title="Up to date"><i class="fa-solid fa-code-commit fa-fw"></i></button></span>` :
`<span class="update-button"><button class="btn_update menu_button" data-name="${name.replace('third-party', '')}" title="Update available"><i class="fa-solid fa-download fa-fw"></i></button></span>`;
originHtml = `<a href="${origin}" target="_blank" rel="noopener noreferrer">`;
}
@@ -592,7 +593,7 @@ function getModuleInformation() {
* Generates the HTML strings for all extensions and displays them in a popup.
*/
async function showExtensionsDetails() {
try{
try {
showLoader();
let htmlDefault = '<h3>Built-in Extensions:</h3>';
let htmlExternal = '<h3>Installed Extensions:</h3>';
@@ -640,6 +641,7 @@ async function showExtensionsDetails() {
*/
/**
 * Click handler for an extension's update button.
 * Spins the button icon for feedback and runs a non-quiet update.
 */
async function onUpdateClick() {
    const button = $(this);
    // Visual feedback while the update request is in flight.
    button.find('i').addClass('fa-spin');
    await updateExtension(button.data('name'), false);
}
@@ -657,6 +659,11 @@ async function updateExtension(extensionName, quiet) {
});
const data = await response.json();
if (!quiet) {
showExtensionsDetails();
}
if (data.isUpToDate) {
if (!quiet) {
toastr.success('Extension is already up to date');
@@ -664,10 +671,6 @@ async function updateExtension(extensionName, quiet) {
} else {
toastr.success(`Extension ${extensionName} updated to ${data.shortCommitHash}`);
}
if (!quiet) {
showExtensionsDetails();
}
} catch (error) {
console.error('Error:', error);
}
@@ -843,12 +846,19 @@ async function checkForExtensionUpdates(force) {
}
/**
 * Auto-updates all third-party extensions that opted in via their manifest.
 * Updates run in parallel; one failing update does not abort the rest.
 * NOTE: the body previously contained BOTH the old sequential
 * `await updateExtension(...)` and the new `promises.push(updateExtension(...))`
 * (merged-diff residue), updating every extension twice; only the parallel
 * form is kept.
 */
async function autoUpdateExtensions() {
    // Nothing to do if no installed extension requested auto-update.
    if (!Object.values(manifests).some(x => x.auto_update)) {
        return;
    }

    toastr.info('Auto-updating extensions. This may take several minutes.', 'Please wait...', { timeOut: 10000, extendedTimeOut: 20000 });

    const promises = [];
    for (const [id, manifest] of Object.entries(manifests)) {
        if (manifest.auto_update && id.startsWith('third-party')) {
            console.debug(`Auto-updating 3rd-party extension: ${manifest.display_name} (${id})`);
            // Quiet update: suppress per-extension toasts.
            promises.push(updateExtension(id.replace('third-party', ''), true));
        }
    }

    // allSettled so a single rejection doesn't fail the whole batch.
    await Promise.allSettled(promises);
}
/**

View File

@@ -67,7 +67,7 @@ function downloadAssetsList(url) {
const asset = availableAssets[assetType][i];
const elemId = `assets_install_${assetType}_${i}`;
let element = $('<button />', { id: elemId, type: "button", class: "asset-download-button menu_button" })
const label = $("<i class=\"fa-solid fa-download fa-xl\"></i>");
const label = $("<i class=\"fa-fw fa-solid fa-download fa-xl\"></i>");
element.append(label);
//if (DEBUG_TONY_SAMA_FORK_MODE)

View File

@@ -3,19 +3,18 @@ import { getContext, getApiUrl, doExtrasFetch, extension_settings, modules } fro
import { callPopup, getRequestHeaders, saveSettingsDebounced, substituteParams } from "../../../script.js";
import { getMessageTimeStamp } from "../../RossAscends-mods.js";
import { SECRET_KEYS, secret_state } from "../../secrets.js";
import { getMultimodalCaption } from "../shared.js";
export { MODULE_NAME };
const MODULE_NAME = 'caption';
const UPDATE_INTERVAL = 1000;
const PROMPT_DEFAULT = 'Whats in this image?';
const TEMPLATE_DEFAULT = '[{{user}} sends {{char}} a picture that contains: {{caption}}]';
/**
 * Periodic worker: show the "send picture" button only while an API
 * connection is available.
 */
async function moduleWorker() {
    const connected = getContext().onlineStatus !== 'no_connection';
    $('#send_picture').toggle(connected);
}
/**
* Migrates old extension settings to the new format.
* Must keep this function for compatibility with old settings.
*/
function migrateSettings() {
if (extension_settings.caption.local !== undefined) {
extension_settings.caption.source = extension_settings.caption.local ? 'local' : 'extras';
@@ -27,6 +26,20 @@ function migrateSettings() {
extension_settings.caption.source = 'extras';
}
if (extension_settings.caption.source === 'openai') {
extension_settings.caption.source = 'multimodal';
extension_settings.caption.multimodal_api = 'openai';
extension_settings.caption.multimodal_model = 'gpt-4-vision-preview';
}
if (!extension_settings.caption.multimodal_api) {
extension_settings.caption.multimodal_api = 'openai';
}
if (!extension_settings.caption.multimodal_model) {
extension_settings.caption.multimodal_model = 'gpt-4-vision-preview';
}
if (!extension_settings.caption.prompt) {
extension_settings.caption.prompt = PROMPT_DEFAULT;
}
@@ -36,6 +49,9 @@ function migrateSettings() {
}
}
/**
* Sets an image icon for the send button.
*/
async function setImageIcon() {
try {
const sendButton = $('#send_picture .extensionsMenuExtensionButton');
@@ -47,6 +63,9 @@ async function setImageIcon() {
}
}
/**
* Sets a spinner icon for the send button.
*/
async function setSpinnerIcon() {
try {
const sendButton = $('#send_picture .extensionsMenuExtensionButton');
@@ -58,6 +77,11 @@ async function setSpinnerIcon() {
}
}
/**
* Sends a captioned message to the chat.
* @param {string} caption Caption text
* @param {string} image Image URL
*/
async function sendCaptionedMessage(caption, image) {
const context = getContext();
let template = extension_settings.caption.template || TEMPLATE_DEFAULT;
@@ -93,14 +117,13 @@ async function sendCaptionedMessage(caption, image) {
};
context.chat.push(message);
context.addOneMessage(message);
await context.generate('caption');
}
/**
*
* Generates a caption for an image using a selected source.
* @param {string} base64Img Base64 encoded image without the data:image/...;base64, prefix
* @param {string} fileData Base64 encoded image with the data:image/...;base64, prefix
* @returns
* @returns {Promise<{caption: string}>} Generated caption
*/
async function doCaptionRequest(base64Img, fileData) {
switch (extension_settings.caption.source) {
@@ -110,13 +133,18 @@ async function doCaptionRequest(base64Img, fileData) {
return await captionExtras(base64Img);
case 'horde':
return await captionHorde(base64Img);
case 'openai':
return await captionOpenAI(fileData);
case 'multimodal':
return await captionMultimodal(fileData);
default:
throw new Error('Unknown caption source.');
}
}
/**
* Generates a caption for an image using Extras API.
* @param {string} base64Img Base64 encoded image without the data:image/...;base64, prefix
* @returns {Promise<{caption: string}>} Generated caption
*/
async function captionExtras(base64Img) {
if (!modules.includes('caption')) {
throw new Error('No captioning module is available.');
@@ -142,6 +170,11 @@ async function captionExtras(base64Img) {
return data;
}
/**
* Generates a caption for an image using a local model.
* @param {string} base64Img Base64 encoded image without the data:image/...;base64, prefix
* @returns {Promise<{caption: string}>} Generated caption
*/
async function captionLocal(base64Img) {
const apiResult = await fetch('/api/extra/caption', {
method: 'POST',
@@ -157,6 +190,11 @@ async function captionLocal(base64Img) {
return data;
}
/**
* Generates a caption for an image using a Horde model.
* @param {string} base64Img Base64 encoded image without the data:image/...;base64, prefix
* @returns {Promise<{caption: string}>} Generated caption
*/
async function captionHorde(base64Img) {
const apiResult = await fetch('/api/horde/caption-image', {
method: 'POST',
@@ -172,20 +210,15 @@ async function captionHorde(base64Img) {
return data;
}
async function captionOpenAI(base64Img) {
/**
* Generates a caption for an image using a multimodal model.
* @param {string} base64Img Base64 encoded image with the data:image/...;base64, prefix
* @returns {Promise<{caption: string}>} Generated caption
*/
async function captionMultimodal(base64Img) {
const prompt = extension_settings.caption.prompt || PROMPT_DEFAULT;
const apiResult = await fetch('/api/openai/caption-image', {
method: 'POST',
headers: getRequestHeaders(),
body: JSON.stringify({ image: base64Img, prompt: prompt }),
});
if (!apiResult.ok) {
throw new Error('Failed to caption image via OpenAI.');
}
const data = await apiResult.json();
return data;
const caption = await getMultimodalCaption(base64Img, prompt);
return { caption };
}
async function onSelectImage(e) {
@@ -201,11 +234,8 @@ async function onSelectImage(e) {
const fileData = await getBase64Async(file);
const base64Format = fileData.split(',')[0].split(';')[0].split('/')[1];
const base64Data = fileData.split(',')[1];
const data = await doCaptionRequest(base64Data, fileData);
const caption = data.caption;
const imageToSave = data.thumbnail ? data.thumbnail : base64Data;
const format = data.thumbnail ? 'jpeg' : base64Format;
const imagePath = await saveBase64AsFile(imageToSave, context.name2, '', format);
const { caption } = await doCaptionRequest(base64Data, fileData);
const imagePath = await saveBase64AsFile(base64Data, context.name2, '', base64Format);
await sendCaptionedMessage(caption, imagePath);
}
catch (error) {
@@ -228,20 +258,26 @@ jQuery(function () {
const sendButton = $(`
<div id="send_picture" class="list-group-item flex-container flexGap5">
<div class="fa-solid fa-image extensionsMenuExtensionButton"></div>
Send a Picture
Generate Caption
</div>`);
const attachFileButton = $(`
<div id="attachFile" class="list-group-item flex-container flexGap5">
<div class="fa-solid fa-paperclip extensionsMenuExtensionButton"></div>
Attach a File
</div>`);
$('#extensionsMenu').prepend(sendButton);
$(sendButton).hide();
$('#extensionsMenu').prepend(attachFileButton);
$(sendButton).on('click', () => {
const hasCaptionModule =
(modules.includes('caption') && extension_settings.caption.source === 'extras') ||
(extension_settings.caption.source === 'openai' && secret_state[SECRET_KEYS.OPENAI]) ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openai' && secret_state[SECRET_KEYS.OPENAI]) ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openrouter' && secret_state[SECRET_KEYS.OPENROUTER]) ||
extension_settings.caption.source === 'local' ||
extension_settings.caption.source === 'horde';
if (!hasCaptionModule) {
toastr.error('No captioning module is available. Choose other captioning source in the extension settings.');
toastr.error('Choose other captioning source in the extension settings.', 'Captioning is not available');
return;
}
@@ -249,7 +285,7 @@ jQuery(function () {
});
}
function addPictureSendForm() {
const inputHtml = `<input id="img_file" type="file" accept="image/*">`;
const inputHtml = `<input id="img_file" type="file" hidden accept="image/*">`;
const imgForm = document.createElement('form');
imgForm.id = 'img_form';
$(imgForm).append(inputHtml);
@@ -257,6 +293,29 @@ jQuery(function () {
$('#form_sheld').append(imgForm);
$('#img_file').on('change', onSelectImage);
}
/**
 * Shows/hides the multimodal caption settings UI depending on the selected
 * source, and syncs the API/model dropdowns with the stored settings.
 */
function switchMultimodalBlocks() {
    const isMultimodal = extension_settings.caption.source === 'multimodal';
    $('#caption_multimodal_block').toggle(isMultimodal);
    $('#caption_prompt_block').toggle(isMultimodal);
    $('#caption_multimodal_api').val(extension_settings.caption.multimodal_api);
    $('#caption_multimodal_model').val(extension_settings.caption.multimodal_model);
    // Only show model options belonging to the currently selected API.
    $('#caption_multimodal_model option').each(function () {
        const type = $(this).data('type');
        $(this).toggle(type === extension_settings.caption.multimodal_api);
    });
    // .off() before .on(): this function is re-entered whenever the API
    // changes, so without unbinding, the change handlers would stack up and
    // fire multiple times per event.
    $('#caption_multimodal_api').off('change').on('change', () => {
        const api = String($('#caption_multimodal_api').val());
        // Default to the first model available for the chosen API.
        const model = String($(`#caption_multimodal_model option[data-type="${api}"]`).first().val());
        extension_settings.caption.multimodal_api = api;
        extension_settings.caption.multimodal_model = model;
        saveSettingsDebounced();
        switchMultimodalBlocks();
    });
    $('#caption_multimodal_model').off('change').on('change', () => {
        extension_settings.caption.multimodal_model = String($('#caption_multimodal_model').val());
        saveSettingsDebounced();
    });
}
function addSettings() {
const html = `
<div class="caption_settings">
@@ -266,20 +325,39 @@ jQuery(function () {
<div class="inline-drawer-icon fa-solid fa-circle-chevron-down down"></div>
</div>
<div class="inline-drawer-content">
<label for="caption_source">Source:</label>
<label for="caption_source">Source</label>
<select id="caption_source" class="text_pole">
<option value="local">Local</option>
<option value="multimodal">Multimodal (OpenAI / OpenRouter)</option>
<option value="extras">Extras</option>
<option value="horde">Horde</option>
<option value="openai">OpenAI</option>
</select>
<label for="caption_prompt">Caption Prompt (OpenAI):</label>
<textarea id="caption_prompt" class="text_pole" rows="1" placeholder="&lt; Use default &gt;">${PROMPT_DEFAULT}</textarea>
<label for="caption_template">Message Template: <small>(use <tt>{{caption}}</tt> macro)</small></label>
<div id="caption_multimodal_block" class="flex-container wide100p">
<div class="flex1 flex-container flexFlowColumn flexNoGap">
<label for="caption_multimodal_api">API</label>
<select id="caption_multimodal_api" class="flex1 text_pole">
<option value="openai">OpenAI</option>
<option value="openrouter">OpenRouter</option>
</select>
</div>
<div class="flex1 flex-container flexFlowColumn flexNoGap">
<label for="caption_multimodal_model">Model</label>
<select id="caption_multimodal_model" class="flex1 text_pole">
<option data-type="openai" value="gpt-4-vision-preview">gpt-4-vision-preview</option>
<option data-type="openrouter" value="openai/gpt-4-vision-preview">openai/gpt-4-vision-preview</option>
<option data-type="openrouter" value="haotian-liu/llava-13b">haotian-liu/llava-13b</option>
</select>
</div>
</div>
<div id="caption_prompt_block">
<label for="caption_prompt">Caption Prompt</label>
<textarea id="caption_prompt" class="text_pole" rows="1" placeholder="&lt; Use default &gt;">${PROMPT_DEFAULT}</textarea>
</div>
<label for="caption_template">Message Template <small>(use <code>{{caption}}</code> macro)</small></label>
<textarea id="caption_template" class="text_pole" rows="2" placeholder="&lt; Use default &gt;">${TEMPLATE_DEFAULT}</textarea>
<label class="checkbox_label margin-bot-10px" for="caption_refine_mode">
<input id="caption_refine_mode" type="checkbox" class="checkbox">
Edit captions before generation
Edit captions before saving
</label>
</div>
</div>
@@ -293,7 +371,7 @@ jQuery(function () {
addSendPictureButton();
setImageIcon();
migrateSettings();
moduleWorker();
switchMultimodalBlocks();
$('#caption_refine_mode').prop('checked', !!(extension_settings.caption.refine_mode));
$('#caption_source').val(extension_settings.caption.source);
@@ -302,6 +380,7 @@ jQuery(function () {
$('#caption_refine_mode').on('input', onRefineModeInput);
$('#caption_source').on('change', () => {
extension_settings.caption.source = String($('#caption_source').val());
switchMultimodalBlocks();
saveSettingsDebounced();
});
$('#caption_prompt').on('input', () => {
@@ -312,5 +391,4 @@ jQuery(function () {
extension_settings.caption.template = String($('#caption_template').val());
saveSettingsDebounced();
});
setInterval(moduleWorker, UPDATE_INTERVAL);
});

View File

@@ -4,6 +4,7 @@ import { getContext, getApiUrl, modules, extension_settings, ModuleWorkerWrapper
import { loadMovingUIState, power_user } from "../../power-user.js";
import { registerSlashCommand } from "../../slash-commands.js";
import { onlyUnique, debounce, getCharaFilename, trimToEndSentence, trimToStartSentence } from "../../utils.js";
import { hideMutedSprites } from "../../group-chats.js";
export { MODULE_NAME };
const MODULE_NAME = 'expressions';
@@ -118,7 +119,7 @@ async function visualNovelSetCharacterSprites(container, name, expression) {
const isDisabled = group.disabled_members.includes(avatar);
// skip disabled characters
if (isDisabled) {
if (isDisabled && hideMutedSprites) {
continue;
}

View File

@@ -651,14 +651,12 @@ jQuery(function () {
<option value="main">Main API</option>
<option value="extras">Extras API</option>
</select><br>
<div class="flex-container justifyspacebetween alignitemscenter">
<span class="flex1">Current summary:</span>
<div id="memory_restore" class="menu_button flex1 margin0"><span>Restore Previous</span></div>
</div>
<textarea id="memory_contents" class="text_pole textarea_compact" rows="6" placeholder="Summary will be generated here..."></textarea>
<div class="memory_contents_controls">
<div id="memory_force_summarize" class="menu_button menu_button_icon">
@@ -669,14 +667,17 @@ jQuery(function () {
<label for="memory_skipWIAN"><input id="memory_skipWIAN" type="checkbox" />No WI/AN</label>
</div>
<div class="memory_contents_controls">
<div id="summarySettingsBlockToggle" class="menu_button">Settings</div>
<div id="summarySettingsBlockToggle" class="menu_button menu_button_icon" title="Edit summarization prompt, insertion position, etc.">
<i class="fa-solid fa-cog"></i>
<span>Summary Settings</span>
</div>
</div>
<div id="summarySettingsBlock" style="display:none;">
<div id="summarySettingsBlock" style="display:none;">
<div class="memory_template">
<label for="memory_template">Insertion string:</label>
<label for="memory_template">Insertion Template</label>
<textarea id="memory_template" class="text_pole textarea_compact" rows="2" placeholder="{{summary}} will resolve to the current summary contents."></textarea>
</div>
<label for="memory_position">Position:</label>
<label for="memory_position">Injection Position</label>
<div class="radio_group">
<label>
<input type="radio" name="memory_position" value="2" />

View File

@@ -0,0 +1,44 @@
<div id="quickReply_contextMenuEditor_template">
<div class="quickReply_contextMenuEditor">
<h3><strong>Context Menu Editor</strong></h3>
<div id="quickReply_contextMenuEditor_content">
<template id="quickReply_contextMenuEditor_itemTemplate">
<div class="quickReplyContextMenuEditor_item flex-container alignitemscenter" data-order="0">
<span class="drag-handle ui-sortable-handle"></span>
<select class="quickReply_contextMenuEditor_preset"></select>
<label class="flex-container" title="When enabled, the current Quick Reply will be sent together with (before) the clicked QR from the context menu.">
Chaining:
<input type="checkbox" class="quickReply_contextMenuEditor_chaining">
</label>
<span class="quickReply_contextMenuEditor_remove menu_button menu_button_icon fa-solid fa-trash-can" title="Remove entry"></span>
</div>
</template>
</div>
<div class="quickReply_contextMenuEditor_actions">
<span id="quickReply_contextMenuEditor_addPreset" class="menu_button menu_button_icon fa-solid fa-plus" title="Add preset to context menu"></span>
</div>
<h3><strong>Auto-Execute</strong></h3>
<div class="flex-container flexFlowColumn">
<label class="checkbox_label" for="quickReply_hidden">
<input type="checkbox" id="quickReply_hidden" >
<span><i class="fa-solid fa-fw fa-eye-slash"></i> Invisible (auto-execute only)</span>
</label>
<label class="checkbox_label" for="quickReply_autoExecute_appStartup">
<input type="checkbox" id="quickReply_autoExecute_appStartup" >
<span><i class="fa-solid fa-fw fa-rocket"></i> Execute on app startup</span>
</label>
<label class="checkbox_label" for="quickReply_autoExecute_userMessage">
<input type="checkbox" id="quickReply_autoExecute_userMessage" >
<span><i class="fa-solid fa-fw fa-user"></i> Execute on user message</span>
</label>
<label class="checkbox_label" for="quickReply_autoExecute_botMessage">
<input type="checkbox" id="quickReply_autoExecute_botMessage" >
<span><i class="fa-solid fa-fw fa-robot"></i> Execute on AI message</span>
</label>
<label class="checkbox_label" for="quickReply_autoExecute_chatLoad">
<input type="checkbox" id="quickReply_autoExecute_chatLoad" >
<span><i class="fa-solid fa-fw fa-message"></i> Execute on opening chat</span>
</label>
</div>
</div>
</div>

View File

@@ -1,7 +1,10 @@
import { saveSettingsDebounced, callPopup, getRequestHeaders, substituteParams } from "../../../script.js";
import { saveSettingsDebounced, callPopup, getRequestHeaders, substituteParams, eventSource, event_types } from "../../../script.js";
import { getContext, extension_settings } from "../../extensions.js";
import { initScrollHeight, resetScrollHeight } from "../../utils.js";
import { initScrollHeight, resetScrollHeight, getSortableDelay } from "../../utils.js";
import { executeSlashCommands, registerSlashCommand } from "../../slash-commands.js";
import { ContextMenu } from "./src/ContextMenu.js";
import { MenuItem } from "./src/MenuItem.js";
import { MenuHeader } from "./src/MenuHeader.js";
export { MODULE_NAME };
@@ -21,7 +24,7 @@ const defaultSettings = {
//method from worldinfo
async function updateQuickReplyPresetList() {
var result = await fetch("/getsettings", {
const result = await fetch("/getsettings", {
method: "POST",
headers: getRequestHeaders(),
body: JSON.stringify({}),
@@ -99,10 +102,105 @@ function onQuickReplyInput(id) {
// Persists the edited label for quick-reply slot `id` (1-based in the UI,
// 0-based in storage), mirrors it onto the slot's bar button, rebuilds the
// bar so derived state stays current, and schedules a debounced save.
function onQuickReplyLabelInput(id) {
extension_settings.quickReply.quickReplySlots[id - 1].label = $(`#quickReply${id}Label`).val();
$(`#quickReply${id}`).text(String($(`#quickReply${id}Label`).val()));
addQuickReplyBar();
saveSettingsDebounced();
}
// Reads the serialized context-menu definition stored on slot `id`'s
// container element (its data-contextMenu attribute) back into settings.
// Assumes the attribute always holds valid JSON — it is written by
// generateQuickReplyElements / the editor's save path.
async function onQuickReplyContextMenuChange(id) {
extension_settings.quickReply.quickReplySlots[id - 1].contextMenu = JSON.parse($(`#quickReplyContainer > [data-order="${id}"]`).attr('data-contextMenu'))
saveSettingsDebounced();
}
// Opens the "Additional options" popup for quick-reply slot `id`: a context
// menu editor (linked presets + chaining flags) plus the hidden/auto-execute
// checkboxes. Checkbox changes are saved immediately; the context-menu list
// is only written back when the popup is confirmed.
async function onQuickReplyCtxButtonClick(id) {
const editorHtml = $(await $.get('scripts/extensions/quick-reply/contextMenuEditor.html'));
// The popup promise is awaited at the bottom; the editor DOM is wired first.
const popupResult = callPopup(editorHtml, "confirm", undefined, { okButton: "Save", wide: false, large: false, rows: 1 });
const qr = extension_settings.quickReply.quickReplySlots[id - 1];
if (!qr.contextMenu) {
qr.contextMenu = [];
}
/**@type {HTMLTemplateElement}*/
const tpl = document.querySelector('#quickReply_contextMenuEditor_itemTemplate');
// Fills a preset <select> with every known preset, selecting the item's
// current one; a blank "Select a preset" entry comes first.
const fillPresetSelect = (select, item) => {
[{ name: 'Select a preset', value: '' }, ...presets].forEach(preset => {
const opt = document.createElement('option'); {
opt.value = preset.value ?? preset.name;
opt.textContent = preset.name;
opt.selected = preset.name == item.preset;
select.append(opt);
}
});
};
// Clones the row template for one context-menu entry and appends it to the
// editor list; the remove button deletes just that row.
const addCtxItem = (item, idx) => {
const dom = tpl.content.cloneNode(true);
const ctxItem = dom.querySelector('.quickReplyContextMenuEditor_item');
ctxItem.setAttribute('data-order', idx);
const select = ctxItem.querySelector('.quickReply_contextMenuEditor_preset');
fillPresetSelect(select, item);
dom.querySelector('.quickReply_contextMenuEditor_chaining').checked = item.chain;
$('.quickReply_contextMenuEditor_remove', ctxItem).on('click', () => ctxItem.remove());
document.querySelector('#quickReply_contextMenuEditor_content').append(ctxItem);
}
// One row per existing entry, plus a trailing empty row for a new entry.
[...qr.contextMenu, {}].forEach((item, idx) => {
addCtxItem(item, idx)
});
$('#quickReply_contextMenuEditor_addPreset').on('click', () => {
addCtxItem({}, document.querySelector('#quickReply_contextMenuEditor_content').children.length);
});
$('#quickReply_contextMenuEditor_content').sortable({
delay: getSortableDelay(),
stop: () => { },
});
// Seed checkboxes from the slot; missing flags default to unchecked.
$('#quickReply_autoExecute_userMessage').prop('checked', qr.autoExecute_userMessage ?? false);
$('#quickReply_autoExecute_botMessage').prop('checked', qr.autoExecute_botMessage ?? false);
$('#quickReply_autoExecute_chatLoad').prop('checked', qr.autoExecute_chatLoad ?? false);
$('#quickReply_autoExecute_appStartup').prop('checked', qr.autoExecute_appStartup ?? false);
$('#quickReply_hidden').prop('checked', qr.hidden ?? false);
// Each checkbox writes straight to the slot and saves; these changes are
// kept even if the popup is later cancelled.
$('#quickReply_hidden').on('input', () => {
const state = !!$('#quickReply_hidden').prop('checked');
qr.hidden = state;
saveSettingsDebounced();
});
$('#quickReply_autoExecute_appStartup').on('input', () => {
const state = !!$('#quickReply_autoExecute_appStartup').prop('checked');
qr.autoExecute_appStartup = state;
saveSettingsDebounced();
});
$('#quickReply_autoExecute_userMessage').on('input', () => {
const state = !!$('#quickReply_autoExecute_userMessage').prop('checked');
qr.autoExecute_userMessage = state;
saveSettingsDebounced();
});
$('#quickReply_autoExecute_botMessage').on('input', () => {
const state = !!$('#quickReply_autoExecute_botMessage').prop('checked');
qr.autoExecute_botMessage = state;
saveSettingsDebounced();
});
$('#quickReply_autoExecute_chatLoad').on('input', () => {
const state = !!$('#quickReply_autoExecute_chatLoad').prop('checked');
qr.autoExecute_chatLoad = state;
saveSettingsDebounced();
});
// On confirm: harvest rows in display order, drop entries without a preset,
// mirror the result onto the container attribute, and persist the preset.
if (await popupResult) {
qr.contextMenu = Array.from(document.querySelectorAll('#quickReply_contextMenuEditor_content > .quickReplyContextMenuEditor_item'))
.map(item => ({
preset: item.querySelector('.quickReply_contextMenuEditor_preset').value,
chain: item.querySelector('.quickReply_contextMenuEditor_chaining').checked,
}))
.filter(item => item.preset);
$(`#quickReplyContainer[data-order="${id}"]`).attr('data-contextMenu', JSON.stringify(qr.contextMenu));
updateQuickReplyPreset();
onQuickReplyLabelInput(id);
}
}
async function onQuickReplyEnabledInput() {
let isEnabled = $(this).prop('checked')
extension_settings.quickReply.quickReplyEnabled = !!isEnabled;
@@ -129,13 +227,32 @@ async function onAutoInputInject() {
}
async function sendQuickReply(index) {
const existingText = $("#send_textarea").val();
const prompt = extension_settings.quickReply.quickReplySlots[index]?.mes || '';
return await performQuickReply(prompt, index);
}
/**
 * Finds a quick reply slot by its button label and executes it.
 * Exposed on `window` so external scripts can trigger replies by name.
 * @param {string} name Label of the quick reply to run
 * @returns {Promise<*>} Result of performQuickReply for the slot's message
 * @throws {Error} When Quick Reply is disabled or no slot has that label
 */
async function executeQuickReplyByName(name) {
    const settings = extension_settings.quickReply;
    if (!settings.quickReplyEnabled) {
        throw new Error('Quick Reply is disabled');
    }
    const slot = settings.quickReplySlots.find((candidate) => candidate.label == name);
    if (!slot) {
        throw new Error(`Quick Reply "${name}" not found`);
    }
    return await performQuickReply(slot.mes);
}
window['executeQuickReplyByName'] = executeQuickReplyByName;
async function performQuickReply(prompt, index) {
if (!prompt) {
console.warn(`Quick reply slot ${index} is empty! Aborting.`);
return;
}
const existingText = $("#send_textarea").val();
let newText;
@@ -150,14 +267,14 @@ async function sendQuickReply(index) {
newText = `${prompt} `;
}
newText = substituteParams(newText);
// the prompt starts with '/' - execute slash commands natively
if (prompt.startsWith('/')) {
await executeSlashCommands(newText);
return;
const result = await executeSlashCommands(newText);
return result?.pipe;
}
newText = substituteParams(newText);
$("#send_textarea").val(newText);
// Set the focus back to the textarea
@@ -170,14 +287,55 @@ async function sendQuickReply(index) {
}
// Recursively builds the context-menu tree for quick reply `qr`.
// `chainMes` is the accumulated message of chained ancestors (joined with
// " | "); `hierarchy` tracks visited preset names to break cycles, and
// `labelHierarchy` tracks ancestor labels for %%parent%% substitution.
// Returns { label, mes, children } where children are MenuItem/MenuHeader.
function buildContextMenu(qr, chainMes = null, hierarchy = [], labelHierarchy = []) {
const tree = {
label: qr.label,
// Chained ancestors are prepended, pipe-separated, slash-command style.
mes: (chainMes && qr.mes ? `${chainMes} | ` : '') + qr.mes,
children: [],
};
qr.contextMenu?.forEach(ctxItem => {
let chain = ctxItem.chain;
let subName = ctxItem.preset;
const sub = presets.find(it => it.name == subName);
if (sub) {
// prevent circular references
if (hierarchy.indexOf(sub.name) == -1) {
const nextHierarchy = [...hierarchy, sub.name];
const nextLabelHierarchy = [...labelHierarchy, tree.label];
tree.children.push(new MenuHeader(sub.name));
sub.quickReplySlots.forEach(subQr => {
const subInfo = buildContextMenu(subQr, chain ? tree.mes : null, nextHierarchy, nextLabelHierarchy);
tree.children.push(new MenuItem(
subInfo.label,
subInfo.mes,
(evt) => {
evt.stopPropagation();
// %%parent%% / %%parent-N%% resolve to an ancestor label: the
// optional capture is a negative offset into labelHierarchy
// (default -1 = immediate parent).
performQuickReply(subInfo.mes.replace(/%%parent(-\d+)?%%/g, (_, index) => {
return nextLabelHierarchy.slice(parseInt(index ?? '-1'))[0];
}));
},
subInfo.children,
));
});
}
}
});
return tree;
}
function addQuickReplyBar() {
$('#quickReplyBar').remove();
let quickReplyButtonHtml = '';
for (let i = 0; i < extension_settings.quickReply.numberOfSlots; i++) {
let quickReplyMes = extension_settings.quickReply.quickReplySlots[i]?.mes || '';
let quickReplyLabel = extension_settings.quickReply.quickReplySlots[i]?.label || '';
quickReplyButtonHtml += `<div title="${quickReplyMes}" class="quickReplyButton" data-index="${i}" id="quickReply${i + 1}">${quickReplyLabel}</div>`;
const qr = extension_settings.quickReply.quickReplySlots[i];
const quickReplyMes = qr?.mes || '';
const quickReplyLabel = qr?.label || '';
const hidden = qr?.hidden ?? false;
let expander = '';
if (extension_settings.quickReply.quickReplySlots[i]?.contextMenu?.length) {
expander = '<span class="ctx-expander" title="Open context menu">⋮</span>';
}
quickReplyButtonHtml += `<div title="${quickReplyMes}" class="quickReplyButton ${hidden ? 'displayNone' : ''}" data-index="${i}" id="quickReply${i + 1}">${quickReplyLabel}${expander}</div>`;
}
const quickReplyBarFullHtml = `
@@ -194,6 +352,27 @@ function addQuickReplyBar() {
let index = $(this).data('index');
sendQuickReply(index);
});
$('.quickReplyButton > .ctx-expander').on('click', function (evt) {
evt.stopPropagation();
let index = $(this.closest('.quickReplyButton')).data('index');
const qr = extension_settings.quickReply.quickReplySlots[index];
if (qr.contextMenu?.length) {
evt.preventDefault();
const tree = buildContextMenu(qr);
const menu = new ContextMenu(tree.children);
menu.show(evt);
}
})
$('.quickReplyButton').on('contextmenu', function (evt) {
let index = $(this).data('index');
const qr = extension_settings.quickReply.quickReplySlots[index];
if (qr.contextMenu?.length) {
evt.preventDefault();
const tree = buildContextMenu(qr);
const menu = new ContextMenu(tree.children);
menu.show(evt);
}
});
}
async function moduleWorker() {
@@ -248,6 +427,50 @@ async function saveQuickReplyPreset() {
}
}
//just a copy of save function with the name hardcoded to currently selected preset
//just a copy of save function with the name hardcoded to currently selected preset
// Serializes the current quick-reply state under the name selected in the
// preset dropdown, POSTs it to /savequickreply, and syncs the in-memory
// `presets` cache and the dropdown. No-op when no preset is selected.
async function updateQuickReplyPreset() {
const name = $("#quickReplyPresets").val()
if (!name) {
return;
}
const quickReplyPreset = {
name: name,
quickReplyEnabled: extension_settings.quickReply.quickReplyEnabled,
quickReplySlots: extension_settings.quickReply.quickReplySlots,
numberOfSlots: extension_settings.quickReply.numberOfSlots,
AutoInputInject: extension_settings.quickReply.AutoInputInject,
selectedPreset: name,
}
const response = await fetch('/savequickreply', {
method: 'POST',
headers: getRequestHeaders(),
body: JSON.stringify(quickReplyPreset)
});
if (response.ok) {
// New name: append to cache and add a (selected) dropdown option.
const quickReplyPresetIndex = presets.findIndex(x => x.name == name);
if (quickReplyPresetIndex == -1) {
presets.push(quickReplyPreset);
const option = document.createElement('option');
option.selected = true;
option.value = name;
option.innerText = name;
$('#quickReplyPresets').append(option);
}
// Known name: overwrite the cached entry and re-select its option.
else {
presets[quickReplyPresetIndex] = quickReplyPreset;
$(`#quickReplyPresets option[value="${name}"]`).prop('selected', true);
}
saveSettingsDebounced();
} else {
toastr.warning('Failed to save Quick Reply Preset.')
}
}
async function onQuickReplyNumberOfSlotsInput() {
const $input = $('#quickReplyNumberOfSlots');
let numberOfSlots = Number($input.val());
@@ -291,10 +514,13 @@ function generateQuickReplyElements() {
let quickReplyHtml = '';
for (let i = 1; i <= extension_settings.quickReply.numberOfSlots; i++) {
let itemNumber = i + 1
quickReplyHtml += `
<div class="flex-container alignitemsflexstart">
<div class="flex-container alignitemscenter" data-order="${i}">
<span class="drag-handle ui-sortable-handle">☰</span>
<input class="text_pole wide30p" id="quickReply${i}Label" placeholder="(Button label)">
<textarea id="quickReply${i}Mes" placeholder="(Custom message or /command)" class="text_pole widthUnset flex1" rows="2"></textarea>
<span class="menu_button menu_button_icon" id="quickReply${i}CtxButton" title="Additional options: context menu, auto-execution"></span>
<textarea id="quickReply${i}Mes" placeholder="(Custom message or /command)" class="text_pole widthUnset flex1 autoSetHeight" rows="2"></textarea>
</div>
`;
}
@@ -304,6 +530,8 @@ function generateQuickReplyElements() {
for (let i = 1; i <= extension_settings.quickReply.numberOfSlots; i++) {
$(`#quickReply${i}Mes`).on('input', function () { onQuickReplyInput(i); });
$(`#quickReply${i}Label`).on('input', function () { onQuickReplyLabelInput(i); });
$(`#quickReply${i}CtxButton`).on('click', function () { onQuickReplyCtxButtonClick(i); });
$(`#quickReplyContainer > [data-order="${i}"]`).attr('data-contextMenu', JSON.stringify(extension_settings.quickReply.quickReplySlots[i - 1]?.contextMenu ?? []));
}
$('.quickReplySettings .inline-drawer-toggle').off('click').on('click', function () {
@@ -352,6 +580,95 @@ async function doQR(_, text) {
whichQR.trigger('click')
}
// Called after a drag-and-drop reorder of the quick-reply slot editor.
// First pass renumbers the DOM (data-order attributes and the per-slot
// input/textarea ids) to match the new visual order; second pass replays
// the per-slot change handlers so extension_settings is rebuilt from the
// renumbered DOM. The two passes must stay separate: the handlers read
// elements by id, so all ids must be final before any handler runs.
function saveQROrder() {
//update html-level order data to match new sort
let i = 1
$('#quickReplyContainer').children().each(function () {
$(this).attr('data-order', i)
$(this).find('input').attr('id', `quickReply${i}Label`)
$(this).find('textarea').attr('id', `quickReply${i}Mes`)
i++
});
//rebuild the extension_Settings array based on new order
i = 1
$('#quickReplyContainer').children().each(function () {
onQuickReplyContextMenuChange(i)
onQuickReplyLabelInput(i)
onQuickReplyInput(i)
i++
});
}
/**
 * Executes quick replies on message received.
 * Runs, in slot order, every quick reply flagged autoExecute_botMessage.
 * @param {number} index New message index
 * @returns {Promise<void>}
 */
async function onMessageReceived(index) {
    const settings = extension_settings.quickReply;
    if (!settings.quickReplyEnabled) return;
    for (let slotIndex = 0; slotIndex < settings.numberOfSlots; slotIndex++) {
        const slot = settings.quickReplySlots[slotIndex];
        if (!slot?.autoExecute_botMessage) continue;
        const message = getContext().chat[index];
        // Skip empty messages and the '...' placeholder.
        if (message?.mes && message?.mes !== '...') {
            await sendQuickReply(slotIndex);
        }
    }
}
/**
 * Executes quick replies on message sent.
 * Runs, in slot order, every quick reply flagged autoExecute_userMessage.
 * @param {number} index New message index
 * @returns {Promise<void>}
 */
async function onMessageSent(index) {
    const settings = extension_settings.quickReply;
    if (!settings.quickReplyEnabled) return;
    for (let slotIndex = 0; slotIndex < settings.numberOfSlots; slotIndex++) {
        const slot = settings.quickReplySlots[slotIndex];
        if (!slot?.autoExecute_userMessage) continue;
        const message = getContext().chat[index];
        // Skip empty messages and the '...' placeholder.
        if (message?.mes && message?.mes !== '...') {
            await sendQuickReply(slotIndex);
        }
    }
}
/**
 * Executes quick replies on chat changed.
 * Runs, in slot order, every quick reply flagged autoExecute_chatLoad.
 * @param {string} chatId New chat id (falsy when the chat was closed,
 * in which case nothing is executed)
 * @returns {Promise<void>}
 */
async function onChatChanged(chatId) {
    const settings = extension_settings.quickReply;
    if (!settings.quickReplyEnabled) return;
    for (let slotIndex = 0; slotIndex < settings.numberOfSlots; slotIndex++) {
        const slot = settings.quickReplySlots[slotIndex];
        if (slot?.autoExecute_chatLoad && chatId) {
            await sendQuickReply(slotIndex);
        }
    }
}
/**
 * Executes quick replies on app ready.
 * Runs, in slot order, every quick reply flagged autoExecute_appStartup.
 * @returns {Promise<void>}
 */
async function onAppReady() {
    const settings = extension_settings.quickReply;
    if (!settings.quickReplyEnabled) return;
    for (let slotIndex = 0; slotIndex < settings.numberOfSlots; slotIndex++) {
        const slot = settings.quickReplySlots[slotIndex];
        if (slot?.autoExecute_appStartup) {
            await sendQuickReply(slotIndex);
        }
    }
}
jQuery(async () => {
moduleWorker();
setInterval(moduleWorker, UPDATE_INTERVAL);
@@ -386,7 +703,10 @@ jQuery(async () => {
</select>
<div id="quickReplyPresetSaveButton" class="menu_button menu_button_icon">
<div class="fa-solid fa-save"></div>
<span>Save</span>
<span>Save New</span>
</div>
<div id="quickReplyPresetUpdateButton" class="menu_button menu_button_icon">
<span>Update</span>
</div>
</div>
<label for="quickReplyNumberOfSlots">Number of slots:</label>
@@ -413,6 +733,12 @@ jQuery(async () => {
$('#quickReplyEnabled').on('input', onQuickReplyEnabledInput);
$('#quickReplyNumberOfSlotsApply').on('click', onQuickReplyNumberOfSlotsInput);
$("#quickReplyPresetSaveButton").on('click', saveQuickReplyPreset);
$("#quickReplyPresetUpdateButton").on('click', updateQuickReplyPreset);
$('#quickReplyContainer').sortable({
delay: getSortableDelay(),
stop: saveQROrder,
});
$("#quickReplyPresets").on('change', async function () {
const quickReplyPresetSelected = $(this).find(':selected').val();
@@ -423,6 +749,11 @@ jQuery(async () => {
await loadSettings('init');
addQuickReplyBar();
eventSource.on(event_types.MESSAGE_RECEIVED, onMessageReceived);
eventSource.on(event_types.MESSAGE_SENT, onMessageSent);
eventSource.on(event_types.CHAT_CHANGED, onChatChanged);
eventSource.on(event_types.APP_READY, onAppReady);
});
jQuery(() => {

View File

@@ -0,0 +1,65 @@
import { MenuItem } from "./MenuItem.js";
export class ContextMenu {
    /**@type {MenuItem[]}*/ itemList = [];
    /**@type {Boolean}*/ isActive = false;
    /**@type {HTMLElement}*/ root;
    /**@type {HTMLElement}*/ menu;

    /**
     * Top-level context menu backed by a full-screen click-away backdrop.
     * @param {MenuItem[]} items Top-level menu entries
     */
    constructor(/**@type {MenuItem[]}*/items) {
        this.itemList = items;
        // Expanding one item collapses all of its siblings.
        for (const item of items) {
            item.onExpand = () => {
                for (const other of items) {
                    if (other != item) {
                        other.collapse();
                    }
                }
            };
        }
    }

    /** Lazily builds the backdrop and menu DOM; returns the backdrop root. */
    render() {
        if (!this.root) {
            const blocker = document.createElement('div');
            this.root = blocker;
            blocker.classList.add('ctx-blocker');
            // Clicking anywhere on the backdrop dismisses the menu.
            blocker.addEventListener('click', () => this.hide());
            const menu = document.createElement('ul');
            this.menu = menu;
            menu.classList.add('list-group');
            menu.classList.add('ctx-menu');
            for (const entry of this.itemList) {
                menu.append(entry.render());
            }
            blocker.append(menu);
        }
        return this.root;
    }

    /** Shows the menu anchored above the pointer position of `evt`. */
    show({ clientX, clientY }) {
        if (this.isActive) return;
        this.isActive = true;
        this.render();
        // Pin the menu's bottom edge to the click point so it opens upward.
        this.menu.style.bottom = `${window.innerHeight - clientY}px`;
        this.menu.style.left = `${clientX}px`;
        document.body.append(this.root);
    }

    /** Detaches the menu and backdrop from the document. */
    hide() {
        this.root?.remove();
        this.isActive = false;
    }

    /** Hides the menu when visible, shows it at `evt` when hidden. */
    toggle(/**@type {PointerEvent}*/evt) {
        this.isActive ? this.hide() : this.show(evt);
    }
}

View File

@@ -0,0 +1,20 @@
import { MenuItem } from "./MenuItem.js";
export class MenuHeader extends MenuItem {
    /**
     * Non-clickable section header row inside a context menu.
     * @param {String} label Header text
     */
    constructor(/**@type {String}*/label) {
        super(label, null, null);
    }

    /** Lazily builds the header list item; returns the cached element. */
    render() {
        if (this.root) {
            return this.root;
        }
        const item = document.createElement('li');
        item.classList.add('list-group-item');
        item.classList.add('ctx-header');
        item.append(this.label);
        this.root = item;
        return this.root;
    }
}

View File

@@ -0,0 +1,76 @@
import { SubMenu } from "./SubMenu.js";
export class MenuItem {
    /**@type {String}*/ label;
    /**@type {Object}*/ value;
    /**@type {Function}*/ callback;
    /**@type {MenuItem[]}*/ childList = [];
    /**@type {SubMenu}*/ subMenu;
    /**@type {Boolean}*/ isForceExpanded = false;
    /**@type {HTMLElement}*/ root;
    /**@type {Function}*/ onExpand;

    /**
     * Single entry in a context menu, optionally with a nested submenu.
     * @param {String} label Text shown for the item
     * @param {Object} value Used as the item's tooltip (title attribute)
     * @param {function} callback Click handler; receives (event, item)
     * @param {MenuItem[]} children Entries of the nested submenu, if any
     */
    constructor(/**@type {String}*/label, /**@type {Object}*/value, /**@type {function}*/callback, /**@type {MenuItem[]}*/children = []) {
        this.label = label;
        this.value = value;
        this.callback = callback;
        this.childList = children;
    }

    /** Lazily builds the item's <li>, wiring submenu hover/expander; returns it. */
    render() {
        if (!this.root) {
            const item = document.createElement('li'); {
                this.root = item;
                item.classList.add('list-group-item');
                item.classList.add('ctx-item');
                item.title = this.value;
                if (this.callback) {
                    item.addEventListener('click', (evt) => this.callback(evt, this));
                }
                item.append(this.label);
                if (this.childList.length > 0) {
                    item.classList.add('ctx-has-children');
                    const sub = new SubMenu(this.childList);
                    this.subMenu = sub;
                    const trigger = document.createElement('div'); {
                        trigger.classList.add('ctx-expander');
                        trigger.textContent = '⋮';
                        trigger.addEventListener('click', (evt) => {
                            // Don't let the expander also fire the item's callback.
                            evt.stopPropagation();
                            this.toggle();
                        });
                        item.append(trigger);
                    }
                    item.addEventListener('mouseover', () => sub.show(item));
                    item.addEventListener('mouseleave', () => sub.hide());
                }
            }
        }
        return this.root;
    }

    /** Opens this item's submenu and notifies the owner to collapse siblings. */
    expand() {
        this.subMenu?.show(this.root);
        if (this.onExpand) {
            this.onExpand();
        }
    }

    /** Closes this item's submenu (no-op when there is none). */
    collapse() {
        this.subMenu?.hide();
    }

    /**
     * Toggles the submenu: collapse when open, expand when closed.
     * BUGFIX: the branches were inverted (expand when active, collapse when
     * inactive) — both no-ops, since SubMenu.show() early-returns while
     * active and hide() while inactive, so the expander click did nothing.
     * Matches the correct pattern used by SubMenu.toggle().
     */
    toggle() {
        if (this.subMenu.isActive) {
            this.collapse();
        } else {
            this.expand();
        }
    }
}

View File

@@ -0,0 +1,64 @@
import { MenuItem } from "./MenuItem.js";
export class SubMenu {
    /**@type {MenuItem[]}*/ itemList = [];
    /**@type {Boolean}*/ isActive = false;
    /**@type {HTMLElement}*/ root;

    /**
     * Nested fly-out menu attached to a parent MenuItem element.
     * @param {MenuItem[]} items Entries shown in this submenu
     */
    constructor(/**@type {MenuItem[]}*/items) {
        this.itemList = items;
    }

    /** Lazily builds the submenu's <ul> with its items; returns the root. */
    render() {
        if (!this.root) {
            const menu = document.createElement('ul'); {
                this.root = menu;
                menu.classList.add('list-group');
                menu.classList.add('ctx-menu');
                menu.classList.add('ctx-sub-menu');
                this.itemList.forEach(it => menu.append(it.render()));
            }
        }
        return this.root;
    }

    /**
     * Appends the submenu to `parent` and, after layout, shifts it back
     * on-screen if it overflows the viewport's bottom or right edge.
     * @param {HTMLElement} parent Item element the submenu attaches to
     */
    show(/**@type {HTMLElement}*/parent) {
        if (this.isActive) return;
        this.isActive = true;
        this.render();
        parent.append(this.root);
        // Overflow checks need final geometry, so defer one frame.
        // (Removed a leftover debug console.log of the measured rect.)
        requestAnimationFrame(() => {
            const rect = this.root.getBoundingClientRect();
            if (rect.bottom > window.innerHeight - 5) {
                this.root.style.top = `${window.innerHeight - 5 - rect.bottom}px`;
            }
            if (rect.right > window.innerWidth - 5) {
                this.root.style.left = 'unset';
                this.root.style.right = '100%';
            }
        });
    }

    /** Detaches the submenu and clears any overflow-avoidance offsets. */
    hide() {
        if (this.root) {
            this.root.remove();
            this.root.style.top = '';
            this.root.style.left = '';
            // BUGFIX: also reset `right`, which show() sets when avoiding
            // horizontal overflow; leaving it stale mispositioned reopenings.
            this.root.style.right = '';
        }
        this.isActive = false;
    }

    /** Hides when open, shows (attached to `parent`) when closed. */
    toggle(/**@type {HTMLElement}*/parent) {
        if (this.isActive) {
            this.hide();
        } else {
            this.show(parent);
        }
    }
}

View File

@@ -1,7 +1,9 @@
#quickReplyBar {
outline: none;
/*
padding: 5px 0;
border-bottom: 1px solid var(--SmartThemeBorderColor);
*/
margin: 0;
transition: 0.3s;
opacity: 0.7;
@@ -12,7 +14,7 @@
display: none;
max-width: 100%;
overflow-x: auto;
order: 10;
order: 1;
}
#quickReplies {
@@ -31,6 +33,7 @@
border: 1px solid var(--SmartThemeBorderColor);
border-radius: 10px;
padding: 3px 5px;
margin: 3px 0;
width: min-content;
cursor: pointer;
transition: 0.3s;
@@ -44,4 +47,61 @@
opacity: 1;
filter: brightness(1.2);
cursor: pointer;
}
}
.ctx-blocker {
/* backdrop-filter: blur(1px); */
/* background-color: rgba(0 0 0 / 10%); */
bottom: 0;
left: 0;
position: fixed;
right: 0;
top: 0;
z-index: 999;
}
.ctx-menu {
position: fixed;
overflow: visible;
}
.list-group .list-group-item.ctx-header {
font-weight: bold;
cursor: default;
}
.ctx-item+.ctx-header {
border-top: 1px solid;
}
.ctx-item {
position: relative;
}
.ctx-expander {
border-left: 1px solid;
margin-left: 1em;
text-align: center;
width: 2em;
}
.ctx-expander:hover {
font-weight: bold;
}
.ctx-sub-menu {
position: absolute;
top: 0;
left: 100%;
}
@media screen and (max-width: 1000px) {
.ctx-blocker {
position: absolute;
}
.list-group .list-group-item.ctx-item {
padding: 1em;
}
}

View File

@@ -0,0 +1,46 @@
import { getRequestHeaders } from "../../script.js";
import { extension_settings } from "../extensions.js";
import { SECRET_KEYS, secret_state } from "../secrets.js";
import { createThumbnail } from "../utils.js";
/**
 * Generates a caption for an image using a multimodal model.
 * @param {string} base64Img Base64 encoded image
 * @param {string} prompt Prompt to use for captioning
 * @returns {Promise<string>} Generated caption
 * @throws {Error} When the required API key is missing or the request fails
 */
export async function getMultimodalCaption(base64Img, prompt) {
    // Resolve the backend once; 'openai' is the documented default, so an
    // unset value must also pass the OpenAI key check (previously it
    // bypassed the check while still sending api: 'openai').
    const api = extension_settings.caption.multimodal_api || 'openai';
    if (api === 'openai' && !secret_state[SECRET_KEYS.OPENAI]) {
        throw new Error('OpenAI API key is not set.');
    }
    if (api === 'openrouter' && !secret_state[SECRET_KEYS.OPENROUTER]) {
        throw new Error('OpenRouter API key is not set.');
    }
    // OpenRouter has a payload limit of ~2MB. Base64 inflates data by ~4/3,
    // so length * 0.75 approximates the decoded byte count.
    const base64Bytes = base64Img.length * 0.75;
    const compressionLimit = 2 * 1024 * 1024;
    if (api === 'openrouter' && base64Bytes > compressionLimit) {
        const maxSide = 1024;
        base64Img = await createThumbnail(base64Img, maxSide, maxSide, 'image/jpeg');
    }
    const apiResult = await fetch('/api/openai/caption-image', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify({
            image: base64Img,
            prompt: prompt,
            api: api,
            model: extension_settings.caption.multimodal_model || 'gpt-4-vision-preview',
        }),
    });
    if (!apiResult.ok) {
        // BUGFIX: name the configured backend instead of always blaming OpenAI.
        throw new Error(`Failed to caption image via ${api}.`);
    }
    const { caption } = await apiResult.json();
    return caption;
}

View File

@@ -0,0 +1,31 @@
<div id="sd_comfy_workflow_editor_template">
<div class="sd_comfy_workflow_editor">
<h3><strong>ComfyUI Workflow Editor: <span id="sd_comfy_workflow_editor_name"></span></strong></h3>
<div class="sd_comfy_workflow_editor_content">
<div class="flex-container flexFlowColumn sd_comfy_workflow_editor_workflow_container">
<label for="sd_comfy_workflow_editor_workflow">Workflow (JSON)</label>
<textarea id="sd_comfy_workflow_editor_workflow" class="text_pole wide100p textarea_compact flex1" placeholder="Put the ComfyUI's workflow (JSON) here and replace the variable settings with placeholders."></textarea>
</div>
<div class="sd_comfy_workflow_editor_placeholder_container">
<div>Placeholders</div>
<ul class="sd_comfy_workflow_editor_placeholder_list">
<li data-placeholder="prompt" class="sd_comfy_workflow_editor_not_found">"%prompt%"</li>
<li data-placeholder="negative_prompt" class="sd_comfy_workflow_editor_not_found">"%negative_prompt%"</li>
<li data-placeholder="model" class="sd_comfy_workflow_editor_not_found">"%model%"</li>
<li data-placeholder="vae" class="sd_comfy_workflow_editor_not_found">"%vae%"</li>
<li data-placeholder="sampler" class="sd_comfy_workflow_editor_not_found">"%sampler%"</li>
<li data-placeholder="scheduler" class="sd_comfy_workflow_editor_not_found">"%scheduler%"</li>
<li data-placeholder="steps" class="sd_comfy_workflow_editor_not_found">"%steps%"</li>
<li data-placeholder="scale" class="sd_comfy_workflow_editor_not_found">"%scale%"</li>
<li data-placeholder="width" class="sd_comfy_workflow_editor_not_found">"%width%"</li>
<li data-placeholder="height" class="sd_comfy_workflow_editor_not_found">"%height%"</li>
<li><hr></li>
<li data-placeholder="seed" class="sd_comfy_workflow_editor_not_found">
"%seed%"
<a href="javascript:;" class="notes-link"><span class="note-link-span" title="Will generate a new random seed in SillyTavern that is then used in the ComfyUI workflow.">?</span></a>
</li>
</ul>
</div>
</div>
</div>
</div>

View File

@@ -7,17 +7,23 @@ import {
getRequestHeaders,
event_types,
eventSource,
appendImageToMessage,
generateQuietPrompt,
this_chid,
getCurrentChatId,
animation_duration,
appendMediaToMessage,
getUserAvatar,
user_avatar,
getCharacterAvatar,
formatCharacterAvatar,
} from "../../../script.js";
import { getApiUrl, getContext, extension_settings, doExtrasFetch, modules, renderExtensionTemplate } from "../../extensions.js";
import { selected_group } from "../../group-chats.js";
import { stringFormat, initScrollHeight, resetScrollHeight, getCharaFilename, saveBase64AsFile } from "../../utils.js";
import { stringFormat, initScrollHeight, resetScrollHeight, getCharaFilename, saveBase64AsFile, getBase64Async, delay } from "../../utils.js";
import { getMessageTimeStamp, humanizedDateTime } from "../../RossAscends-mods.js";
import { SECRET_KEYS, secret_state } from "../../secrets.js";
import { getNovelUnlimitedImageGeneration, getNovelAnlas, loadNovelSubscriptionData } from "../../nai-settings.js";
import { getMultimodalCaption } from "../shared.js";
export { MODULE_NAME };
// Wraps a string into monospace font-face span
@@ -37,6 +43,7 @@ const sources = {
novel: 'novel',
vlad: 'vlad',
openai: 'openai',
comfy: 'comfy',
}
const generationMode = {
@@ -48,6 +55,15 @@ const generationMode = {
FACE: 5,
FREE: 6,
BACKGROUND: 7,
CHARACTER_MULTIMODAL: 8,
USER_MULTIMODAL: 9,
FACE_MULTIMODAL: 10,
}
const multimodalMap = {
[generationMode.CHARACTER]: generationMode.CHARACTER_MULTIMODAL,
[generationMode.USER]: generationMode.USER_MULTIMODAL,
[generationMode.FACE]: generationMode.FACE_MULTIMODAL,
}
const modeLabels = {
@@ -58,6 +74,9 @@ const modeLabels = {
[generationMode.NOW]: 'Last Message',
[generationMode.RAW_LAST]: 'Raw Last Message',
[generationMode.BACKGROUND]: 'Background',
[generationMode.CHARACTER_MULTIMODAL]: 'Character (Multimodal Mode)',
[generationMode.FACE_MULTIMODAL]: 'Portrait (Multimodal Mode)',
[generationMode.USER_MULTIMODAL]: 'User (Multimodal Mode)',
}
const triggerWords = {
@@ -71,7 +90,7 @@ const triggerWords = {
}
const messageTrigger = {
activationRegex: /\b(send|mail|imagine|generate|make|create|draw|paint|render)\b.*\b(pic|picture|image|drawing|painting|photo|photograph)\b(?:\s+of)?(?:\s+(?:a|an|the)?)?(.+)/i,
activationRegex: /\b(send|mail|imagine|generate|make|create|draw|paint|render)\b.*\b(pic|picture|image|drawing|painting|photo|photograph)\b(?:\s+of)?(?:\s+(?:a|an|the|this|that|those)?)?(.+)/i,
specialCases: {
[generationMode.CHARACTER]: ['you', 'yourself'],
[generationMode.USER]: ['me', 'myself'],
@@ -117,6 +136,9 @@ const promptTemplates = {
[generationMode.RAW_LAST]: "[Pause your roleplay and provide ONLY the last chat message string back to me verbatim. Do not write anything after the string. Do not roleplay at all in your response. Do not continue the roleplay story.]",
[generationMode.BACKGROUND]: "[Pause your roleplay and provide a detailed description of {{char}}'s surroundings in the form of a comma-delimited list of keywords and phrases. The list must include all of the following items in this order: location, time of day, weather, lighting, and any other relevant details. Do not include descriptions of characters and non-visual qualities such as names, personality, movements, scents, mental traits, or anything which could not be seen in a still photograph. Do not write in full sentences. Prefix your description with the phrase 'background,'. Ignore the rest of the story when crafting this description. Do not roleplay as {{user}} when writing this description, and do not attempt to continue the story.]",
[generationMode.FACE_MULTIMODAL]: `Provide an exhaustive comma-separated list of tags describing the appearance of the character on this image in great detail. Start with "close-up portrait".`,
[generationMode.CHARACTER_MULTIMODAL]: `Provide an exhaustive comma-separated list of tags describing the appearance of the character on this image in great detail. Start with "full body portrait".`,
[generationMode.USER_MULTIMODAL]: `Provide an exhaustive comma-separated list of tags describing the appearance of the character on this image in great detail. Start with "full body portrait".`,
}
const helpString = [
@@ -150,6 +172,9 @@ const defaultSettings = {
steps_step: 1,
steps: 20,
// Scheduler
scheduler: 'normal',
// Image dimensions (Width & Height)
dimension_min: 64,
dimension_max: 2048,
@@ -161,6 +186,7 @@ const defaultSettings = {
negative_prompt: defaultNegative,
sampler: 'DDIM',
model: '',
vae: '',
// Automatic1111/Horde exclusives
restore_faces: false,
@@ -176,6 +202,7 @@ const defaultSettings = {
refine_mode: false,
expand: false,
interactive_mode: false,
multimodal_captioning: false,
prompts: promptTemplates,
@@ -213,6 +240,10 @@ const defaultSettings = {
style: 'Default',
styles: defaultStyles,
// ComyUI settings
comfy_url: 'http://127.0.0.1:8188',
comfy_workflow: 'Default_Comfy_Workflow.json',
}
function processTriggers(chat, _, abort) {
@@ -251,12 +282,12 @@ function processTriggers(chat, _, abort) {
console.log(`SD: Triggered by "${message}", detected subject: ${subject}"`);
for (const [specialMode, triggers] of Object.entries(messageTrigger.specialCases)) {
outer: for (const [specialMode, triggers] of Object.entries(messageTrigger.specialCases)) {
for (const trigger of triggers) {
if (subject === trigger) {
subject = triggerWords[specialMode][0];
console.log(`SD: Detected special case "${trigger}", switching to mode ${specialMode}`);
break;
break outer;
}
}
}
@@ -341,6 +372,7 @@ async function loadSettings() {
$('#sd_enable_hr').prop('checked', extension_settings.sd.enable_hr);
$('#sd_refine_mode').prop('checked', extension_settings.sd.refine_mode);
$('#sd_expand').prop('checked', extension_settings.sd.expand);
$('#sd_multimodal_captioning').prop('checked', extension_settings.sd.multimodal_captioning);
$('#sd_auto_url').val(extension_settings.sd.auto_url);
$('#sd_auto_auth').val(extension_settings.sd.auto_auth);
$('#sd_vlad_url').val(extension_settings.sd.vlad_url);
@@ -348,6 +380,8 @@ async function loadSettings() {
$('#sd_interactive_mode').prop('checked', extension_settings.sd.interactive_mode);
$('#sd_openai_style').val(extension_settings.sd.openai_style);
$('#sd_openai_quality').val(extension_settings.sd.openai_quality);
$('#sd_comfy_url').val(extension_settings.sd.comfy_url);
$('#sd_comfy_prompt').val(extension_settings.sd.comfy_prompt);
for (const style of extension_settings.sd.styles) {
const option = document.createElement('option');
@@ -357,10 +391,39 @@ async function loadSettings() {
$('#sd_style').append(option);
}
// Find a closest resolution option match for the current width and height
let resolutionId = null, minAspectDiff = Infinity, minResolutionDiff = Infinity;
for (const [id, resolution] of Object.entries(resolutionOptions)) {
const aspectDiff = Math.abs((resolution.width / resolution.height) - (extension_settings.sd.width / extension_settings.sd.height));
const resolutionDiff = Math.abs(resolution.width * resolution.height - extension_settings.sd.width * extension_settings.sd.height);
if (resolutionDiff < minResolutionDiff || (resolutionDiff === minResolutionDiff && aspectDiff < minAspectDiff)) {
resolutionId = id;
minAspectDiff = aspectDiff;
minResolutionDiff = resolutionDiff;
}
if (resolutionDiff === 0 && aspectDiff === 0) {
break;
}
}
$('#sd_resolution').val(resolutionId);
toggleSourceControls();
addPromptTemplates();
await Promise.all([loadSamplers(), loadModels()]);
await loadSettingOptions();
}
/**
 * Loads every dropdown's options (samplers, models, schedulers, VAEs,
 * ComfyUI workflows) for the currently selected image generation source.
 * @returns {Promise} Resolves when all option lists have been populated.
 */
async function loadSettingOptions() {
    const loaders = [
        loadSamplers(),
        loadModels(),
        loadSchedulers(),
        loadVaes(),
        loadComfyWorkflows(),
    ];
    return await Promise.all(loaders);
}
function addPromptTemplates() {
@@ -400,6 +463,11 @@ function onInteractiveModeInput() {
saveSettingsDebounced();
}
// Persists the "use multimodal captioning" checkbox state to extension settings.
function onMultimodalCaptioningInput() {
    const isEnabled = Boolean($(this).prop('checked'));
    extension_settings.sd.multimodal_captioning = isEnabled;
    saveSettingsDebounced();
}
function onStyleSelect() {
const selectedStyle = String($('#sd_style').find(':selected').val());
const styleObject = extension_settings.sd.styles.find(x => x.name === selectedStyle);
@@ -587,6 +655,46 @@ function onSamplerChange() {
saveSettingsDebounced();
}
// Preset resolutions selectable in the "Resolution" dropdown.
// Key: <option> value; width/height in pixels; name: display label.
// Fixed two mislabeled aspect ratios: 1344x768 reduces to 7:4 (not 4:3)
// and 768x1344 reduces to 4:7 (not 3:4).
const resolutionOptions = {
    sd_res_512x512: { width: 512, height: 512, name: '512x512 (1:1, icons, profile pictures)' },
    sd_res_600x600: { width: 600, height: 600, name: '600x600 (1:1, icons, profile pictures)' },
    sd_res_512x768: { width: 512, height: 768, name: '512x768 (2:3, vertical character card)' },
    sd_res_768x512: { width: 768, height: 512, name: '768x512 (3:2, horizontal 35-mm movie film)' },
    sd_res_960x540: { width: 960, height: 540, name: '960x540 (16:9, horizontal wallpaper)' },
    sd_res_540x960: { width: 540, height: 960, name: '540x960 (9:16, vertical wallpaper)' },
    sd_res_1920x1088: { width: 1920, height: 1088, name: '1920x1088 (16:9, 1080p, horizontal wallpaper)' },
    sd_res_1088x1920: { width: 1088, height: 1920, name: '1088x1920 (9:16, 1080p, vertical wallpaper)' },
    sd_res_1280x720: { width: 1280, height: 720, name: '1280x720 (16:9, 720p, horizontal wallpaper)' },
    sd_res_720x1280: { width: 720, height: 1280, name: '720x1280 (9:16, 720p, vertical wallpaper)' },
    sd_res_1024x1024: { width: 1024, height: 1024, name: '1024x1024 (1:1, SDXL)' },
    sd_res_1152x896: { width: 1152, height: 896, name: '1152x896 (9:7, SDXL)' },
    sd_res_896x1152: { width: 896, height: 1152, name: '896x1152 (7:9, SDXL)' },
    sd_res_1216x832: { width: 1216, height: 832, name: '1216x832 (19:13, SDXL)' },
    sd_res_832x1216: { width: 832, height: 1216, name: '832x1216 (13:19, SDXL)' },
    sd_res_1344x768: { width: 1344, height: 768, name: '1344x768 (7:4, SDXL)' },
    sd_res_768x1344: { width: 768, height: 1344, name: '768x1344 (4:7, SDXL)' },
    sd_res_1536x640: { width: 1536, height: 640, name: '1536x640 (24:10, SDXL)' },
    sd_res_640x1536: { width: 640, height: 1536, name: '640x1536 (10:24, SDXL)' },
};
// Applies the width/height of the chosen resolution preset to the sliders.
// Triggering 'input' makes the normal slider handlers persist the values.
function onResolutionChange() {
    const selectedId = $('#sd_resolution').val();
    const preset = resolutionOptions[selectedId];

    if (!preset) {
        console.warn(`Could not find resolution option for ${selectedId}`);
        return;
    }

    $('#sd_height').val(preset.height).trigger('input');
    $('#sd_width').val(preset.width).trigger('input');
}
// Persists the scheduler chosen in the ComfyUI scheduler dropdown.
function onSchedulerChange() {
    const selectedScheduler = $('#sd_scheduler').find(':selected').val();
    extension_settings.sd.scheduler = selectedScheduler;
    saveSettingsDebounced();
}
function onWidthInput() {
extension_settings.sd.width = Number($('#sd_width').val());
$('#sd_width_value').text(extension_settings.sd.width);
@@ -605,7 +713,7 @@ async function onSourceChange() {
extension_settings.sd.sampler = null;
toggleSourceControls();
saveSettingsDebounced();
await Promise.all([loadModels(), loadSamplers()]);
await loadSettingOptions();
}
async function onOpenAiStyleSelect() {
@@ -711,6 +819,16 @@ function onHrSecondPassStepsInput() {
saveSettingsDebounced();
}
// Persists the ComfyUI server URL as the user types it.
function onComfyUrlInput() {
    const enteredUrl = $('#sd_comfy_url').val();
    extension_settings.sd.comfy_url = enteredUrl;
    saveSettingsDebounced();
}
// Persists the workflow chosen in the ComfyUI workflow dropdown.
function onComfyWorkflowChange() {
    const selectedWorkflow = $('#sd_comfy_workflow').find(':selected').val();
    extension_settings.sd.comfy_workflow = selectedWorkflow;
    saveSettingsDebounced();
}
async function validateAutoUrl() {
try {
if (!extension_settings.sd.auto_url) {
@@ -727,8 +845,7 @@ async function validateAutoUrl() {
throw new Error('SD WebUI returned an error.');
}
await loadSamplers();
await loadModels();
await loadSettingOptions();
toastr.success('SD WebUI API connected.');
} catch (error) {
toastr.error(`Could not validate SD WebUI API: ${error.message}`);
@@ -751,14 +868,37 @@ async function validateVladUrl() {
throw new Error('SD.Next returned an error.');
}
await loadSamplers();
await loadModels();
await loadSettingOptions();
toastr.success('SD.Next API connected.');
} catch (error) {
toastr.error(`Could not validate SD.Next API: ${error.message}`);
}
}
/**
 * Pings the configured ComfyUI server through the backend proxy and, on
 * success, reloads all source-dependent dropdown options.
 * Shows a toast with the outcome either way.
 */
async function validateComfyUrl() {
    try {
        const url = extension_settings.sd.comfy_url;

        if (!url) {
            throw new Error('URL is not set.');
        }

        const response = await fetch('/api/sd/comfy/ping', {
            method: 'POST',
            headers: getRequestHeaders(),
            body: JSON.stringify({ url }),
        });

        if (!response.ok) {
            throw new Error('ComfyUI returned an error.');
        }

        await loadSettingOptions();
        toastr.success('ComfyUI API connected.');
    } catch (error) {
        toastr.error(`Could not validate ComfyUI API: ${error.message}`);
    }
}
async function onModelChange() {
extension_settings.sd.model = $('#sd_model').find(':selected').val();
saveSettingsDebounced();
@@ -799,6 +939,10 @@ async function getAutoRemoteModel() {
}
}
// Persists the VAE chosen in the ComfyUI VAE dropdown.
// Added saveSettingsDebounced() for consistency with the other dropdown
// handlers (e.g. onModelChange, onSchedulerChange) — without it the VAE
// selection was never written to settings.
async function onVaeChange() {
    extension_settings.sd.vae = $('#sd_vae').find(':selected').val();
    saveSettingsDebounced();
}
async function getAutoRemoteUpscalers() {
try {
const result = await fetch('/api/sd/upscalers', {
@@ -894,6 +1038,9 @@ async function loadSamplers() {
case sources.openai:
samplers = await loadOpenAiSamplers();
break;
case sources.comfy:
samplers = await loadComfySamplers();
break;
}
for (const sampler of samplers) {
@@ -1003,6 +1150,28 @@ async function loadNovelSamplers() {
];
}
/**
 * Fetches the list of sampler names from the configured ComfyUI server.
 * @returns {Promise<string[]>} Sampler names, or an empty list when the URL
 * is unset or the request fails.
 */
async function loadComfySamplers() {
    if (!extension_settings.sd.comfy_url) {
        return [];
    }

    try {
        const result = await fetch(`/api/sd/comfy/samplers`, {
            method: 'POST',
            headers: getRequestHeaders(),
            body: JSON.stringify({
                url: extension_settings.sd.comfy_url,
            })
        });

        if (!result.ok) {
            throw new Error('ComfyUI returned an error.');
        }

        return await result.json();
    } catch (error) {
        // Log instead of silently swallowing so connection problems are diagnosable.
        console.error('Could not load ComfyUI samplers:', error);
        return [];
    }
}
async function loadModels() {
$('#sd_model').empty();
let models = [];
@@ -1026,6 +1195,9 @@ async function loadModels() {
case sources.openai:
models = await loadOpenAiModels();
break;
case sources.comfy:
models = await loadComfyModels();
break;
}
for (const model of models) {
@@ -1180,6 +1352,10 @@ async function loadNovelModels() {
}
return [
{
value: 'nai-diffusion-3',
text: 'NAI Diffusion Anime V3',
},
{
value: 'nai-diffusion-2',
text: 'NAI Diffusion Anime V2',
@@ -1199,16 +1375,193 @@ async function loadNovelModels() {
];
}
/**
 * Fetches the list of model names from the configured ComfyUI server.
 * @returns {Promise<string[]>} Model names, or an empty list when the URL
 * is unset or the request fails.
 */
async function loadComfyModels() {
    if (!extension_settings.sd.comfy_url) {
        return [];
    }

    try {
        const result = await fetch(`/api/sd/comfy/models`, {
            method: 'POST',
            headers: getRequestHeaders(),
            body: JSON.stringify({
                url: extension_settings.sd.comfy_url,
            })
        });

        if (!result.ok) {
            throw new Error('ComfyUI returned an error.');
        }

        return await result.json();
    } catch (error) {
        // Log instead of silently swallowing so connection problems are diagnosable.
        console.error('Could not load ComfyUI models:', error);
        return [];
    }
}
/**
 * Populates the scheduler dropdown for the active image generation source.
 * Only ComfyUI exposes real schedulers; the other known sources show 'N/A'.
 */
async function loadSchedulers() {
    $('#sd_scheduler').empty();

    const naSources = [
        sources.extras,
        sources.horde,
        sources.auto,
        sources.novel,
        sources.vlad,
        sources.openai,
    ];

    let schedulers = [];

    if (extension_settings.sd.source === sources.comfy) {
        schedulers = await loadComfySchedulers();
    } else if (naSources.includes(extension_settings.sd.source)) {
        schedulers = ['N/A'];
    }

    for (const schedulerName of schedulers) {
        const option = document.createElement('option');
        option.innerText = schedulerName;
        option.value = schedulerName;
        option.selected = schedulerName === extension_settings.sd.scheduler;
        $('#sd_scheduler').append(option);
    }
}
/**
 * Fetches the list of scheduler names from the configured ComfyUI server.
 * @returns {Promise<string[]>} Scheduler names, or an empty list when the URL
 * is unset or the request fails.
 */
async function loadComfySchedulers() {
    if (!extension_settings.sd.comfy_url) {
        return [];
    }

    try {
        const result = await fetch(`/api/sd/comfy/schedulers`, {
            method: 'POST',
            headers: getRequestHeaders(),
            body: JSON.stringify({
                url: extension_settings.sd.comfy_url,
            })
        });

        if (!result.ok) {
            throw new Error('ComfyUI returned an error.');
        }

        return await result.json();
    } catch (error) {
        // Log instead of silently swallowing so connection problems are diagnosable.
        console.error('Could not load ComfyUI schedulers:', error);
        return [];
    }
}
/**
 * Populates the VAE dropdown for the active image generation source.
 * Only ComfyUI exposes real VAEs; the other known sources show 'N/A'.
 */
async function loadVaes() {
    $('#sd_vae').empty();

    const naSources = [
        sources.extras,
        sources.horde,
        sources.auto,
        sources.novel,
        sources.vlad,
        sources.openai,
    ];

    let vaes = [];

    if (extension_settings.sd.source === sources.comfy) {
        vaes = await loadComfyVaes();
    } else if (naSources.includes(extension_settings.sd.source)) {
        vaes = ['N/A'];
    }

    for (const vaeName of vaes) {
        const option = document.createElement('option');
        option.innerText = vaeName;
        option.value = vaeName;
        option.selected = vaeName === extension_settings.sd.vae;
        $('#sd_vae').append(option);
    }
}
/**
 * Fetches the list of VAE names from the configured ComfyUI server.
 * @returns {Promise<string[]>} VAE names, or an empty list when the URL
 * is unset or the request fails.
 */
async function loadComfyVaes() {
    if (!extension_settings.sd.comfy_url) {
        return [];
    }

    try {
        const result = await fetch(`/api/sd/comfy/vaes`, {
            method: 'POST',
            headers: getRequestHeaders(),
            body: JSON.stringify({
                url: extension_settings.sd.comfy_url,
            })
        });

        if (!result.ok) {
            throw new Error('ComfyUI returned an error.');
        }

        return await result.json();
    } catch (error) {
        // Log instead of silently swallowing so connection problems are diagnosable.
        console.error('Could not load ComfyUI VAEs:', error);
        return [];
    }
}
/**
 * Populates the ComfyUI workflow dropdown from the backend.
 * Does nothing if no ComfyUI URL is configured.
 */
async function loadComfyWorkflows() {
    if (!extension_settings.sd.comfy_url) {
        return;
    }

    try {
        $('#sd_comfy_workflow').empty();
        const result = await fetch(`/api/sd/comfy/workflows`, {
            method: 'POST',
            headers: getRequestHeaders(),
            body: JSON.stringify({
                url: extension_settings.sd.comfy_url,
            })
        });

        if (!result.ok) {
            throw new Error('ComfyUI returned an error.');
        }

        const workflows = await result.json();

        for (const workflow of workflows) {
            const option = document.createElement('option');
            option.innerText = workflow;
            option.value = workflow;
            option.selected = workflow === extension_settings.sd.comfy_workflow;
            $('#sd_comfy_workflow').append(option);
        }
    } catch (error) {
        // Log instead of silently swallowing so connection problems are diagnosable.
        console.error('Could not load ComfyUI workflows:', error);
    }
}
/**
 * Determines the generation mode for a trigger prompt.
 * Scans triggerWords for a case-insensitive exact match; defaults to FREE.
 * When multimodal captioning is enabled, the matched mode is remapped to its
 * multimodal counterpart via multimodalMap (when one exists).
 * NOTE(review): the source span contained interleaved old/new diff lines
 * (early `return` statements made the multimodal remap unreachable);
 * this keeps the newer accumulate-then-remap behavior.
 * @param {string} prompt - The trigger text.
 * @returns {number} A generationMode value.
 */
function getGenerationType(prompt) {
    let mode = generationMode.FREE;
    const normalizedPrompt = prompt.toLowerCase().trim();

    for (const [key, values] of Object.entries(triggerWords)) {
        for (const value of values) {
            if (value.toLowerCase() === normalizedPrompt) {
                mode = Number(key);
                break;
            }
        }
    }

    if (extension_settings.sd.multimodal_captioning && multimodalMap[mode] !== undefined) {
        mode = multimodalMap[mode];
    }

    return mode;
}
function getQuietPrompt(mode, trigger) {
@@ -1279,7 +1632,7 @@ async function generatePicture(_, trigger, message, callback) {
trigger = trigger.trim();
const generationType = getGenerationType(trigger);
console.log('Generation mode', generationType, 'triggered with', trigger);
const quiet_prompt = getQuietPrompt(generationType, trigger);
const quietPrompt = getQuietPrompt(generationType, trigger);
const context = getContext();
// if context.characterId is not null, then we get context.characters[context.characterId].avatar, else we get groupId and context.groups[groupId].id
@@ -1303,7 +1656,7 @@ async function generatePicture(_, trigger, message, callback) {
const dimensions = setTypeSpecificDimensions(generationType);
try {
const prompt = await getPrompt(generationType, message, trigger, quiet_prompt);
const prompt = await getPrompt(generationType, message, trigger, quietPrompt);
console.log('Processed image prompt:', prompt);
context.deactivateSendButtons();
@@ -1348,7 +1701,7 @@ function restoreOriginalDimensions(savedParams) {
extension_settings.sd.width = savedParams.width;
}
async function getPrompt(generationType, message, trigger, quiet_prompt) {
async function getPrompt(generationType, message, trigger, quietPrompt) {
let prompt;
switch (generationType) {
@@ -1358,8 +1711,13 @@ async function getPrompt(generationType, message, trigger, quiet_prompt) {
case generationMode.FREE:
prompt = trigger.trim();
break;
case generationMode.FACE_MULTIMODAL:
case generationMode.CHARACTER_MULTIMODAL:
case generationMode.USER_MULTIMODAL:
prompt = await generateMultimodalPrompt(generationType, quietPrompt);
break;
default:
prompt = await generatePrompt(quiet_prompt);
prompt = await generatePrompt(quietPrompt);
break;
}
@@ -1370,8 +1728,57 @@ async function getPrompt(generationType, message, trigger, quiet_prompt) {
return prompt;
}
async function generatePrompt(quiet_prompt) {
const reply = await generateQuietPrompt(quiet_prompt, false, false);
/**
* Generates a prompt using multimodal captioning.
* @param {number} generationType - The type of image generation to perform.
* @param {string} quietPrompt - The prompt to use for the image generation.
*/
/**
 * Generates a prompt using multimodal captioning of an avatar image.
 * @param {number} generationType - The type of image generation to perform.
 * @param {string} quietPrompt - The captioning instruction to use.
 * @returns {Promise<string>} The generated caption.
 * @throws {Error} If the avatar can't be fetched or captioning returns nothing.
 */
async function generateMultimodalPrompt(generationType, quietPrompt) {
    let avatarUrl;

    // Fixed: comparisons below used a mix of == and ===; normalized to strict equality.
    if (generationType === generationMode.USER_MULTIMODAL) {
        avatarUrl = getUserAvatar(user_avatar);
    }

    if (generationType === generationMode.CHARACTER_MULTIMODAL || generationType === generationMode.FACE_MULTIMODAL) {
        const context = getContext();

        if (context.groupId) {
            // In groups: prefer the avatar from the last non-user, non-system
            // message; otherwise fall back to a random group member's avatar.
            const groupMembers = context.groups.find(x => x.id === context.groupId)?.members;
            const lastMessageAvatar = context.chat?.filter(x => !x.is_system && !x.is_user)?.slice(-1)[0]?.original_avatar;
            const randomMemberAvatar = Array.isArray(groupMembers) ? groupMembers[Math.floor(Math.random() * groupMembers.length)]?.avatar : null;
            const avatarToUse = lastMessageAvatar || randomMemberAvatar;
            avatarUrl = formatCharacterAvatar(avatarToUse);
        } else {
            avatarUrl = getCharacterAvatar(context.characterId);
        }
    }

    const response = await fetch(avatarUrl);

    if (!response.ok) {
        throw new Error('Could not fetch avatar image.');
    }

    const avatarBlob = await response.blob();
    const avatarBase64 = await getBase64Async(avatarBlob);
    const caption = await getMultimodalCaption(avatarBase64, quietPrompt);

    if (!caption) {
        throw new Error('Multimodal captioning failed.');
    }

    return caption;
}
/**
* Generates a prompt using the main LLM API.
* @param {string} quietPrompt - The prompt to use for the image generation.
* @returns {Promise<string>} - A promise that resolves when the prompt generation completes.
*/
/**
 * Generates a prompt using the main LLM API.
 * @param {string} quietPrompt - The prompt to use for the image generation.
 * @returns {Promise<string>} Resolves with the processed model reply.
 */
async function generatePrompt(quietPrompt) {
    const rawReply = await generateQuietPrompt(quietPrompt, false, false);
    return processReply(rawReply);
}
@@ -1405,6 +1812,9 @@ async function sendGenerationRequest(generationType, prompt, characterName = nul
case sources.openai:
result = await generateOpenAiImage(prefixedPrompt);
break;
case sources.comfy:
result = await generateComfyImage(prefixedPrompt);
break;
}
if (!result.data) {
@@ -1689,6 +2099,142 @@ async function generateOpenAiImage(prompt) {
}
}
/**
* Generates an image in ComfyUI using the provided prompt and configuration settings.
*
* @param {string} prompt - The main instruction used to guide the image generation.
* @returns {Promise<{format: string, data: string}>} - A promise that resolves when the image generation and processing are complete.
*/
/**
 * Generates an image in ComfyUI using the provided prompt and configuration settings.
 *
 * @param {string} prompt - The main instruction used to guide the image generation.
 * @returns {Promise<{format: string, data: string}>} - A promise that resolves when the image generation and processing are complete.
 * @throws {Error} If the workflow file cannot be loaded from the backend.
 */
async function generateComfyImage(prompt) {
    // Workflow placeholders that are substituted with the matching extension setting.
    const placeholders = [
        'negative_prompt',
        'model',
        'vae',
        'sampler',
        'scheduler',
        'steps',
        'scale',
        'width',
        'height',
    ];

    const workflowResponse = await fetch('/api/sd/comfy/workflow', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify({
            file_name: extension_settings.sd.comfy_workflow,
        }),
    });
    if (!workflowResponse.ok) {
        const text = await workflowResponse.text();
        toastr.error(`Failed to load workflow.\n\n${text}`);
        // Fixed: the original fell through and tried to parse the error
        // payload as the workflow JSON; bail out instead.
        throw new Error('Failed to load ComfyUI workflow.');
    }
    // JSON.stringify ensures substituted values are safely quoted/escaped
    // when spliced into the workflow text.
    let workflow = (await workflowResponse.json()).replace('"%prompt%"', JSON.stringify(prompt));
    workflow = workflow.replace('"%seed%"', JSON.stringify(Math.round(Math.random() * Number.MAX_SAFE_INTEGER)));
    placeholders.forEach(ph => {
        workflow = workflow.replace(`"%${ph}%"`, JSON.stringify(extension_settings.sd[ph]));
    });
    console.log(`{
        "prompt": ${workflow}
    }`);
    const promptResult = await fetch(`/api/sd/comfy/generate`, {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify({
            url: extension_settings.sd.comfy_url,
            prompt: `{
                "prompt": ${workflow}
            }`,
        })
    });
    return { format: 'png', data: await promptResult.text() };
}
// Opens a popup editor for the currently selected ComfyUI workflow file,
// marks which known placeholders are present in the text, and saves the
// edited workflow back to the backend when the user confirms.
async function onComfyOpenWorkflowEditorClick() {
    // Load the raw workflow text from the backend for the selected file.
    let workflow = await (await fetch(`/api/sd/comfy/workflow`, {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify({
            file_name: extension_settings.sd.comfy_workflow,
        }),
    })).json();
    const editorHtml = $(await $.get('scripts/extensions/stable-diffusion/comfyWorkflowEditor.html'));
    // The popup promise is awaited later so the editor DOM can be wired up first.
    const popupResult = callPopup(editorHtml, "confirm", undefined, { okButton: "Save", wide: true, large: true, rows: 1 });
    // Re-reads the editor text and toggles the found/missing marker class
    // on each placeholder list item.
    const checkPlaceholders = () => {
        workflow = $('#sd_comfy_workflow_editor_workflow').val().toString();
        $('.sd_comfy_workflow_editor_placeholder_list > li[data-placeholder]').each(function (idx) {
            const key = this.getAttribute('data-placeholder');
            const found = workflow.search(`"%${key}%"`) != -1;
            this.classList[found ? 'remove' : 'add']('sd_comfy_workflow_editor_not_found');
        });
    };
    $('#sd_comfy_workflow_editor_name').text(extension_settings.sd.comfy_workflow);
    $('#sd_comfy_workflow_editor_workflow').val(workflow);
    checkPlaceholders();
    $('#sd_comfy_workflow_editor_workflow').on('input', checkPlaceholders);
    if (await popupResult) {
        // User clicked "Save": persist the edited workflow text.
        const response = await fetch(`/api/sd/comfy/save-workflow`, {
            method: 'POST',
            headers: getRequestHeaders(),
            body: JSON.stringify({
                file_name: extension_settings.sd.comfy_workflow,
                workflow: $('#sd_comfy_workflow_editor_workflow').val().toString(),
            }),
        });
        if (!response.ok) {
            const text = await response.text();
            toastr.error(`Failed to save workflow.\n\n${text}`);
        }
    }
}
/**
 * Prompts for a new workflow name, creates an empty workflow file on the
 * backend, reloads the workflow list, then opens the editor for it.
 */
async function onComfyNewWorkflowClick() {
    let name = await callPopup('<h3>Workflow name:</h3>', 'input');

    if (!name) {
        return;
    }

    // Normalize the file name to a .json extension.
    if (!name.toLowerCase().endsWith('.json')) {
        name += '.json';
    }

    extension_settings.sd.comfy_workflow = name;

    const saveResponse = await fetch(`/api/sd/comfy/save-workflow`, {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify({
            file_name: extension_settings.sd.comfy_workflow,
            workflow: '',
        }),
    });

    if (!saveResponse.ok) {
        const errorText = await saveResponse.text();
        toastr.error(`Failed to save workflow.\n\n${errorText}`);
    }

    saveSettingsDebounced();
    await loadComfyWorkflows();
    await delay(200);
    await onComfyOpenWorkflowEditorClick();
}
/**
 * Deletes the currently selected ComfyUI workflow after user confirmation,
 * then reloads the workflow list and re-applies the new selection.
 */
async function onComfyDeleteWorkflowClick() {
    // Renamed from `confirm` to avoid shadowing window.confirm.
    const confirmed = await callPopup('Delete the workflow? This action is irreversible.', 'confirm');

    if (!confirmed) {
        return;
    }

    const response = await fetch('/api/sd/comfy/delete-workflow', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify({
            file_name: extension_settings.sd.comfy_workflow,
        }),
    });

    if (!response.ok) {
        const text = await response.text();
        // Fixed: the original toast said "Failed to save workflow" on the delete path.
        toastr.error(`Failed to delete workflow.\n\n${text}`);
    }

    await loadComfyWorkflows();
    onComfyWorkflowChange();
}
async function sendMessage(prompt, image, generationType) {
const context = getContext();
const messageText = `[${context.name2} sends a picture that contains: ${prompt}]`;
@@ -1760,10 +2306,10 @@ function addSDGenButtons() {
if (target.is(button) && !dropdown.is(":visible") && $("#send_but").is(":visible")) {
e.preventDefault();
dropdown.fadeIn(250);
dropdown.fadeIn(animation_duration);
popper.update();
} else {
dropdown.fadeOut(250);
dropdown.fadeOut(animation_duration);
}
});
}
@@ -1782,6 +2328,8 @@ function isValidState() {
return secret_state[SECRET_KEYS.NOVEL];
case sources.openai:
return secret_state[SECRET_KEYS.OPENAI];
case sources.comfy:
return true;
}
}
@@ -1861,7 +2409,7 @@ async function sdMessageButton(e) {
message.extra.image = image;
message.extra.title = prompt;
message.extra.generationType = generationType;
appendImageToMessage(message, $mes);
appendMediaToMessage(message, $mes);
context.saveChat();
}
@@ -1895,7 +2443,10 @@ jQuery(async () => {
$('#sd_scale').on('input', onScaleInput);
$('#sd_steps').on('input', onStepsInput);
$('#sd_model').on('change', onModelChange);
$('#sd_vae').on('change', onVaeChange);
$('#sd_sampler').on('change', onSamplerChange);
$('#sd_resolution').on('change', onResolutionChange);
$('#sd_scheduler').on('change', onSchedulerChange);
$('#sd_prompt_prefix').on('input', onPromptPrefixInput);
$('#sd_negative_prompt').on('input', onNegativePromptInput);
$('#sd_width').on('input', onWidthInput);
@@ -1920,6 +2471,12 @@ jQuery(async () => {
$('#sd_novel_upscale_ratio').on('input', onNovelUpscaleRatioInput);
$('#sd_novel_anlas_guard').on('input', onNovelAnlasGuardInput);
$('#sd_novel_view_anlas').on('click', onViewAnlasClick);
$('#sd_comfy_validate').on('click', validateComfyUrl);
$('#sd_comfy_url').on('input', onComfyUrlInput);
$('#sd_comfy_workflow').on('change', onComfyWorkflowChange);
$('#sd_comfy_open_workflow_editor').on('click', onComfyOpenWorkflowEditorClick);
$('#sd_comfy_new_workflow').on('click', onComfyNewWorkflowClick);
$('#sd_comfy_delete_workflow').on('click', onComfyDeleteWorkflowClick);
$('#sd_expand').on('input', onExpandInput);
$('#sd_style').on('change', onStyleSelect);
$('#sd_save_style').on('click', onSaveStyleClick);
@@ -1927,6 +2484,7 @@ jQuery(async () => {
$('#sd_interactive_mode').on('input', onInteractiveModeInput);
$('#sd_openai_style').on('change', onOpenAiStyleSelect);
$('#sd_openai_quality').on('change', onOpenAiQualitySelect);
$('#sd_multimodal_captioning').on('input', onMultimodalCaptioningInput);
$('.sd_settings .inline-drawer-toggle').on('click', function () {
initScrollHeight($("#sd_prompt_prefix"));
@@ -1934,8 +2492,15 @@ jQuery(async () => {
initScrollHeight($("#sd_character_prompt"));
})
for (const [key, value] of Object.entries(resolutionOptions)) {
const option = document.createElement('option');
option.value = key;
option.text = value.name;
$('#sd_resolution').append(option);
}
eventSource.on(event_types.EXTRAS_CONNECTED, async () => {
await Promise.all([loadSamplers(), loadModels()]);
await loadSettingOptions();
});
eventSource.on(event_types.CHAT_CHANGED, onChatChanged);

View File

@@ -18,6 +18,10 @@
<input id="sd_interactive_mode" type="checkbox" />
Interactive mode
</label>
<label for="sd_multimodal_captioning" class="checkbox_label" title="Use multimodal captioning to generate prompts for user and character portraits based on their avatars.">
<input id="sd_multimodal_captioning" type="checkbox" />
Use multimodal captioning for portraits
</label>
<label for="sd_expand" class="checkbox_label" title="Automatically extend prompts using text generation model">
<input id="sd_expand" type="checkbox" />
Auto-enhance prompts
@@ -30,6 +34,7 @@
<option value="vlad">SD.Next (vladmandic)</option>
<option value="novel">NovelAI Diffusion</option>
<option value="openai">OpenAI (DALL-E)</option>
<option value="comfy">ComfyUI</option>
</select>
<div data-sd-source="auto">
<label for="sd_auto_url">SD Web UI URL</label>
@@ -112,6 +117,32 @@
</select>
</div>
</div>
<div data-sd-source="comfy">
<label for="sd_comfy_url">ComfyUI URL</label>
<div class="flex-container flexnowrap">
<input id="sd_comfy_url" type="text" class="text_pole" placeholder="Example: {{comfy_url}}" value="{{comfy_url}}" />
<div id="sd_comfy_validate" class="menu_button menu_button_icon">
<i class="fa-solid fa-check"></i>
<span data-i18n="Connect">
Connect
</span>
</div>
</div>
<p><i><b>Important:</b> The server must be accessible from the SillyTavern host machine.</i></p>
<label for="sd_comfy_workflow">ComfyUI Workflow</label>
<div class="flex-container flexnowrap">
<select id="sd_comfy_workflow" class="flex1 text_pole"></select>
<div id="sd_comfy_open_workflow_editor" class="menu_button menu_button_icon" title="Open workflow editor">
<i class="fa-solid fa-pen-to-square"></i>
</div>
<div id="sd_comfy_new_workflow" class="menu_button menu_button_icon" title="Create new workflow">
<i class="fa-solid fa-plus"></i>
</div>
<div id="sd_comfy_delete_workflow" class="menu_button menu_button_icon" title="Delete workflow">
<i class="fa-solid fa-trash-can"></i>
</div>
</div>
</div>
<label for="sd_scale">CFG Scale (<span id="sd_scale_value"></span>)</label>
<input id="sd_scale" type="range" min="{{scale_min}}" max="{{scale_max}}" step="{{scale_step}}" value="{{scale}}" />
<label for="sd_steps">Sampling steps (<span id="sd_steps_value"></span>)</label>
@@ -124,6 +155,16 @@
<select id="sd_model"></select>
<label for="sd_sampler">Sampling method</label>
<select id="sd_sampler"></select>
<label for="sd_resolution">Resolution</label>
<select id="sd_resolution"><!-- Populated in JS --></select>
<div data-sd-source="comfy">
<label for="sd_scheduler">Scheduler</label>
<select id="sd_scheduler"></select>
</div>
<div data-sd-source="comfy">
<label for="sd_vae">VAE</label>
<select id="sd_vae"></select>
</div>
<div class="flex-container marginTop10 margin-bot-10px">
<label class="flex1 checkbox_label">
<input id="sd_restore_faces" type="checkbox" />

View File

@@ -27,3 +27,58 @@
z-index: 30000;
backdrop-filter: blur(--SmartThemeBlurStrength);
}
/* Button that opens the ComfyUI workflow editor popup. */
#sd_comfy_open_workflow_editor {
    display: flex;
    flex-direction: row;
    gap: 10px;
    width: fit-content;
}
/* Workflow editor popup: full-height column layout. */
#sd_comfy_workflow_editor_template {
    height: 100%;
}
.sd_comfy_workflow_editor {
    display: flex;
    flex-direction: column;
    height: 100%;
}
/* Editor body: workflow textarea beside the placeholder sidebar. */
.sd_comfy_workflow_editor_content {
    display: flex;
    flex: 1 1 auto;
    flex-direction: row;
}
.sd_comfy_workflow_editor_workflow_container {
    flex: 1 1 auto;
}
/* Monospace for the raw workflow JSON text. */
#sd_comfy_workflow_editor_workflow {
    font-family: monospace;
}
.sd_comfy_workflow_editor_placeholder_container {
    flex: 0 0 auto;
}
.sd_comfy_workflow_editor_placeholder_list {
    font-size: x-small;
    list-style: none;
    margin: 5px 0;
    padding: 3px 5px;
    text-align: left;
}
/* Found/missing markers; the "not_found" class is toggled from index.js
   (checkPlaceholders) based on the editor text. */
.sd_comfy_workflow_editor_placeholder_list>li[data-placeholder]:before {
    content: "✅ ";
}
.sd_comfy_workflow_editor_placeholder_list>li.sd_comfy_workflow_editor_not_found:before {
    content: "❌ ";
}
.sd_comfy_workflow_editor_placeholder_list>li>.notes-link {
    cursor: help;
}

View File

@@ -45,6 +45,8 @@ class ElevenLabsTtsProvider {
this.settings.stability = $('#elevenlabs_tts_stability').val()
this.settings.similarity_boost = $('#elevenlabs_tts_similarity_boost').val()
this.settings.model = $('#elevenlabs_tts_model').find(':selected').val()
$('#elevenlabs_tts_stability_output').text(this.settings.stability);
$('#elevenlabs_tts_similarity_boost_output').text(this.settings.similarity_boost);
saveTtsProviderSettings()
}
@@ -79,6 +81,8 @@ class ElevenLabsTtsProvider {
$('#elevenlabs_tts_similarity_boost').on('input', this.onSettingsChange.bind(this))
$('#elevenlabs_tts_stability').on('input', this.onSettingsChange.bind(this))
$('#elevenlabs_tts_model').on('change', this.onSettingsChange.bind(this))
$('#elevenlabs_tts_stability_output').text(this.settings.stability);
$('#elevenlabs_tts_similarity_boost_output').text(this.settings.similarity_boost);
try {
await this.checkReady()

View File

@@ -9,6 +9,8 @@ import { SystemTtsProvider } from './system.js'
import { NovelTtsProvider } from './novel.js'
import { power_user } from '../../power-user.js'
import { registerSlashCommand } from '../../slash-commands.js'
import { OpenAITtsProvider } from './openai.js'
import {XTTSTtsProvider} from "./xtts.js"
export { talkingAnimation };
const UPDATE_INTERVAL = 1000
@@ -69,10 +71,12 @@ export function getPreviewString(lang) {
let ttsProviders = {
ElevenLabs: ElevenLabsTtsProvider,
Silero: SileroTtsProvider,
XTTSv2: XTTSTtsProvider,
System: SystemTtsProvider,
Coqui: CoquiTtsProvider,
Edge: EdgeTtsProvider,
Novel: NovelTtsProvider,
OpenAI: OpenAITtsProvider,
}
let ttsProvider
let ttsProviderName
@@ -165,42 +169,43 @@ async function moduleWorker() {
}
// take the count of messages
let lastMessageNumber = context.chat.length ? context.chat.length : 0
let lastMessageNumber = context.chat.length ? context.chat.length : 0;
// There's no new messages
let diff = lastMessageNumber - currentMessageNumber
let hashNew = getStringHash((chat.length && chat[chat.length - 1].mes) ?? '')
let diff = lastMessageNumber - currentMessageNumber;
let hashNew = getStringHash((chat.length && chat[chat.length - 1].mes) ?? '');
// if messages got deleted, diff will be < 0
if (diff < 0) {
// necessary actions will be taken by the onChatDeleted() handler
return
return;
}
// if no new messages, or same message, or same message hash, do nothing
if (diff == 0 && hashNew === lastMessageHash) {
return
return;
}
// If streaming, wait for streaming to finish before processing new messages
if (context.streamingProcessor && !context.streamingProcessor.isFinished) {
return;
}
// clone message object, as things go haywire if message object is altered below (it's passed by reference)
const message = structuredClone(chat[chat.length - 1])
const message = structuredClone(chat[chat.length - 1]);
// if last message within current message, message got extended. only send diff to TTS.
if (ttsLastMessage !== null && message.mes.indexOf(ttsLastMessage) !== -1) {
let tmp = message.mes
message.mes = message.mes.replace(ttsLastMessage, '')
ttsLastMessage = tmp
let tmp = message.mes;
message.mes = message.mes.replace(ttsLastMessage, '');
ttsLastMessage = tmp;
} else {
ttsLastMessage = message.mes
ttsLastMessage = message.mes;
}
// We're currently swiping or streaming. Don't generate voice
if (
!message ||
message.mes === '...' ||
message.mes === '' ||
(context.streamingProcessor && !context.streamingProcessor.isFinished)
) {
return
// We're currently swiping. Don't generate voice
if (!message || message.mes === '...' || message.mes === '') {
return;
}
// Don't generate if message doesn't have a display text
@@ -301,6 +306,7 @@ window.debugTtsPlayback = debugTtsPlayback
//##################//
let audioElement = new Audio()
audioElement.id = 'tts_audio'
audioElement.autoplay = true
let audioJobQueue = []
@@ -451,7 +457,7 @@ let currentTtsJob // Null if nothing is currently being processed
let currentMessageNumber = 0
// Marks the active TTS job as finished and clears it.
// NOTE(review): the source span contained both the old and the new
// console.info line (stripped-diff residue); kept the newer one, whose
// optional chaining guards against currentTtsJob already being null.
function completeTtsJob() {
    console.info(`Current TTS job for ${currentTtsJob?.name} completed.`)
    currentTtsJob = null
}
@@ -496,6 +502,10 @@ async function processTtsQueue() {
const partJoiner = (ttsProvider?.separator || ' ... ');
text = matches ? matches.join(partJoiner) : text;
}
// Collapse newlines and spaces into single space
text = text.replace(/\s+/g, ' ');
console.log(`TTS: ${text}`)
const char = currentTtsJob.name
@@ -987,4 +997,5 @@ $(document).ready(function () {
eventSource.on(event_types.MESSAGE_DELETED, onChatDeleted);
eventSource.on(event_types.GROUP_UPDATED, onChatChanged)
registerSlashCommand('speak', onNarrateText, ['narrate', 'tts'], `<span class="monospace">(text)</span> narrate any text using currently selected character's voice. Use voice="Character Name" argument to set other voice from the voice map, example: <tt>/speak voice="Donald Duck" Quack!</tt>`, true, true);
document.body.appendChild(audioElement);
})

View File

@@ -0,0 +1,148 @@
import { getRequestHeaders } from "../../../script.js"
import { saveTtsProviderSettings } from "./index.js";
export { OpenAITtsProvider }
class OpenAITtsProvider {
    // TTS provider backed by OpenAI's speech endpoint, proxied through the
    // local `/api/openai/generate-voice` route (the API key is read server-side,
    // from the OpenAI API settings — see the hint in settingsHtml).

    // OpenAI has no voice-listing endpoint, so the fixed voice set is
    // hard-coded. Preview URLs point at OpenAI's official audio samples.
    static voices = [
        { name: 'Alloy', voice_id: 'alloy', lang: 'en-US', preview_url: 'https://cdn.openai.com/API/docs/audio/alloy.wav' },
        { name: 'Echo', voice_id: 'echo', lang: 'en-US', preview_url: 'https://cdn.openai.com/API/docs/audio/echo.wav' },
        { name: 'Fable', voice_id: 'fable', lang: 'en-US', preview_url: 'https://cdn.openai.com/API/docs/audio/fable.wav' },
        { name: 'Onyx', voice_id: 'onyx', lang: 'en-US', preview_url: 'https://cdn.openai.com/API/docs/audio/onyx.wav' },
        { name: 'Nova', voice_id: 'nova', lang: 'en-US', preview_url: 'https://cdn.openai.com/API/docs/audio/nova.wav' },
        { name: 'Shimmer', voice_id: 'shimmer', lang: 'en-US', preview_url: 'https://cdn.openai.com/API/docs/audio/shimmer.wav' },
    ];

    settings
    voices = []
    separator = ' . '
    audioElement = document.createElement('audio')

    defaultSettings = {
        voiceMap: {},
        customVoices: [],
        model: 'tts-1',
        speed: 1,
    }

    /**
     * HTML fragment rendered into the provider settings drawer:
     * a model dropdown and a speed slider.
     * @returns {string} Settings markup.
     */
    get settingsHtml() {
        let html = `
        <div>Use OpenAI's TTS engine.</div>
        <small>Hint: Save an API key in the OpenAI API settings to use it here.</small>
        <div>
            <label for="openai-tts-model">Model:</label>
            <select id="openai-tts-model">
                <optgroup label="Latest">
                    <option value="tts-1">tts-1</option>
                    <option value="tts-1-hd">tts-1-hd</option>
                </optgroup>
                <optgroup label="Snapshots">
                    <option value="tts-1-1106">tts-1-1106</option>
                    <option value="tts-1-hd-1106">tts-1-hd-1106</option>
                </optgroup>
            </select>
        </div>
        <div>
            <label for="openai-tts-speed">Speed: <span id="openai-tts-speed-output"></span></label>
            <input type="range" id="openai-tts-speed" value="1" min="0.25" max="4" step="0.25">
        </div>`;
        return html;
    }

    /**
     * Applies persisted settings and wires up the settings UI controls.
     * @param {object} settings Previously saved provider settings (may be empty).
     * @throws {string} When a key not present in defaultSettings is passed.
     */
    async loadSettings(settings) {
        // Populate Provider UI given input settings
        if (Object.keys(settings).length == 0) {
            console.info("Using default TTS Provider settings")
        }

        // Start from a copy of the defaults so later mutations (onSettingsChange)
        // cannot corrupt defaultSettings for future resets.
        this.settings = structuredClone(this.defaultSettings);

        // Only accept keys defined in defaultSettings
        for (const key in settings) {
            if (key in this.settings) {
                this.settings[key] = settings[key];
            } else {
                throw `Invalid setting passed to TTS Provider: ${key}`;
            }
        }

        $('#openai-tts-model').val(this.settings.model);
        $('#openai-tts-model').on('change', () => {
            this.onSettingsChange();
        });

        $('#openai-tts-speed').val(this.settings.speed);
        $('#openai-tts-speed').on('input', () => {
            this.onSettingsChange();
        });

        $('#openai-tts-speed-output').text(this.settings.speed);
        await this.checkReady();
        console.debug("OpenAI TTS: Settings loaded");
    }

    // Pulls current control values into this.settings and persists them.
    onSettingsChange() {
        // Update dynamically
        this.settings.model = String($('#openai-tts-model').find(':selected').val());
        this.settings.speed = Number($('#openai-tts-speed').val());
        $('#openai-tts-speed-output').text(this.settings.speed);
        saveTtsProviderSettings();
    }

    // Readiness probe; the voice list is static, so this cannot actually fail.
    async checkReady() {
        await this.fetchTtsVoiceObjects();
    }

    // No remote voice list to refresh — intentionally a no-op.
    async onRefreshClick() {
        return;
    }

    /**
     * Resolves a voice by its id or display name from the static voice list.
     * @param {string} voiceName Voice id (e.g. 'alloy') or display name (e.g. 'Alloy').
     * @returns {Promise<object>} The matching voice descriptor.
     * @throws {string} When no name is given or no voice matches.
     */
    async getVoice(voiceName) {
        if (!voiceName) {
            throw `TTS Voice name not provided`
        }

        const voice = OpenAITtsProvider.voices.find(voice => voice.voice_id === voiceName || voice.name === voiceName);

        if (!voice) {
            throw `TTS Voice not found: ${voiceName}`
        }

        return voice;
    }

    /**
     * Generates speech audio for the given text.
     * @param {string} text Text to narrate.
     * @param {string} voiceId OpenAI voice id.
     * @returns {Promise<Response>} Raw fetch response containing the audio.
     */
    async generateTts(text, voiceId) {
        const response = await this.fetchTtsGeneration(text, voiceId)
        return response
    }

    // Returns the static voice list (OpenAI offers no listing API).
    async fetchTtsVoiceObjects() {
        return OpenAITtsProvider.voices;
    }

    // Previews are handled via the static preview_url entries; nothing to do here.
    async previewTtsVoice(_) {
        return;
    }

    /**
     * Calls the local proxy endpoint to synthesize speech.
     * @param {string} inputText Text to synthesize.
     * @param {string} voiceId OpenAI voice id.
     * @returns {Promise<Response>} Fetch response with the generated audio.
     * @throws {Error} When the HTTP request fails (also shows a toast).
     */
    async fetchTtsGeneration(inputText, voiceId) {
        console.info(`Generating new TTS for voice_id ${voiceId}`)
        const response = await fetch(`/api/openai/generate-voice`, {
            method: 'POST',
            headers: getRequestHeaders(),
            body: JSON.stringify({
                "text": inputText,
                "voice": voiceId,
                "model": this.settings.model,
                "speed": this.settings.speed,
            }),
        });

        if (!response.ok) {
            toastr.error(response.statusText, 'TTS Generation Failed');
            throw new Error(`HTTP ${response.status}: ${await response.text()}`);
        }

        return response;
    }
}

View File

@@ -0,0 +1,192 @@
import { doExtrasFetch, getApiUrl, modules } from "../../extensions.js"
import { saveTtsProviderSettings } from "./index.js"
export { XTTSTtsProvider }
class XTTSTtsProvider {
    //########//
    // Config //
    //########//

    settings
    ready = false
    voices = []
    separator = '. '

    // Display label -> XTTS language code, used to build the language dropdown.
    languageLabels = {
        "Arabic": "ar",
        "Brazilian Portuguese": "pt",
        "Chinese": "zh-cn",
        "Czech": "cs",
        "Dutch": "nl",
        "English": "en",
        "French": "fr",
        "German": "de",
        "Italian": "it",
        "Polish": "pl",
        "Russian": "ru",
        "Spanish": "es",
        "Turkish": "tr",
        "Japanese": "ja",
        "Korean": "ko",
        "Hungarian": "hu",
        "Hindi": "hi",
    }

    defaultSettings = {
        provider_endpoint: "http://localhost:8020",
        language: "en",
        voiceMap: {}
    }

    /**
     * HTML fragment rendered into the provider settings drawer:
     * a language dropdown and the server endpoint field.
     * @returns {string} Settings markup.
     */
    get settingsHtml() {
        let html = `
        <label for="xtts_api_language">Language</label>
        <select id="xtts_api_language">`;

        for (let language in this.languageLabels) {
            // Pre-select the currently configured language, if any.
            if (this.languageLabels[language] == this.settings?.language) {
                html += `<option value="${this.languageLabels[language]}" selected="selected">${language}</option>`;
                continue
            }

            html += `<option value="${this.languageLabels[language]}">${language}</option>`;
        }

        html += `
        </select>
        <label for="xtts_tts_endpoint">Provider Endpoint:</label>
        <input id="xtts_tts_endpoint" type="text" class="text_pole" maxlength="250" value="${this.defaultSettings.provider_endpoint}"/>
        `;

        html += `
        <span>
            <span>Use <a target="_blank" href="https://github.com/daswer123/xtts-api-server">XTTSv2 TTS Server</a>.</span>
        </span>
        `;
        return html;
    }

    // Used when provider settings are updated from UI
    onSettingsChange() {
        this.settings.provider_endpoint = $('#xtts_tts_endpoint').val()
        this.settings.language = $('#xtts_api_language').val()
        saveTtsProviderSettings()
    }

    /**
     * Applies persisted settings and wires up the settings UI controls.
     * @param {object} settings Previously saved provider settings (may be empty).
     * @throws {string} When a key not present in defaultSettings is passed.
     */
    async loadSettings(settings) {
        // Populate Provider UI given input settings
        if (Object.keys(settings).length == 0) {
            console.info("Using default TTS Provider settings")
        }

        // Start from a copy of the defaults so later mutations (onSettingsChange)
        // cannot corrupt defaultSettings for future resets.
        this.settings = structuredClone(this.defaultSettings)

        // Only accept keys defined in defaultSettings
        for (const key in settings) {
            if (key in this.settings) {
                this.settings[key] = settings[key]
            } else {
                throw `Invalid setting passed to TTS Provider: ${key}`
            }
        }

        // Poll for the Extras API: if its TTS module comes up, point the
        // endpoint at it and stop polling.
        const apiCheckInterval = setInterval(() => {
            // Use Extras API if TTS support is enabled
            if (modules.includes('tts') || modules.includes('xtts-tts')) {
                const baseUrl = new URL(getApiUrl());
                baseUrl.pathname = '/api/tts';
                this.settings.provider_endpoint = baseUrl.toString();
                $('#xtts_tts_endpoint').val(this.settings.provider_endpoint);
                clearInterval(apiCheckInterval);
            }
        }, 2000);

        $('#xtts_tts_endpoint').val(this.settings.provider_endpoint)
        $('#xtts_tts_endpoint').on("input", () => { this.onSettingsChange() })
        $('#xtts_api_language').val(this.settings.language)
        $('#xtts_api_language').on("change", () => { this.onSettingsChange() })

        await this.checkReady()
        console.debug("XTTS: Settings loaded")
    }

    // Perform a simple readiness check by trying to fetch voiceIds
    async checkReady() {
        await this.fetchTtsVoiceObjects()
    }

    // The speaker list is re-fetched lazily in getVoice; nothing to refresh here.
    async onRefreshClick() {
        return
    }

    //#################//
    //  TTS Interfaces //
    //#################//

    /**
     * Resolves a voice by its name, fetching the speaker list on first use.
     * @param {string} voiceName Speaker name as reported by the XTTS server.
     * @returns {Promise<object>} The matching voice descriptor.
     * @throws {string} When no voice with that name exists.
     */
    async getVoice(voiceName) {
        if (this.voices.length == 0) {
            this.voices = await this.fetchTtsVoiceObjects()
        }
        const match = this.voices.filter(
            XTTSVoice => XTTSVoice.name == voiceName
        )[0]
        if (!match) {
            throw `TTS Voice name ${voiceName} not found`
        }
        return match
    }

    /**
     * Generates speech audio for the given text.
     * @param {string} text Text to narrate.
     * @param {string} voiceId Speaker wav identifier.
     * @returns {Promise<Response>} Raw fetch response containing the audio.
     */
    async generateTts(text, voiceId) {
        const response = await this.fetchTtsGeneration(text, voiceId)
        return response
    }

    //###########//
    // API CALLS //
    //###########//

    /**
     * Fetches the available speakers from the XTTS server.
     * @returns {Promise<object[]>} Speaker descriptors.
     * @throws {Error} When the HTTP request fails.
     */
    async fetchTtsVoiceObjects() {
        const response = await doExtrasFetch(`${this.settings.provider_endpoint}/speakers`)
        if (!response.ok) {
            throw new Error(`HTTP ${response.status}: ${await response.text()}`)
        }
        const responseJson = await response.json()
        return responseJson
    }

    /**
     * Requests speech synthesis from the XTTS server.
     * @param {string} inputText Text to synthesize.
     * @param {string} voiceId Speaker wav identifier.
     * @returns {Promise<Response>} Fetch response with the generated audio.
     * @throws {Error} When the HTTP request fails (also shows a toast).
     */
    async fetchTtsGeneration(inputText, voiceId) {
        console.info(`Generating new TTS for voice_id ${voiceId}`)
        const response = await doExtrasFetch(
            `${this.settings.provider_endpoint}/tts_to_audio/`,
            {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json',
                    'Cache-Control': 'no-cache' // Added this line to disable caching of file so new files are always played - Rolyat 7/7/23
                },
                body: JSON.stringify({
                    "text": inputText,
                    "speaker_wav": voiceId,
                    "language": this.settings.language
                })
            }
        )
        if (!response.ok) {
            toastr.error(response.statusText, 'TTS Generation Failed');
            throw new Error(`HTTP ${response.status}: ${await response.text()}`);
        }
        return response
    }

    // Interface not used by XTTS TTS
    async fetchTtsFromHistory(history_item_id) {
        return Promise.resolve(history_item_id);
    }
}

View File

@@ -19,10 +19,10 @@
</select>
<div id="vectors_advanced_settings" data-newbie-hidden>
<label for="vectors_template">
Insertion template:
Insertion Template
</label>
<textarea id="vectors_template" class="text_pole textarea_compact autoSetHeight" rows="2" placeholder="Use {{text}} macro to specify the position of retrieved text."></textarea>
<label for="vectors_position">Injection position:</label>
<label for="vectors_position">Injection Position</label>
<div class="radio_group">
<label>
<input type="radio" name="vectors_position" value="2" />

View File

@@ -76,6 +76,7 @@ import { FILTER_TYPES, FilterHelper } from './filters.js';
export {
selected_group,
is_group_automode_enabled,
hideMutedSprites,
is_group_generating,
group_generation_id,
groups,
@@ -92,6 +93,7 @@ export {
let is_group_generating = false; // Group generation flag
let is_group_automode_enabled = false;
let hideMutedSprites = true;
let groups = [];
let selected_group = null;
let group_generation_id = null;
@@ -203,6 +205,69 @@ export async function getGroupChat(groupId) {
await eventSource.emit(event_types.CHAT_CHANGED, getCurrentChatId());
}
/**
 * Finds the character ID for a group member of the currently selected group.
 * @param {string} arg 1-based member index or (fuzzy-matched) character name
 * @returns {number|undefined} 0-based character ID, or undefined when the
 * argument is empty, no valid group is selected, or no matching member exists.
 */
export function findGroupMemberId(arg) {
    arg = arg?.trim();

    if (!arg) {
        console.warn('WARN: No argument provided for findGroupMemberId');
        return;
    }

    const group = groups.find(x => x.id == selected_group);

    if (!group || !Array.isArray(group.members)) {
        console.warn('WARN: No group found for selected group ID');
        return;
    }

    // Index is 1-based; a non-numeric argument means "search by name".
    const index = parseInt(arg, 10) - 1;
    const searchByName = isNaN(index);

    if (searchByName) {
        // Fuzzy-match the argument against member names; keep the character
        // index alongside the name so the best hit maps back to a chid.
        const memberNames = group.members.map(x => ({ name: characters.find(y => y.avatar === x)?.name, index: characters.findIndex(y => y.avatar === x) }));
        const fuse = new Fuse(memberNames, { keys: ['name'] });
        const result = fuse.search(arg);

        if (!result.length) {
            console.warn(`WARN: No group member found with name ${arg}`);
            return;
        }

        const chid = result[0].item.index;

        if (chid === -1) {
            console.warn(`WARN: No character found for group member ${arg}`);
            return;
        }

        console.log(`Triggering group member ${chid} (${arg}) from search result`, result[0]);
        return chid;
    } else {
        const memberAvatar = group.members[index];

        if (memberAvatar === undefined) {
            console.warn(`WARN: No group member found at index ${index}`);
            return;
        }

        const chid = characters.findIndex(x => x.avatar === memberAvatar);

        if (chid === -1) {
            console.warn(`WARN: No character found for group member ${memberAvatar} at index ${index}`);
            return;
        }

        console.log(`Triggering group member ${memberAvatar} at index ${index}`);
        return chid;
    }
}
/**
* Gets depth prompts for group members.
* @param {string} groupId Group ID
@@ -1109,7 +1174,7 @@ function printGroupCandidates() {
function printGroupMembers() {
const storageKey = 'GroupMembers_PerPage';
$(".rm_group_members_pagination").each(function() {
$(".rm_group_members_pagination").each(function () {
$(this).pagination({
dataSource: getGroupCharacters({ doFilter: false, onlyMembers: true }),
pageRange: 1,
@@ -1195,6 +1260,15 @@ async function onGroupSelfResponsesClick() {
}
}
/**
 * Persists the "hide muted sprites" checkbox state onto the currently
 * open group and saves it via editGroup.
 * @param {boolean} value New checkbox state.
 */
async function onHideMutedSpritesClick(value) {
    if (!openGroupId) {
        return;
    }

    const group = groups.find((x) => x.id == openGroupId);
    group.hideMutedSprites = value;
    console.log(`_thisGroup.hideMutedSprites = ${group.hideMutedSprites}`)
    await editGroup(openGroupId, false, false);
}
function select_group_chats(groupId, skipAnimation) {
openGroupId = groupId;
newGroupMembers = [];
@@ -1224,6 +1298,7 @@ function select_group_chats(groupId, skipAnimation) {
const groupHasMembers = !!$("#rm_group_members").children().length;
$("#rm_group_submit").prop("disabled", !groupHasMembers);
$("#rm_group_allow_self_responses").prop("checked", group && group.allow_self_responses);
$("#rm_group_hidemutedsprites").prop("checked", group && group.hideMutedSprites);
// bottom buttons
if (openGroupId) {
@@ -1454,6 +1529,7 @@ async function createGroup() {
members: members,
avatar_url: isValidImageUrl(avatar_url) ? avatar_url : default_avatar,
allow_self_responses: allowSelfResponses,
hideMutedSprites: hideMutedSprites,
activation_strategy: activationStrategy,
generation_mode: generationMode,
disabled_members: [],
@@ -1721,6 +1797,12 @@ jQuery(() => {
is_group_automode_enabled = value;
eventSource.once(event_types.GENERATION_STOPPED, stopAutoModeGeneration);
});
$("#rm_group_hidemutedsprites").on("input", function () {
const value = $(this).prop("checked");
hideMutedSprites = value;
onHideMutedSpritesClick(value);
});
$("#send_textarea").on("keyup", onSendTextareaInput);
$("#groupCurrentMemberPopoutButton").on('click', doCurMemberListPopout);
$("#rm_group_chat_name").on("input", onGroupNameInput)

View File

@@ -31,8 +31,8 @@ let horde_settings = {
trusted_workers_only: false,
};
const MAX_RETRIES = 240;
const CHECK_INTERVAL = 5000;
const MAX_RETRIES = 480;
const CHECK_INTERVAL = 2500;
const MIN_LENGTH = 16;
const getRequestArgs = () => ({
method: "GET",
@@ -152,7 +152,7 @@ async function generateHorde(prompt, params, signal, reportProgress) {
for (let retryNumber = 0; retryNumber < MAX_RETRIES; retryNumber++) {
if (signal.aborted) {
await fetch(`https://horde.koboldai.net/api/v2/generate/text/status/${task_id}`, {
fetch(`https://horde.koboldai.net/api/v2/generate/text/status/${task_id}`, {
method: 'DELETE',
headers: {
"Client-Agent": CLIENT_VERSION,

View File

@@ -358,6 +358,31 @@ function selectMatchingContextTemplate(name) {
}
}
/**
 * Replaces instruct mode macros in the given input string.
 * Instruct macros resolve to their configured sequences only while instruct
 * mode is enabled (empty string otherwise); context macros always resolve.
 * @param {string} input Input string.
 * @returns {string} String with macros replaced.
 */
export function replaceInstructMacros(input) {
    if (!input) {
        return '';
    }

    const instruct = power_user.instruct;
    const context = power_user.context;
    const enabled = instruct.enabled;

    // Macro name (matched case-insensitively inside {{...}}) -> replacement.
    const macroValues = [
        ['instructSystem', enabled ? instruct.system_prompt : ''],
        ['instructSystemPrefix', enabled ? instruct.system_sequence_prefix : ''],
        ['instructSystemSuffix', enabled ? instruct.system_sequence_suffix : ''],
        ['instructInput', enabled ? instruct.input_sequence : ''],
        ['instructOutput', enabled ? instruct.output_sequence : ''],
        ['instructFirstOutput', enabled ? (instruct.first_output_sequence || instruct.output_sequence) : ''],
        ['instructLastOutput', enabled ? (instruct.last_output_sequence || instruct.output_sequence) : ''],
        ['instructSeparator', enabled ? instruct.separator_sequence : ''],
        ['instructStop', enabled ? instruct.stop_sequence : ''],
        ['exampleSeparator', context.example_separator],
        ['chatStart', context.chat_start],
    ];

    let result = input;
    for (const [macro, value] of macroValues) {
        result = result.replace(new RegExp(`{{${macro}}}`, 'gi'), value);
    }

    return result;
}
jQuery(() => {
$('#instruct_set_default').on('click', function () {
if (power_user.instruct.preset === power_user.default_instruct) {

View File

@@ -31,6 +31,11 @@ export const kai_settings = {
seed: -1,
};
/**
* Stable version of KoboldAI has a nasty payload validation.
* It will reject any payload that has a key that is not in the whitelist.
* @typedef {Object.<string, boolean>} kai_flags
*/
export const kai_flags = {
can_use_tokenization: false,
can_use_stop_sequence: false,
@@ -38,6 +43,7 @@ export const kai_flags = {
can_use_default_badwordsids: false,
can_use_mirostat: false,
can_use_grammar: false,
can_use_min_p: false,
};
const defaultValues = Object.freeze(structuredClone(kai_settings));
@@ -48,6 +54,7 @@ const MIN_STREAMING_KCPPVERSION = '1.30';
const MIN_TOKENIZATION_KCPPVERSION = '1.41';
const MIN_MIROSTAT_KCPPVERSION = '1.35';
const MIN_GRAMMAR_KCPPVERSION = '1.44';
const MIN_MIN_P_KCPPVERSION = '1.48';
const KOBOLDCPP_ORDER = [6, 0, 1, 3, 4, 2, 5];
export function formatKoboldUrl(value) {
@@ -98,6 +105,7 @@ export function loadKoboldSettings(preset) {
*/
export function getKoboldGenerationData(finalPrompt, settings, maxLength, maxContextLength, isHorde, type) {
const isImpersonate = type === 'impersonate';
const isContinue = type === 'continue';
const sampler_order = kai_settings.sampler_order || settings.sampler_order;
let generate_data = {
@@ -114,7 +122,7 @@ export function getKoboldGenerationData(finalPrompt, settings, maxLength, maxCon
top_a: kai_settings.top_a,
top_k: kai_settings.top_k,
top_p: kai_settings.top_p,
min_p: kai_settings.min_p,
min_p: (kai_flags.can_use_min_p || isHorde) ? kai_settings.min_p : undefined,
typical: kai_settings.typical,
s1: sampler_order[0],
s2: sampler_order[1],
@@ -125,14 +133,14 @@ export function getKoboldGenerationData(finalPrompt, settings, maxLength, maxCon
s7: sampler_order[6],
use_world_info: false,
singleline: false,
stop_sequence: (kai_flags.can_use_stop_sequence || isHorde) ? getStoppingStrings(isImpersonate) : undefined,
stop_sequence: (kai_flags.can_use_stop_sequence || isHorde) ? getStoppingStrings(isImpersonate, isContinue) : undefined,
streaming: kai_settings.streaming_kobold && kai_flags.can_use_streaming && type !== 'quiet',
can_abort: kai_flags.can_use_streaming,
mirostat: kai_flags.can_use_mirostat ? kai_settings.mirostat : undefined,
mirostat_tau: kai_flags.can_use_mirostat ? kai_settings.mirostat_tau : undefined,
mirostat_eta: kai_flags.can_use_mirostat ? kai_settings.mirostat_eta : undefined,
use_default_badwordsids: kai_flags.can_use_default_badwordsids ? kai_settings.use_default_badwordsids : undefined,
grammar: kai_flags.can_use_grammar ? substituteParams(kai_settings.grammar) : undefined,
mirostat: (kai_flags.can_use_mirostat || isHorde) ? kai_settings.mirostat : undefined,
mirostat_tau: (kai_flags.can_use_mirostat || isHorde) ? kai_settings.mirostat_tau : undefined,
mirostat_eta: (kai_flags.can_use_mirostat || isHorde) ? kai_settings.mirostat_eta : undefined,
use_default_badwordsids: (kai_flags.can_use_default_badwordsids || isHorde) ? kai_settings.use_default_badwordsids : undefined,
grammar: (kai_flags.can_use_grammar || isHorde) ? substituteParams(kai_settings.grammar) : undefined,
sampler_seed: kai_settings.seed >= 0 ? kai_settings.seed : undefined,
};
return generate_data;
@@ -232,8 +240,8 @@ const sliders = [
},
{
name: "typical",
sliderId: "#typical",
counterId: "#typical_counter",
sliderId: "#typical_p",
counterId: "#typical_p_counter",
format: (val) => val,
setValue: (val) => { kai_settings.typical = Number(val); },
},
@@ -302,6 +310,7 @@ export function setKoboldFlags(version, koboldVersion) {
kai_flags.can_use_default_badwordsids = canUseDefaultBadwordIds(version);
kai_flags.can_use_mirostat = canUseMirostat(koboldVersion);
kai_flags.can_use_grammar = canUseGrammar(koboldVersion);
kai_flags.can_use_min_p = canUseMinP(koboldVersion);
}
/**
@@ -366,6 +375,17 @@ function canUseGrammar(koboldVersion) {
} else return false;
}
/**
 * Determines if the Kobold min_p can be used with the given version.
 * @param {{result:string, version:string;}} koboldVersion KoboldAI version object.
 * @returns {boolean} True if the Kobold min_p can be used, false otherwise.
 */
function canUseMinP(koboldVersion) {
    // Only KoboldCpp backends support min_p at all.
    if (!koboldVersion || koboldVersion.result != 'KoboldCpp') {
        return false;
    }

    // Numeric-aware comparison so e.g. "1.48" >= "1.48" and "1.50" > "1.48".
    const reportedVersion = koboldVersion.version || '0.0';
    const comparison = reportedVersion.localeCompare(MIN_MIN_P_KCPPVERSION, undefined, { numeric: true, sensitivity: 'base' });
    return comparison > -1;
}
/**
* Sorts the sampler items by the given order.
* @param {any[]} orderArray Sampler order array.

View File

@@ -38,9 +38,13 @@ function getMancerModelTemplate(option) {
return option.text;
}
const creditsPerPrompt = (model.limits?.context - model.limits?.completion) * model.pricing?.prompt;
const creditsPerCompletion = model.limits?.completion * model.pricing?.completion;
const creditsTotal = Math.round(creditsPerPrompt + creditsPerCompletion).toFixed(0);
return $((`
<div class="flex-container flexFlowColumn">
<div><strong>${DOMPurify.sanitize(model.name)}</strong> | <span>${model.limits?.context} ctx</span></div>
<div><strong>${DOMPurify.sanitize(model.name)}</strong> | <span>${model.limits?.context} ctx</span> / <span>${model.limits?.completion} res</span> | <small>Credits per request (max): ${creditsTotal}</small></div>
</div>
`));
}

View File

@@ -4,7 +4,8 @@ import {
getStoppingStrings,
novelai_setting_names,
saveSettingsDebounced,
setGenerationParamsFromPreset
setGenerationParamsFromPreset,
substituteParams,
} from "../script.js";
import { getCfgPrompt } from "./cfg-scale.js";
import { MAX_CONTEXT_DEFAULT } from "./power-user.js";
@@ -408,7 +409,7 @@ function getBadWordPermutations(text) {
return result.filter(onlyUnique);
}
export function getNovelGenerationData(finalPrompt, this_settings, this_amount_gen, isImpersonate, cfgValues) {
export function getNovelGenerationData(finalPrompt, settings, maxLength, isImpersonate, isContinue, cfgValues) {
if (cfgValues && cfgValues.guidanceScale && cfgValues.guidanceScale?.value !== 1) {
cfgValues.negativePrompt = (getCfgPrompt(cfgValues.guidanceScale, true))?.value;
}
@@ -418,7 +419,7 @@ export function getNovelGenerationData(finalPrompt, this_settings, this_amount_g
const tokenizerType = kayra ? tokenizers.NERD2 : (clio ? tokenizers.NERD : tokenizers.NONE);
const stopSequences = (tokenizerType !== tokenizers.NONE)
? getStoppingStrings(isImpersonate)
? getStoppingStrings(isImpersonate, isContinue)
.map(t => getTextTokens(tokenizerType, t))
: undefined;
@@ -439,7 +440,7 @@ export function getNovelGenerationData(finalPrompt, this_settings, this_amount_g
"model": nai_settings.model_novel,
"use_string": true,
"temperature": Number(nai_settings.temperature),
"max_length": this_amount_gen < maximum_output_length ? this_amount_gen : maximum_output_length,
"max_length": maxLength < maximum_output_length ? maxLength : maximum_output_length,
"min_length": Number(nai_settings.min_length),
"tail_free_sampling": Number(nai_settings.tail_free_sampling),
"repetition_penalty": Number(nai_settings.repetition_penalty),
@@ -454,7 +455,7 @@ export function getNovelGenerationData(finalPrompt, this_settings, this_amount_g
"mirostat_lr": Number(nai_settings.mirostat_lr),
"mirostat_tau": Number(nai_settings.mirostat_tau),
"cfg_scale": cfgValues?.guidanceScale?.value ?? Number(nai_settings.cfg_scale),
"cfg_uc": cfgValues?.negativePrompt ?? nai_settings.cfg_uc ?? "",
"cfg_uc": cfgValues?.negativePrompt ?? substituteParams(nai_settings.cfg_uc) ?? "",
"phrase_rep_pen": nai_settings.phrase_rep_pen,
"stop_sequences": stopSequences,
"bad_words_ids": badWordIds,
@@ -463,7 +464,7 @@ export function getNovelGenerationData(finalPrompt, this_settings, this_amount_g
"use_cache": false,
"return_full_text": false,
"prefix": prefix,
"order": nai_settings.order || this_settings.order || default_order,
"order": nai_settings.order || settings.order || default_order,
};
}

View File

@@ -5,72 +5,71 @@
*/
import {
saveSettingsDebounced,
setOnlineStatus,
abortStatusCheck,
callPopup,
characters,
event_types,
eventSource,
extension_prompt_types,
Generate,
getExtensionPrompt,
getNextMessageId,
getRequestHeaders,
getStoppingStrings,
is_send_press,
main_api,
MAX_INJECTION_DEPTH,
name1,
name2,
extension_prompt_types,
characters,
this_chid,
callPopup,
getRequestHeaders,
system_message_types,
replaceBiasMarkup,
is_send_press,
Generate,
main_api,
eventSource,
event_types,
substituteParams,
MAX_INJECTION_DEPTH,
getStoppingStrings,
getNextMessageId,
replaceItemizedPromptText,
startStatusLoading,
resultCheckStatus,
abortStatusCheck,
saveSettingsDebounced,
setOnlineStatus,
startStatusLoading,
substituteParams,
system_message_types,
this_chid,
} from "../script.js";
import { groups, selected_group } from "./group-chats.js";
import {
promptManagerDefaultPromptOrders,
chatCompletionDefaultPrompts, Prompt,
PromptManagerModule as PromptManager,
chatCompletionDefaultPrompts,
INJECTION_POSITION,
Prompt,
promptManagerDefaultPromptOrders,
PromptManagerModule as PromptManager,
} from "./PromptManager.js";
import {
getCustomStoppingStrings,
persona_description_positions,
power_user,
} from "./power-user.js";
import {
SECRET_KEYS,
secret_state,
writeSecret,
} from "./secrets.js";
import { getCustomStoppingStrings, persona_description_positions, power_user, } from "./power-user.js";
import { SECRET_KEYS, secret_state, writeSecret, } from "./secrets.js";
import {
delay,
download,
getFileText, getSortableDelay,
getBase64Async,
getFileText,
getSortableDelay,
isDataURL,
parseJsonFile,
resetScrollHeight,
stringFormat,
} from "./utils.js";
import { countTokensOpenAI, getTokenizerModel } from "./tokenizers.js";
import { formatInstructModeChat, formatInstructModeExamples, formatInstructModePrompt, formatInstructModeSystemPrompt } from "./instruct-mode.js";
import {
formatInstructModeChat,
formatInstructModeExamples,
formatInstructModePrompt,
formatInstructModeSystemPrompt
} from "./instruct-mode.js";
export {
openai_msgs,
openai_messages_count,
oai_settings,
loadOpenAISettings,
setOpenAIMessages,
setOpenAIMessageExamples,
setupChatCompletionPromptManager,
prepareOpenAIMessages,
sendOpenAIRequest,
getChatCompletionModel,
TokenHandler,
@@ -79,8 +78,6 @@ export {
MessageCollection
}
let openai_msgs = [];
let openai_msgs_example = [];
let openai_messages_count = 0;
let openai_narrator_messages_count = 0;
@@ -111,6 +108,7 @@ const max_8k = 8191;
const max_16k = 16383;
const max_32k = 32767;
const max_128k = 128 * 1000;
const max_200k = 200 * 1000;
const scale_max = 8191;
const claude_max = 9000; // We have a proper tokenizer, so theoretically could be larger (up to 9k)
const palm2_max = 7500; // The real context window is 8192, spare some for padding due to using turbo tokenizer
@@ -208,6 +206,8 @@ const default_settings = {
openrouter_model: openrouter_website_model,
openrouter_use_fallback: false,
openrouter_force_instruct: false,
openrouter_group_models: false,
openrouter_sort_models: 'alphabetically',
jailbreak_system: false,
reverse_proxy: '',
legacy_streaming: false,
@@ -221,6 +221,8 @@ const default_settings = {
exclude_assistant: false,
use_alt_scale: false,
squash_system_messages: false,
image_inlining: false,
bypass_status_check: false,
};
const oai_settings = {
@@ -254,6 +256,8 @@ const oai_settings = {
openrouter_model: openrouter_website_model,
openrouter_use_fallback: false,
openrouter_force_instruct: false,
openrouter_group_models: false,
openrouter_sort_models: 'alphabetically',
jailbreak_system: false,
reverse_proxy: '',
legacy_streaming: false,
@@ -267,6 +271,8 @@ const oai_settings = {
exclude_assistant: false,
use_alt_scale: false,
squash_system_messages: false,
image_inlining: false,
bypass_status_check: false,
};
let openai_setting_names;
@@ -380,10 +386,15 @@ function convertChatCompletionToInstruct(messages, type) {
return prompt;
}
/**
* Formats chat messages into chat completion messages.
* @param {object[]} chat - Array containing all messages.
* @returns {object[]} - Array containing all messages formatted for chat completion.
*/
function setOpenAIMessages(chat) {
let j = 0;
// clean openai msgs
openai_msgs = [];
const messages = [];
openai_narrator_messages_count = 0;
for (let i = chat.length - 1; i >= 0; i--) {
let role = chat[j]['is_user'] ? 'user' : 'assistant';
@@ -409,21 +420,30 @@ function setOpenAIMessages(chat) {
// Apply the "wrap in quotes" option
if (role == 'user' && oai_settings.wrap_in_quotes) content = `"${content}"`;
const name = chat[j]['name'];
openai_msgs[i] = { "role": role, "content": content, name: name };
const image = chat[j]?.extra?.image;
messages[i] = { "role": role, "content": content, name: name, "image": image };
j++;
}
return messages
}
/**
* Formats chat messages into chat completion messages.
* @param {string[]} mesExamplesArray - Array containing all examples.
* @returns {object[]} - Array containing all examples formatted for chat completion.
*/
function setOpenAIMessageExamples(mesExamplesArray) {
// get a nice array of all blocks of all example messages = array of arrays (important!)
openai_msgs_example = [];
const examples = [];
for (let item of mesExamplesArray) {
// remove <START> {Example Dialogue:} and replace \r\n with just \n
let replaced = item.replace(/<START>/i, "{Example Dialogue:}").replace(/\r/gm, '');
let parsed = parseExampleIntoIndividual(replaced);
// add to the example message blocks array
openai_msgs_example.push(parsed);
examples.push(parsed);
}
return examples;
}
/**
@@ -545,8 +565,9 @@ function formatWorldInfo(value) {
* This function populates the injections in the conversation.
*
* @param {Prompt[]} prompts - Array containing injection prompts.
* @param {Object[]} messages - Array containing all messages.
*/
function populationInjectionPrompts(prompts) {
function populationInjectionPrompts(prompts, messages) {
let totalInsertedMessages = 0;
for (let i = 0; i <= MAX_INJECTION_DEPTH; i++) {
@@ -572,12 +593,13 @@ function populationInjectionPrompts(prompts) {
if (roleMessages.length) {
const injectIdx = i + totalInsertedMessages;
openai_msgs.splice(injectIdx, 0, ...roleMessages);
messages.splice(injectIdx, 0, ...roleMessages);
totalInsertedMessages += roleMessages.length;
}
}
openai_msgs = openai_msgs.reverse();
messages = messages.reverse();
return messages;
}
export function isOpenRouterWithInstruct() {
@@ -586,13 +608,13 @@ export function isOpenRouterWithInstruct() {
/**
* Populates the chat history of the conversation.
*
* @param {object[]} messages - Array containing all messages.
* @param {PromptCollection} prompts - Map object containing all prompts where the key is the prompt identifier and the value is the prompt object.
* @param {ChatCompletion} chatCompletion - An instance of ChatCompletion class that will be populated with the prompts.
* @param type
* @param cyclePrompt
*/
function populateChatHistory(prompts, chatCompletion, type = null, cyclePrompt = null) {
async function populateChatHistory(messages, prompts, chatCompletion, type = null, cyclePrompt = null) {
chatCompletion.add(new MessageCollection('chatHistory'), prompts.index('chatHistory'));
let names = (selected_group && groups.find(x => x.id === selected_group)?.members.map(member => characters.find(c => c.avatar === member)?.name).filter(Boolean).join(', ')) || '';
@@ -623,17 +645,22 @@ function populateChatHistory(prompts, chatCompletion, type = null, cyclePrompt =
chatCompletion.reserveBudget(continueMessage);
}
const lastChatPrompt = openai_msgs[openai_msgs.length - 1];
const lastChatPrompt = messages[messages.length - 1];
const message = new Message('user', oai_settings.send_if_empty, 'emptyUserMessageReplacement');
if (lastChatPrompt && lastChatPrompt.role === 'assistant' && oai_settings.send_if_empty && chatCompletion.canAfford(message)) {
chatCompletion.insert(message, 'chatHistory');
}
const imageInlining = isImageInliningSupported();
// Insert chat messages as long as there is budget available
[...openai_msgs].reverse().every((chatPrompt, index) => {
const chatPool = [...messages].reverse();
for (let index = 0; index < chatPool.length; index++) {
const chatPrompt = chatPool[index];
// We do not want to mutate the prompt
const prompt = new Prompt(chatPrompt);
prompt.identifier = `chatHistory-${openai_msgs.length - index}`;
prompt.identifier = `chatHistory-${messages.length - index}`;
const chatMessage = Message.fromPrompt(promptManager.preparePrompt(prompt));
if (true === promptManager.serviceSettings.names_in_completion && prompt.name) {
@@ -641,10 +668,16 @@ function populateChatHistory(prompts, chatCompletion, type = null, cyclePrompt =
chatMessage.setName(messageName);
}
if (chatCompletion.canAfford(chatMessage)) chatCompletion.insertAtStart(chatMessage, 'chatHistory');
else return false;
return true;
});
if (imageInlining && chatPrompt.image) {
await chatMessage.addImage(chatPrompt.image);
}
if (chatCompletion.canAfford(chatMessage)) {
chatCompletion.insertAtStart(chatMessage, 'chatHistory');
} else {
break;
}
}
// Insert and free new chat
chatCompletion.freeBudget(newChatMessage);
@@ -668,12 +701,13 @@ function populateChatHistory(prompts, chatCompletion, type = null, cyclePrompt =
*
* @param {PromptCollection} prompts - Map object containing all prompts where the key is the prompt identifier and the value is the prompt object.
* @param {ChatCompletion} chatCompletion - An instance of ChatCompletion class that will be populated with the prompts.
* @param {Object[]} messageExamples - Array containing all message examples.
*/
function populateDialogueExamples(prompts, chatCompletion) {
function populateDialogueExamples(prompts, chatCompletion, messageExamples) {
chatCompletion.add(new MessageCollection('dialogueExamples'), prompts.index('dialogueExamples'));
if (openai_msgs_example.length) {
if (Array.isArray(messageExamples) && messageExamples.length) {
const newExampleChat = new Message('system', oai_settings.new_example_chat_prompt, 'newChat');
[...openai_msgs_example].forEach((dialogue, dialogueIndex) => {
[...messageExamples].forEach((dialogue, dialogueIndex) => {
let examplesAdded = 0;
if (chatCompletion.canAfford(newExampleChat)) chatCompletion.insert(newExampleChat, 'dialogueExamples');
@@ -722,14 +756,24 @@ function getPromptPosition(position) {
* @param {Object} options - An object with optional settings.
* @param {string} options.bias - A bias to be added in the conversation.
* @param {string} options.quietPrompt - Instruction prompt for extras
* @param {string} options.quietImage - Image prompt for extras
* @param {string} options.type - The type of the chat, can be 'impersonate'.
* @param {string} options.cyclePrompt - The last prompt in the conversation.
* @param {object[]} options.messages - Array containing all messages.
* @param {object[]} options.messageExamples - Array containing all message examples.
* @returns {Promise<void>}
*/
function populateChatCompletion(prompts, chatCompletion, { bias, quietPrompt, type, cyclePrompt } = {}) {
async function populateChatCompletion(prompts, chatCompletion, { bias, quietPrompt, quietImage, type, cyclePrompt, messages, messageExamples } = {}) {
// Helper function for preparing a prompt, that already exists within the prompt collection, for completion
const addToChatCompletion = (source, target = null) => {
// We need the prompts array to determine a position for the source.
if (false === prompts.has(source)) return;
if (promptManager.isPromptDisabledForActiveCharacter(source)) {
promptManager.log(`Skipping prompt ${source} because it is disabled`);
return;
}
const prompt = prompts.get(source);
const index = target ? prompts.index(target) : prompts.index(source);
const collection = new MessageCollection(source);
@@ -756,7 +800,13 @@ function populateChatCompletion(prompts, chatCompletion, { bias, quietPrompt, ty
// Add quiet prompt to control prompts
// This should always be last, even in control prompts. Add all further control prompts BEFORE this prompt
const quietPromptMessage = Message.fromPrompt(prompts.get('quietPrompt')) ?? null;
if (quietPromptMessage && quietPromptMessage.content) controlPrompts.add(quietPromptMessage);
if (quietPromptMessage && quietPromptMessage.content) {
if (isImageInliningSupported() && quietImage) {
await quietPromptMessage.addImage(quietImage);
}
controlPrompts.add(quietPromptMessage);
}
chatCompletion.reserveBudget(controlPrompts);
@@ -820,15 +870,15 @@ function populateChatCompletion(prompts, chatCompletion, { bias, quietPrompt, ty
}
// Add in-chat injections
populationInjectionPrompts(userAbsolutePrompts);
messages = populationInjectionPrompts(userAbsolutePrompts, messages);
// Decide whether dialogue examples should always be added
if (power_user.pin_examples) {
populateDialogueExamples(prompts, chatCompletion);
populateChatHistory(prompts, chatCompletion, type, cyclePrompt);
populateDialogueExamples(prompts, chatCompletion, messageExamples);
await populateChatHistory(messages, prompts, chatCompletion, type, cyclePrompt);
} else {
populateChatHistory(prompts, chatCompletion, type, cyclePrompt);
populateDialogueExamples(prompts, chatCompletion);
await populateChatHistory(messages, prompts, chatCompletion, type, cyclePrompt);
populateDialogueExamples(prompts, chatCompletion, messageExamples);
}
chatCompletion.freeBudget(controlPrompts);
@@ -966,10 +1016,12 @@ function preparePromptsForChatCompletion({ Scenario, charPersonality, name2, wor
* @param {string} content.quietPrompt - The quiet prompt to be used in the conversation.
* @param {string} content.cyclePrompt - The last prompt used for chat message continuation.
* @param {Array} content.extensionPrompts - An array of additional prompts.
* @param {object[]} content.messages - An array of messages to be used as chat history.
* @param {string[]} content.messageExamples - An array of messages to be used as dialogue examples.
* @param dryRun - Whether this is a live call or not.
* @returns {(*[]|boolean)[]} An array where the first element is the prepared chat and the second element is a boolean flag.
*/
function prepareOpenAIMessages({
export async function prepareOpenAIMessages({
name2,
charDescription,
charPersonality,
@@ -979,11 +1031,14 @@ function prepareOpenAIMessages({
bias,
type,
quietPrompt,
quietImage,
extensionPrompts,
cyclePrompt,
systemPromptOverride,
jailbreakPromptOverride,
personaDescription
personaDescription,
messages,
messageExamples,
} = {}, dryRun) {
// Without a character selected, there is no way to accurately calculate tokens
if (!promptManager.activeCharacter && dryRun) return [null, false];
@@ -1004,15 +1059,18 @@ function prepareOpenAIMessages({
worldInfoAfter,
charDescription,
quietPrompt,
quietImage,
bias,
extensionPrompts,
systemPromptOverride,
jailbreakPromptOverride,
personaDescription
personaDescription,
messages,
messageExamples,
});
// Fill the chat completion with as much context as the budget allows
populateChatCompletion(prompts, chatCompletion, { bias, quietPrompt, type, cyclePrompt });
await populateChatCompletion(prompts, chatCompletion, { bias, quietPrompt, quietImage, type, cyclePrompt, messages, messageExamples });
} catch (error) {
if (error instanceof TokenBudgetExceededError) {
toastr.error('An error occurred while counting tokens: Token budget exceeded.')
@@ -1083,7 +1141,7 @@ function checkQuotaError(data) {
}
}
async function sendWindowAIRequest(openai_msgs_tosend, signal, stream) {
async function sendWindowAIRequest(messages, signal, stream) {
if (!('ai' in window)) {
return showWindowExtensionError();
}
@@ -1138,7 +1196,7 @@ async function sendWindowAIRequest(openai_msgs_tosend, signal, stream) {
const generatePromise = window.ai.generateText(
{
messages: openai_msgs_tosend,
messages: messages,
},
{
temperature: temperature,
@@ -1224,18 +1282,16 @@ function saveModelList(data) {
model_list.sort((a, b) => a?.id && b?.id && a.id.localeCompare(b.id));
if (oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER) {
model_list = openRouterSortBy(model_list, oai_settings.openrouter_sort_models);
$('#model_openrouter_select').empty();
$('#model_openrouter_select').append($('<option>', { value: openrouter_website_model, text: 'Use OpenRouter website setting' }));
model_list.forEach((model) => {
let tokens_dollar = Number(1 / (1000 * model.pricing.prompt));
let tokens_rounded = (Math.round(tokens_dollar * 1000) / 1000).toFixed(0);
let model_description = `${model.id} | ${tokens_rounded}k t/$ | ${model.context_length} ctx`;
$('#model_openrouter_select').append(
$('<option>', {
value: model.id,
text: model_description,
}));
});
if (true === oai_settings.openrouter_group_models) {
appendOpenRouterOptions(openRouterGroupByVendor(model_list), oai_settings.openrouter_group_models);
} else {
appendOpenRouterOptions(model_list);
}
$('#model_openrouter_select').val(oai_settings.openrouter_model).trigger('change');
}
@@ -1256,11 +1312,72 @@ function saveModelList(data) {
}
}
async function sendAltScaleRequest(openai_msgs_tosend, logit_bias, signal, type) {
/**
 * Populates the OpenRouter model dropdown with the given model list.
 * Always prepends the 'Use OpenRouter website setting' option first.
 * @param {object[]|Map<string, object[]>} model_list - Flat array of model objects, or a
 *   vendor -> models Map (see openRouterGroupByVendor) when groupModels is true.
 * @param {boolean} [groupModels=false] - Whether to render models under an <optgroup> per vendor.
 * @param {boolean} [sort=false] - Unused; kept for call-site compatibility.
 */
function appendOpenRouterOptions(model_list, groupModels = false, sort = false) {
$('#model_openrouter_select').append($('<option>', { value: openrouter_website_model, text: 'Use OpenRouter website setting' }));

const appendOption = (model, parent = null) => {
    // Tokens-per-dollar estimate; pricing?.prompt is a per-token USD price string.
    let tokens_dollar = Number(1 / (1000 * model.pricing?.prompt));
    let tokens_rounded = (Math.round(tokens_dollar * 1000) / 1000).toFixed(0);
    const price = 0 === Number(model.pricing?.prompt) ? 'Free' : `${tokens_rounded}k t/$ `;

    let model_description = `${model.id} | ${price} | ${model.context_length} ctx`;

    (parent || $('#model_openrouter_select')).append(
        $('<option>', {
            value: model.id,
            text: model_description,
        }));
};

if (groupModels) {
    model_list.forEach((models, vendor) => {
        // Build the optgroup with an attribute map so the vendor name is set as a
        // plain DOM attribute instead of being interpolated into an HTML string.
        const optgroup = $('<optgroup>', { label: vendor });

        models.forEach((model) => {
            appendOption(model, optgroup);
        });

        $('#model_openrouter_select').append(optgroup);
    });
} else {
    model_list.forEach((model) => {
        appendOption(model);
    });
}
}
/**
 * Sorts OpenRouter models by the selected property.
 * @param {object[]} data - Array of model objects from the OpenRouter model list.
 * @param {string} [property='alphabetically'] - Sort mode: 'context_length' (descending),
 *   'pricing.prompt' (cheapest first), or anything else for alphabetical order by id.
 * @returns {object[]} A new sorted array; the input array is not mutated.
 */
const openRouterSortBy = (data, property = 'alphabetically') => {
    return [...data].sort((a, b) => {
        if (property === 'context_length') {
            return b.context_length - a.context_length;
        }
        if (property === 'pricing.prompt') {
            return parseFloat(a.pricing.prompt) - parseFloat(b.pricing.prompt);
        }
        // Alphabetically by id. The original comparator returned a non-number
        // (false/undefined) when either id was missing, which is an invalid
        // comparator result; fall back to 0 to keep such entries in place.
        if (a?.id && b?.id) {
            return a.id.localeCompare(b.id);
        }
        return 0;
    });
};
/**
 * Groups OpenRouter models by their vendor prefix (the part of the id before '/').
 * @param {object[]} array - Flat list of model objects with string ids.
 * @returns {Map<string, object[]>} Map of vendor name to its models, in encounter order.
 */
function openRouterGroupByVendor(array) {
    const groups = new Map();

    for (const model of array) {
        const vendor = model.id.split('/')[0];
        const bucket = groups.get(vendor);

        if (bucket) {
            bucket.push(model);
        } else {
            groups.set(vendor, [model]);
        }
    }

    return groups;
}
async function sendAltScaleRequest(messages, logit_bias, signal, type) {
const generate_url = '/generate_altscale';
let firstSysMsgs = []
for (let msg of openai_msgs_tosend) {
for (let msg of messages) {
if (msg.role === 'system') {
firstSysMsgs.push(substituteParams(msg.name ? msg.name + ": " + msg.content : msg.content));
} else {
@@ -1268,20 +1385,20 @@ async function sendAltScaleRequest(openai_msgs_tosend, logit_bias, signal, type)
}
}
let subsequentMsgs = openai_msgs_tosend.slice(firstSysMsgs.length);
let subsequentMsgs = messages.slice(firstSysMsgs.length);
const joinedSysMsgs = substituteParams(firstSysMsgs.join("\n"));
const joinedSubsequentMsgs = subsequentMsgs.reduce((acc, obj) => {
return acc + obj.role + ": " + obj.content + "\n";
}, "");
openai_msgs_tosend = substituteParams(joinedSubsequentMsgs);
messages = substituteParams(joinedSubsequentMsgs);
const messageId = getNextMessageId(type);
replaceItemizedPromptText(messageId, openai_msgs_tosend);
replaceItemizedPromptText(messageId, messages);
const generate_data = {
sysprompt: joinedSysMsgs,
prompt: openai_msgs_tosend,
prompt: messages,
temp: Number(oai_settings.temp_openai),
top_p: Number(oai_settings.top_p_openai),
max_tokens: Number(oai_settings.openai_max_tokens),
@@ -1299,18 +1416,18 @@ async function sendAltScaleRequest(openai_msgs_tosend, logit_bias, signal, type)
return data.output;
}
async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
async function sendOpenAIRequest(type, messages, signal) {
// Provide default abort signal
if (!signal) {
signal = new AbortController().signal;
}
// HACK: Filter out null and non-object messages
if (!Array.isArray(openai_msgs_tosend)) {
throw new Error('openai_msgs_tosend must be an array');
if (!Array.isArray(messages)) {
throw new Error('messages must be an array');
}
openai_msgs_tosend = openai_msgs_tosend.filter(msg => msg && typeof msg === 'object');
messages = messages.filter(msg => msg && typeof msg === 'object');
let logit_bias = {};
const messageId = getNextMessageId(type);
@@ -1323,26 +1440,27 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
const isTextCompletion = (isOAI && textCompletionModels.includes(oai_settings.openai_model)) || (isOpenRouter && oai_settings.openrouter_force_instruct && power_user.instruct.enabled);
const isQuiet = type === 'quiet';
const isImpersonate = type === 'impersonate';
const isContinue = type === 'continue';
const stream = oai_settings.stream_openai && !isQuiet && !isScale && !isAI21 && !isPalm;
if (isTextCompletion && isOpenRouter) {
openai_msgs_tosend = convertChatCompletionToInstruct(openai_msgs_tosend, type);
replaceItemizedPromptText(messageId, openai_msgs_tosend);
messages = convertChatCompletionToInstruct(messages, type);
replaceItemizedPromptText(messageId, messages);
}
if (isAI21 || isPalm) {
const joinedMsgs = openai_msgs_tosend.reduce((acc, obj) => {
const joinedMsgs = messages.reduce((acc, obj) => {
const prefix = prefixMap[obj.role];
return acc + (prefix ? (selected_group ? "\n" : prefix + " ") : "") + obj.content + "\n";
}, "");
openai_msgs_tosend = substituteParams(joinedMsgs) + (isImpersonate ? `${name1}:` : `${name2}:`);
replaceItemizedPromptText(messageId, openai_msgs_tosend);
messages = substituteParams(joinedMsgs) + (isImpersonate ? `${name1}:` : `${name2}:`);
replaceItemizedPromptText(messageId, messages);
}
// If we're using the window.ai extension, use that instead
// Doesn't support logit bias yet
if (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI) {
return sendWindowAIRequest(openai_msgs_tosend, signal, stream);
return sendWindowAIRequest(messages, signal, stream);
}
const logitBiasSources = [chat_completion_sources.OPENAI, chat_completion_sources.OPENROUTER, chat_completion_sources.SCALE];
@@ -1355,12 +1473,12 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
}
if (isScale && oai_settings.use_alt_scale) {
return sendAltScaleRequest(openai_msgs_tosend, logit_bias, signal, type);
return sendAltScaleRequest(messages, logit_bias, signal, type);
}
const model = getChatCompletionModel();
const generate_data = {
"messages": openai_msgs_tosend,
"messages": messages,
"model": model,
"temperature": Number(oai_settings.temp_openai),
"frequency_penalty": Number(oai_settings.freq_pen_openai),
@@ -1372,6 +1490,16 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
"stop": getCustomStoppingStrings(openai_max_stop_strings),
};
// Empty array will produce a validation error
if (!Array.isArray(generate_data.stop) || !generate_data.stop.length) {
delete generate_data.stop;
}
// Vision models don't support logit bias
if (isImageInliningSupported()) {
delete generate_data.logit_bias;
}
// Proxy is only supported for Claude and OpenAI
if (oai_settings.reverse_proxy && [chat_completion_sources.CLAUDE, chat_completion_sources.OPENAI].includes(oai_settings.chat_completion_source)) {
validateReverseProxy();
@@ -1396,7 +1524,7 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
generate_data['use_fallback'] = oai_settings.openrouter_use_fallback;
if (isTextCompletion) {
generate_data['stop'] = getStoppingStrings(isImpersonate);
generate_data['stop'] = getStoppingStrings(isImpersonate, isContinue);
}
}
@@ -1640,7 +1768,18 @@ class InvalidCharacterNameError extends Error {
* Used for creating, managing, and interacting with a specific message object.
*/
class Message {
tokens; identifier; role; content; name;
static tokensPerImage = 85;
/** @type {number} */
tokens;
/** @type {string} */
identifier;
/** @type {string} */
role;
/** @type {string|any[]} */
content;
/** @type {string} */
name;
/**
* @constructor
@@ -1653,6 +1792,11 @@ class Message {
this.role = role;
this.content = content;
if (!this.role) {
console.log(`Message role not set, defaulting to 'system' for identifier '${this.identifier}'`);
this.role = 'system';
}
if (typeof this.content === 'string' && this.content.length > 0) {
this.tokens = tokenHandler.count({ role: this.role, content: this.content });
} else {
@@ -1665,6 +1809,30 @@ class Message {
this.tokens = tokenHandler.count({ role: this.role, content: this.content, name: this.name });
}
/**
 * Attaches an image to this message, converting its content into the
 * multimodal (text + image_url) array format. Non-data URLs are fetched and
 * converted to base64 first; on fetch failure the message is left unchanged.
 * @param {string} image - Data URL or fetchable image URL.
 * @returns {Promise<void>}
 */
async addImage(image) {
// Preserve the original string content before switching to the array form.
const textContent = this.content;
const isDataUrl = isDataURL(image);
if (!isDataUrl) {
try {
// force-cache: reuse a previously downloaded copy of the same image if possible.
const response = await fetch(image, { method: 'GET', cache: 'force-cache' });
if (!response.ok) throw new Error('Failed to fetch image');
const blob = await response.blob();
image = await getBase64Async(blob);
} catch (error) {
// Best-effort: skip the image rather than failing the whole prompt build.
console.error('Image adding skipped', error);
return;
}
}
this.content = [
{ type: "text", text: textContent },
{ type: "image_url", image_url: { "url": image, "detail": "low" } },
];
// Flat per-image token cost (Message.tokensPerImage) added to this message's count.
this.tokens += Message.tokensPerImage;
}
/**
* Create a new Message instance from a prompt.
* @static
@@ -2140,6 +2308,8 @@ function loadOpenAISettings(data, settings) {
oai_settings.claude_model = settings.claude_model ?? default_settings.claude_model;
oai_settings.windowai_model = settings.windowai_model ?? default_settings.windowai_model;
oai_settings.openrouter_model = settings.openrouter_model ?? default_settings.openrouter_model;
oai_settings.openrouter_group_models = settings.openrouter_group_models ?? default_settings.openrouter_group_models;
oai_settings.openrouter_sort_models = settings.openrouter_sort_models ?? default_settings.openrouter_sort_models;
oai_settings.openrouter_use_fallback = settings.openrouter_use_fallback ?? default_settings.openrouter_use_fallback;
oai_settings.openrouter_force_instruct = settings.openrouter_force_instruct ?? default_settings.openrouter_force_instruct;
oai_settings.ai21_model = settings.ai21_model ?? default_settings.ai21_model;
@@ -2148,6 +2318,8 @@ function loadOpenAISettings(data, settings) {
oai_settings.show_external_models = settings.show_external_models ?? default_settings.show_external_models;
oai_settings.proxy_password = settings.proxy_password ?? default_settings.proxy_password;
oai_settings.assistant_prefill = settings.assistant_prefill ?? default_settings.assistant_prefill;
oai_settings.image_inlining = settings.image_inlining ?? default_settings.image_inlining;
oai_settings.bypass_status_check = settings.bypass_status_check ?? default_settings.bypass_status_check;
oai_settings.prompts = settings.prompts ?? default_settings.prompts;
oai_settings.prompt_order = settings.prompt_order ?? default_settings.prompt_order;
@@ -2168,6 +2340,8 @@ function loadOpenAISettings(data, settings) {
$('#api_url_scale').val(oai_settings.api_url_scale);
$('#openai_proxy_password').val(oai_settings.proxy_password);
$('#claude_assistant_prefill').val(oai_settings.assistant_prefill);
$('#openai_image_inlining').prop('checked', oai_settings.image_inlining);
$('#openai_bypass_status_check').prop('checked', oai_settings.bypass_status_check);
$('#model_openai_select').val(oai_settings.openai_model);
$(`#model_openai_select option[value="${oai_settings.openai_model}"`).attr('selected', true);
@@ -2180,6 +2354,7 @@ function loadOpenAISettings(data, settings) {
$('#openai_max_context').val(oai_settings.openai_max_context);
$('#openai_max_context_counter').val(`${oai_settings.openai_max_context}`);
$('#model_openrouter_select').val(oai_settings.openrouter_model);
$('#openrouter_sort_models').val(oai_settings.openrouter_sort_models);
$('#openai_max_tokens').val(oai_settings.openai_max_tokens);
@@ -2194,6 +2369,7 @@ function loadOpenAISettings(data, settings) {
$('#scale-alt').prop('checked', oai_settings.use_alt_scale);
$('#openrouter_use_fallback').prop('checked', oai_settings.openrouter_use_fallback);
$('#openrouter_force_instruct').prop('checked', oai_settings.openrouter_force_instruct);
$('#openrouter_group_models').prop('checked', oai_settings.openrouter_group_models);
$('#squash_system_messages').prop('checked', oai_settings.squash_system_messages);
if (settings.impersonation_prompt !== undefined) oai_settings.impersonation_prompt = settings.impersonation_prompt;
@@ -2277,6 +2453,11 @@ async function getStatusOpen() {
validateReverseProxy();
}
const canBypass = oai_settings.chat_completion_source === chat_completion_sources.OPENAI && oai_settings.bypass_status_check;
if (canBypass) {
setOnlineStatus('Status check bypassed');
}
try {
const response = await fetch('/getstatus_openai', {
method: 'POST',
@@ -2292,14 +2473,18 @@ async function getStatusOpen() {
const responseData = await response.json();
if (!('error' in responseData))
if (!('error' in responseData)) {
setOnlineStatus('Valid');
}
if ('data' in responseData && Array.isArray(responseData.data)) {
saveModelList(responseData.data);
}
} catch (error) {
console.error(error);
setOnlineStatus('no_connection');
if (!canBypass) {
setOnlineStatus('no_connection');
}
}
return resultCheckStatus();
@@ -2353,6 +2538,8 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
openrouter_model: settings.openrouter_model,
openrouter_use_fallback: settings.openrouter_use_fallback,
openrouter_force_instruct: settings.openrouter_force_instruct,
openrouter_group_models: settings.openrouter_group_models,
openrouter_sort_models: settings.openrouter_sort_models,
ai21_model: settings.ai21_model,
temperature: settings.temp_openai,
frequency_penalty: settings.freq_pen_openai,
@@ -2388,6 +2575,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
exclude_assistant: settings.exclude_assistant,
use_alt_scale: settings.use_alt_scale,
squash_system_messages: settings.squash_system_messages,
image_inlining: settings.image_inlining,
};
const savePresetSettings = await fetch(`/api/presets/save-openai?name=${name}`, {
@@ -2715,6 +2903,8 @@ function onSettingsPresetChange() {
openrouter_model: ['#model_openrouter_select', 'openrouter_model', false],
openrouter_use_fallback: ['#openrouter_use_fallback', 'openrouter_use_fallback', true],
openrouter_force_instruct: ['#openrouter_force_instruct', 'openrouter_force_instruct', true],
openrouter_group_models: ['#openrouter_group_models', 'openrouter_group_models', false],
openrouter_sort_models: ['#openrouter_sort_models', 'openrouter_sort_models', false],
ai21_model: ['#model_ai21_select', 'ai21_model', false],
openai_max_context: ['#openai_max_context', 'openai_max_context', false],
openai_max_tokens: ['#openai_max_tokens', 'openai_max_tokens', false],
@@ -2741,6 +2931,7 @@ function onSettingsPresetChange() {
exclude_assistant: ['#exclude_assistant', 'exclude_assistant', true],
use_alt_scale: ['#use_alt_scale', 'use_alt_scale', true],
squash_system_messages: ['#squash_system_messages', 'squash_system_messages', true],
image_inlining: ['#openai_image_inlining', 'image_inlining', true],
};
const presetName = $('#settings_preset_openai').find(":selected").text();
@@ -2785,6 +2976,9 @@ function getMaxContextOpenAI(value) {
else if (value.includes('gpt-4-1106')) {
return max_128k;
}
else if (value.includes('gpt-4-vision')) {
return max_128k;
}
else if (value.includes('gpt-3.5-turbo-1106')) {
return max_16k;
}
@@ -2831,6 +3025,9 @@ function getMaxContextWindowAI(value) {
else if (value.includes('gpt-4-1106')) {
return max_128k;
}
else if (value.includes('gpt-4-vision')) {
return max_128k;
}
else if (value.includes('gpt-4-32k')) {
return max_32k;
}
@@ -2932,7 +3129,10 @@ async function onModelChange() {
if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) {
if (oai_settings.max_context_unlocked) {
$('#openai_max_context').attr('max', unlocked_max);
$('#openai_max_context').attr('max', max_200k);
}
else if (value == 'claude-2.1' || value == 'claude-2') {
$('#openai_max_context').attr('max', max_200k);
}
else if (value.endsWith('100k') || value.startsWith('claude-2') || value === 'claude-instant-1.2') {
$('#openai_max_context').attr('max', claude_100k_max);
@@ -3016,6 +3216,10 @@ async function onModelChange() {
eventSource.emit(event_types.CHATCOMPLETION_MODEL_CHANGED, value);
}
/**
 * Handler for OpenRouter sort/group setting changes: re-runs the status check,
 * which re-fetches and re-renders the model list with the new ordering.
 */
async function onOpenrouterModelSortChange() {
await getStatusOpen();
}
async function onNewPresetClick() {
const popupText = `
<h3>Preset name:</h3>
@@ -3217,6 +3421,32 @@ function updateScaleForm() {
}
}
/**
* Check if the model supports image inlining
* @returns {boolean} True if the model supports image inlining
*/
export function isImageInliningSupported() {
if (main_api !== 'openai') {
return false;
}
const gpt4v = 'gpt-4-vision';
const llava13b = 'llava-13b';
if (!oai_settings.image_inlining) {
return false;
}
switch (oai_settings.chat_completion_source) {
case chat_completion_sources.OPENAI:
return oai_settings.openai_model.includes(gpt4v);
case chat_completion_sources.OPENROUTER:
return oai_settings.openrouter_model.includes(gpt4v) || oai_settings.openrouter_model.includes(llava13b);
default:
return false;
}
}
$(document).ready(async function () {
$('#test_api_button').on('click', testApiConnection);
@@ -3409,6 +3639,12 @@ $(document).ready(async function () {
saveSettingsDebounced();
});
$('#openai_bypass_status_check').on('input', function () {
oai_settings.bypass_status_check = !!$(this).prop('checked');
getStatusOpen();
saveSettingsDebounced();
})
$('#chat_completion_source').on('change', function () {
oai_settings.chat_completion_source = String($(this).find(":selected").val());
toggleChatCompletionForms();
@@ -3458,11 +3694,26 @@ $(document).ready(async function () {
saveSettingsDebounced();
});
$('#openrouter_group_models').on('input', function () {
oai_settings.openrouter_group_models = !!$(this).prop('checked');
saveSettingsDebounced();
});
$('#openrouter_sort_models').on('input', function () {
oai_settings.openrouter_sort_models = String($(this).val());
saveSettingsDebounced();
});
$('#squash_system_messages').on('input', function () {
oai_settings.squash_system_messages = !!$(this).prop('checked');
saveSettingsDebounced();
});
$('#openai_image_inlining').on('input', function () {
oai_settings.image_inlining = !!$(this).prop('checked');
saveSettingsDebounced();
});
$(document).on('input', '#openai_settings .autoSetHeight', function () {
resetScrollHeight($(this));
});
@@ -3475,6 +3726,8 @@ $(document).ready(async function () {
$("#model_scale_select").on("change", onModelChange);
$("#model_palm_select").on("change", onModelChange);
$("#model_openrouter_select").on("change", onModelChange);
$("#openrouter_group_models").on("change", onOpenrouterModelSortChange);
$("#openrouter_sort_models").on("change", onOpenrouterModelSortChange);
$("#model_ai21_select").on("change", onModelChange);
$("#settings_preset_openai").on("change", onSettingsPresetChange);
$("#new_oai_preset").on("click", onNewPresetClick);

View File

@@ -1,11 +1,24 @@
/**
* This is a placeholder file for all the Persona Management code. Will be refactored into a separate file soon.
*/
import { callPopup, characters, chat_metadata, default_avatar, eventSource, event_types, getRequestHeaders, getThumbnailUrl, getUserAvatars, name1, saveMetadata, saveSettingsDebounced, setUserName, this_chid, user_avatar } from "../script.js";
import {
callPopup,
characters,
chat_metadata,
default_avatar,
eventSource,
event_types,
getRequestHeaders,
getThumbnailUrl,
getUserAvatars,
name1,
saveMetadata,
saveSettingsDebounced,
setUserName,
this_chid,
user_avatar,
} from "../script.js";
import { getContext } from "./extensions.js";
import { persona_description_positions, power_user } from "./power-user.js";
import { getTokenCount } from "./tokenizers.js";
import { debounce, delay } from "./utils.js";
import { debounce, delay, download, parseJsonFile } from "./utils.js";
/**
* Uploads an avatar file to the server
@@ -38,6 +51,28 @@ async function uploadUserAvatar(url, name) {
});
}
/**
* Prompts the user to create a persona for the uploaded avatar.
* @param {string} avatarId User avatar id
* @returns {Promise} Promise that resolves when the persona is set
*/
/**
 * Prompts the user to create a persona for the uploaded avatar.
 * @param {string} avatarId User avatar id
 * @returns {Promise} Promise that resolves when the persona is set
 */
export async function createPersona(avatarId) {
    const newName = await callPopup('<h3>Enter a name for this persona:</h3>Cancel if you\'re just uploading an avatar.', 'input', '');

    // An empty/cancelled name means the user only wanted the avatar upload.
    if (!newName) {
        console.debug('User cancelled creating a persona');
        return;
    }

    // Short pause between popups, presumably so the first one can close cleanly.
    await delay(500);

    const newDescription = await callPopup('<h3>Enter a description for this persona:</h3>You can always add or change it later.', 'input', '', { rows: 4 });
    initPersona(avatarId, newName, newDescription);

    if (power_user.persona_show_notifications) {
        toastr.success(`You can now pick ${newName} as a persona in the Persona Management menu.`, 'Persona Created');
    }
}
async function createDummyPersona() {
const personaName = await callPopup('<h3>Enter a name for this persona:</h3>', 'input', '');
@@ -48,18 +83,28 @@ async function createDummyPersona() {
// Date + name (only ASCII) to make it unique
const avatarId = `${Date.now()}-${personaName.replace(/[^a-zA-Z0-9]/g, '')}.png`;
initPersona(avatarId, personaName, '');
await uploadUserAvatar(default_avatar, avatarId);
}
/**
* Initializes a persona for the given avatar id.
* @param {string} avatarId User avatar id
* @param {string} personaName Name for the persona
* @param {string} personaDescription Optional description for the persona
* @returns {void}
*/
export function initPersona(avatarId, personaName, personaDescription) {
power_user.personas[avatarId] = personaName;
power_user.persona_descriptions[avatarId] = {
description: '',
description: personaDescription || '',
position: persona_description_positions.IN_PROMPT,
};
await uploadUserAvatar(default_avatar, avatarId);
saveSettingsDebounced();
}
export async function convertCharacterToPersona(characterId = null) {
if (null === characterId) characterId = this_chid;
const avatarUrl = characters[characterId]?.avatar;
@@ -226,6 +271,12 @@ export function selectCurrentPersona() {
}
setPersonaDescription();
// force firstMes {{user}} update on persona switch
const context = getContext();
if (context.characterId >= 0 && !context.groupId && context.chat.length === 1) {
$("#firstmessage_textarea").trigger('input')
}
}
}
@@ -458,6 +509,96 @@ function setChatLockedPersona() {
updateUserLockIcon();
}
/**
 * Serializes the current personas, their descriptions, and the default persona
 * to pretty-printed JSON and triggers a browser download of the backup file.
 */
function onBackupPersonas() {
    // Date stamp as YYYYMMDD for the file name.
    const datePart = new Date().toISOString().split('T')[0].replace(/-/g, '');
    const payload = {
        "personas": power_user.personas,
        "persona_descriptions": power_user.persona_descriptions,
        "default_persona": power_user.default_persona,
    };
    const json = JSON.stringify(payload, null, 2);
    download(new Blob([json], { type: 'application/json' }), `personas_${datePart}.json`, 'application/json');
}
/**
 * Handles the personas restore file input: reads a backup JSON (as produced by
 * onBackupPersonas) and merges it into the current settings. Existing personas
 * and descriptions are never overwritten; conflicts and missing pieces are
 * collected as warnings and reported to the console.
 * @param {Event} e - 'change' event from the hidden restore file input.
 * @returns {Promise<void>}
 */
async function onPersonasRestoreInput(e) {
const file = e.target.files[0];
if (!file) {
console.debug('No file selected');
return;
}
const data = await parseJsonFile(file);
if (!data) {
toastr.warning('Invalid file selected', 'Persona Management');
console.debug('Invalid file selected');
return;
}
// Both maps must be present and be objects for the backup to be usable.
if (!data.personas || !data.persona_descriptions || typeof data.personas !== 'object' || typeof data.persona_descriptions !== 'object') {
toastr.warning('Invalid file format', 'Persona Management');
console.debug('Invalid file selected');
return;
}
const avatarsList = await getUserAvatars();
const warnings = [];
// Merge personas with existing ones
for (const [key, value] of Object.entries(data.personas)) {
if (key in power_user.personas) {
warnings.push(`Persona "${key}" (${value}) already exists, skipping`);
continue;
}
power_user.personas[key] = value;
// If the avatar is missing, upload it
if (!avatarsList.includes(key)) {
warnings.push(`Persona image "${key}" (${value}) is missing, uploading default avatar`);
await uploadUserAvatar(default_avatar, key);
}
}
// Merge persona descriptions with existing ones.
// Runs after the personas merge so descriptions can attach to newly added personas.
for (const [key, value] of Object.entries(data.persona_descriptions)) {
if (key in power_user.persona_descriptions) {
warnings.push(`Persona description for "${key}" (${power_user.personas[key]}) already exists, skipping`);
continue;
}
if (!power_user.personas[key]) {
warnings.push(`Persona for "${key}" does not exist, skipping`);
continue;
}
power_user.persona_descriptions[key] = value;
}
// The default persona is only restored if it resolves to a known persona.
if (data.default_persona) {
if (data.default_persona in power_user.personas) {
power_user.default_persona = data.default_persona;
} else {
warnings.push(`Default persona "${data.default_persona}" does not exist, skipping`);
}
}
if (warnings.length) {
toastr.success('Personas restored with warnings. Check console for details.');
console.warn(`PERSONA RESTORE REPORT\n====================\n${warnings.join('\n')}`);
} else {
toastr.success('Personas restored successfully.');
}
// Refresh avatars/description UI, persist, and clear the input so the
// same file can be selected again later.
await getUserAvatars();
setPersonaDescription();
saveSettingsDebounced();
$('#personas_restore_input').val('');
}
export function initPersonas() {
$(document).on('click', '.bind_user_name', bindUserNameToPersona);
$(document).on('click', '.set_default_persona', setDefaultPersona);
@@ -466,6 +607,9 @@ export function initPersonas() {
$("#create_dummy_persona").on('click', createDummyPersona);
$('#persona_description').on('input', onPersonaDescriptionInput);
$('#persona_description_position').on('input', onPersonaDescriptionPositionInput);
$('#personas_backup').on('click', onBackupPersonas);
$('#personas_restore').on('click', () => $('#personas_restore_input').trigger('click'));
$('#personas_restore_input').on('change', onPersonasRestoreInput);
eventSource.on("charManagementDropdown", (target) => {
if (target === 'convert_to_persona') {

View File

@@ -66,6 +66,7 @@ export const ui_mode = {
const avatar_styles = {
ROUND: 0,
RECTANGULAR: 1,
SQUARE: 2,
}
export const chat_styles = {
@@ -107,6 +108,7 @@ let power_user = {
target_length: 400,
},
markdown_escape_strings: '',
chat_truncation: 100,
ui_mode: ui_mode.POWER,
fast_ui_mode: true,
@@ -220,6 +222,7 @@ let power_user = {
encode_tags: false,
servers: [],
bogus_folders: false,
aux_field: 'character_version',
};
let themes = [];
@@ -432,10 +435,12 @@ var originalSliderValues = []
async function switchLabMode() {
if (power_user.enableZenSliders) {
//force disable ZenSliders for Lab Mode
$("#enableZenSliders").trigger('click')
}
/* if (power_user.enableZenSliders && power_user.enableLabMode) {
toastr.warning("Can't start Lab Mode while Zen Sliders are active")
return
//$("#enableZenSliders").trigger('click')
}
*/
await delay(100)
const value = localStorage.getItem(storage_keys.enableLabMode);
power_user.enableLabMode = value === null ? false : value == "true";
@@ -457,7 +462,7 @@ async function switchLabMode() {
.attr('min', '-99999')
.attr('max', '99999')
.attr('step', '0.001')
$("#labModeWarning").show()
$("#labModeWarning").removeClass('displayNone')
//$("#advanced-ai-config-block input[type='range']").hide()
} else {
@@ -470,20 +475,17 @@ async function switchLabMode() {
.trigger('input')
});
$("#advanced-ai-config-block input[type='range']").show()
$("#labModeWarning").hide()
$("#labModeWarning").addClass('displayNone')
}
}
async function switchZenSliders() {
await delay(100)
const value = localStorage.getItem(storage_keys.enableZenSliders);
power_user.enableZenSliders = value === null ? false : value == "true";
$("body").toggleClass("enableZenSliders", power_user.enableZenSliders);
$("#enableZenSliders").prop("checked", power_user.enableZenSliders);
if (power_user.enableZenSliders) {
$("#clickSlidersTips").hide()
$("#pro-settings-block input[type='number']").hide();
@@ -491,14 +493,17 @@ async function switchZenSliders() {
$(`#textgenerationwebui_api-settings :input[type='number']:not([id^='seed']),
#kobold_api-settings :input[type='number']:not([id^='seed'])`).hide()
//hide original sliders
//exclude max context because its creation is handled by switchMaxContext()
$(`#textgenerationwebui_api-settings input[type='range'],
#kobold_api-settings input[type='range'],
#pro-settings-block input[type='range']`)
#pro-settings-block input[type='range']:not(#max_context)`)
.hide()
.each(function () {
//make a zen slider for each original slider
CreateZenSliders($(this))
})
//this is for when zensliders is toggled after pageload
switchMaxContextSize()
} else {
$("#clickSlidersTips").show()
revertOriginalSliders();
@@ -516,149 +521,262 @@ async function switchZenSliders() {
$('div[id$="_zenslider"]').remove();
}
/**
 * Replaces a native range input with a jQuery UI "Zen" slider whose drag
 * handle displays the current value. The original input is hidden but kept
 * in the DOM and is updated (with 'input'/'change' events) on every slide,
 * so existing listeners keep working.
 *
 * Per-slider step counts and decimal precision are special-cased by element
 * id below; everything else gets 10 steps and 2 decimals.
 *
 * @param {JQuery} elmnt jQuery object wrapping the original <input type="range">
 */
async function CreateZenSliders(elmnt) {
    //await delay(100)
    var originalSlider = elmnt;
    var sliderID = originalSlider.attr('id')
    var sliderMin = Number(originalSlider.attr('min'))
    var sliderMax = Number(originalSlider.attr('max'))
    var sliderValue = originalSlider.val();
    var sliderRange = sliderMax - sliderMin
    var numSteps = 10
    var decimals = 2
    // 'amount_gen' (response length) is not continuous: the slider indexes
    // into the fixed `steps` array instead of using the input's min/max.
    if (sliderID == 'amount_gen') {
        decimals = 0
        var steps = [16, 50, 100, 150, 200, 256, 300, 400, 512, 1024];
        sliderMin = 0
        sliderMax = steps.length - 1
        // Assigns the function-scoped `stepScale` (the `var stepScale` below
        // is hoisted), not a global.
        stepScale = 1;
        numSteps = 10
        sliderValue = steps.indexOf(Number(sliderValue))
        if (sliderValue === -1) { sliderValue = 4 } // default to '200' if origSlider has value we can't use
    }
    // Per-slider overrides of step count / decimal precision, keyed by id.
    if (sliderID == 'max_context') {
        numSteps = 15
        decimals = 0
    }
    if (sliderID == 'rep_pen_range_textgenerationwebui') {
        numSteps = 16
        decimals = 0
    }
    if (sliderID == 'encoder_rep_pen_textgenerationwebui') {
        numSteps = 14
    }
    if (sliderID == 'mirostat_mode_textgenerationwebui') {
        numSteps = 2
        decimals = 0
    }
    if (sliderID == 'mirostat_tau_textgenerationwebui' ||
        sliderID == 'top_k_textgenerationwebui' ||
        sliderID == 'num_beams_textgenerationwebui' ||
        sliderID == 'no_repeat_ngram_size_textgenerationwebui') {
        numSteps = 20
        decimals = 0
    }
    if (sliderID == 'epsilon_cutoff_textgenerationwebui') {
        numSteps = 20
        decimals = 1
    }
    if (sliderID == 'tfs_textgenerationwebui' ||
        sliderID == 'min_p_textgenerationwebui') {
        numSteps = 20
        decimals = 2
    }
    if (sliderID == 'mirostat_eta_textgenerationwebui' ||
        sliderID == 'penalty_alpha_textgenerationwebui' ||
        sliderID == 'length_penalty_textgenerationwebui') {
        numSteps = 50
    }
    if (sliderID == 'eta_cutoff_textgenerationwebui') {
        numSteps = 50
        decimals = 1
    }
    if (sliderID == 'guidance_scale_textgenerationwebui') {
        numSteps = 78
    }
    if (sliderID == 'min_length_textgenerationwebui') {
        decimals = 0
    }
    if (sliderID == 'temp_textgenerationwebui') {
        numSteps = 20
    }
    // Continuous sliders derive a step size from the value range;
    // 'amount_gen' already set stepScale = 1 above.
    if (sliderID !== 'amount_gen') {
        var stepScale = sliderRange / numSteps
    }
    // The Zen slider is a <div> inserted just before the original input.
    var newSlider = $("<div>")
        .attr('id', `${sliderID}_zenslider`)
        .css("width", "100%")
        .insertBefore(originalSlider);
    newSlider.slider({
        value: sliderValue,
        step: stepScale,
        min: sliderMin,
        max: sliderMax,
        // Positions the handle and writes the initial value text into it.
        create: function () {
            var handle = $(this).find(".ui-slider-handle");
            if (newSlider.attr('id') == 'amount_gen_zenslider') {
                //console.log(sliderValue, steps.indexOf(Number(sliderValue)))
                // amount_gen shows the mapped step value, not the raw index.
                var handleText = steps[sliderValue]
                handle.text(handleText);
                //console.log(handleText)
                var stepNumber = sliderValue
                // Negative margin shifts the handle left so it stays inside
                // the track as it approaches the right edge.
                var leftMargin = ((stepNumber) / numSteps) * 50 * -1
                //console.log(`initial value:${handleText}, stepNum:${stepNumber}, numSteps:${numSteps}, left-margin:${leftMargin}`)
                handle.css('margin-left', `${leftMargin}px`)
            } else {
                var handleText = Number(sliderValue).toFixed(decimals)
                handle.text(handleText);
                var stepNumber = ((sliderValue - sliderMin) / stepScale)
                var leftMargin = (stepNumber / numSteps) * 50 * -1
                handle.css('margin-left', `${leftMargin}px`)
                console.debug(sliderID, sliderValue, handleText, stepNumber, stepScale)
            }
        },
        // Mirrors every slide back into the hidden original input and fires
        // its 'input'/'change' handlers.
        slide: function (event, ui) {
            var handle = $(this).find(".ui-slider-handle");
            if (newSlider.attr('id') == 'amount_gen_zenslider') {
                //console.log(`stepScale${stepScale}, UIvalue:${ui.value}, mappedValue:${steps[ui.value]}`)
                // ui.value is the step index; map it through `steps`.
                $(this).val(steps[ui.value])
                let handleText = steps[ui.value].toFixed(decimals)
                handle.text(handleText);
                var stepNumber = steps.indexOf(Number(handleText))
                var leftMargin = (stepNumber / numSteps) * 50 * -1
                //console.log(`handleText:${handleText},stepNum:${stepNumber}, numSteps:${numSteps},LeftMargin:${leftMargin}`)
                handle.css('margin-left', `${leftMargin}px`)
                originalSlider.val(handleText);
                originalSlider.trigger('input')
                originalSlider.trigger('change')
            } else {
                handle.text(ui.value.toFixed(decimals));
                var stepNumber = ((ui.value - sliderMin) / stepScale)
                var leftMargin = (stepNumber / numSteps) * 50 * -1
                handle.css('margin-left', `${leftMargin}px`)
                let handleText = (ui.value)
                originalSlider.val(handleText);
                originalSlider.trigger('input')
                originalSlider.trigger('change')
            }
        }
    });
    // Keep a back-reference so revertOriginalSliders() can find and remove it.
    originalSlider.data("newSlider", newSlider);
    originalSlider.hide();
};
}
/**
 * Replaces a native range input with a jQuery UI "Zen" slider whose drag
 * handle displays the current value, supports direct keyboard entry
 * (contenteditable handle), and shows "Off" when the slider sits at the
 * sampler's disabled value. The original input stays in the DOM (hidden)
 * and receives 'input'/'change' events on every update, so existing
 * listeners keep working.
 *
 * Step counts, decimal precision, and "off" values are special-cased per
 * element id below; the defaults are 10 steps / 2 decimals / no off value.
 *
 * @param {JQuery} elmnt jQuery object wrapping the original <input type="range">
 */
async function CreateZenSliders(elmnt) {
    //await delay(100)
    var originalSlider = elmnt;
    var sliderID = originalSlider.attr('id')
    var sliderMin = Number(originalSlider.attr('min'))
    var sliderMax = Number(originalSlider.attr('max'))
    var sliderValue = originalSlider.val();
    var sliderRange = sliderMax - sliderMin
    var numSteps = 10
    var decimals = 2
    // Value at which the handle reads "Off"; stays undefined for sliders
    // that have no disabled state.
    var offVal
    // 'amount_gen' (response length) is not continuous: the slider indexes
    // into the fixed `steps` array instead of using the input's min/max.
    if (sliderID == 'amount_gen') {
        decimals = 0
        var steps = [16, 50, 100, 150, 200, 256, 300, 400, 512, 1024];
        sliderMin = 0
        sliderMax = steps.length - 1
        // Assigns the function-scoped `stepScale` (the `var stepScale` below
        // is hoisted), not a global.
        stepScale = 1;
        numSteps = 10
        sliderValue = steps.indexOf(Number(sliderValue))
        if (sliderValue === -1) { sliderValue = 4 } // default to '200' if origSlider has value we can't use
    }
    //customize decimals
    if (sliderID == 'max_context' ||
        sliderID == 'mirostat_mode_textgenerationwebui' ||
        sliderID == 'mirostat_tau_textgenerationwebui' ||
        sliderID == 'top_k_textgenerationwebui' ||
        sliderID == 'num_beams_textgenerationwebui' ||
        sliderID == 'no_repeat_ngram_size_textgenerationwebui' ||
        sliderID == 'min_length_textgenerationwebui' ||
        sliderID == 'top_k' ||
        sliderID == 'mirostat_mode_kobold' ||
        sliderID == 'rep_pen_range') {
        decimals = 0
    }
    if (sliderID == 'eta_cutoff_textgenerationwebui' ||
        sliderID == 'epsilon_cutoff_textgenerationwebui') {
        numSteps = 50
        decimals = 1
    }
    //customize steps
    if (sliderID == 'mirostat_mode_textgenerationwebui' ||
        sliderID == 'mirostat_mode_kobold') {
        numSteps = 2
    }
    if (sliderID == 'encoder_rep_pen_textgenerationwebui') {
        numSteps = 14
    }
    if (sliderID == 'max_context') {
        numSteps = 15
    }
    if (sliderID == 'rep_pen_range_textgenerationwebui') {
        numSteps = 16
    }
    if (sliderID == 'mirostat_tau_textgenerationwebui' ||
        sliderID == 'top_k_textgenerationwebui' ||
        sliderID == 'num_beams_textgenerationwebui' ||
        sliderID == 'no_repeat_ngram_size_textgenerationwebui' ||
        sliderID == 'epsilon_cutoff_textgenerationwebui' ||
        sliderID == 'tfs_textgenerationwebui' ||
        sliderID == 'min_p_textgenerationwebui' ||
        sliderID == 'temp_textgenerationwebui' ||
        sliderID == 'temp') {
        numSteps = 20
    }
    if (sliderID == 'mirostat_eta_textgenerationwebui' ||
        sliderID == 'penalty_alpha_textgenerationwebui' ||
        sliderID == 'length_penalty_textgenerationwebui') {
        numSteps = 50
    }
    //customize off values
    // Samplers in this group are disabled at 0.
    if (sliderID == 'presence_pen_textgenerationwebui' ||
        sliderID == 'freq_pen_textgenerationwebui' ||
        sliderID == 'mirostat_mode_textgenerationwebui' ||
        sliderID == 'mirostat_mode_kobold' ||
        sliderID == 'mirostat_tau_textgenerationwebui' ||
        sliderID == 'mirostat_tau_kobold' ||
        sliderID == 'mirostat_eta_textgenerationwebui' ||
        sliderID == 'mirostat_eta_kobold' ||
        sliderID == 'min_p_textgenerationwebui' ||
        sliderID == 'min_p' ||
        sliderID == 'no_repeat_ngram_size_textgenerationwebui' ||
        sliderID == 'penalty_alpha_textgenerationwebui' ||
        sliderID == 'length_penalty_textgenerationwebui' ||
        sliderID == 'epsilon_cutoff_textgenerationwebui' ||
        sliderID == 'rep_pen_range_textgenerationwebui' ||
        sliderID == 'rep_pen_range' ||
        sliderID == 'eta_cutoff_textgenerationwebui' ||
        sliderID == 'top_a_textgenerationwebui' ||
        sliderID == 'top_a' ||
        sliderID == 'top_k_textgenerationwebui' ||
        sliderID == 'top_k' ||
        sliderID == 'rep_pen_slope' ||
        sliderID == 'min_length_textgenerationwebui') {
        offVal = 0
    }
    // Samplers in this group are disabled at 1.
    if (sliderID == 'rep_pen_textgenerationwebui' ||
        sliderID == 'rep_pen' ||
        sliderID == 'tfs_textgenerationwebui' ||
        sliderID == 'tfs' ||
        sliderID == 'top_p_textgenerationwebui' ||
        sliderID == 'top_p' ||
        sliderID == 'num_beams_textgenerationwebui' ||
        sliderID == 'typical_p_textgenerationwebui' ||
        sliderID == 'typical_p' ||
        sliderID == 'encoder_rep_pen_textgenerationwebui' ||
        sliderID == 'temp_textgenerationwebui' ||
        sliderID == 'temp' ||
        sliderID == 'guidance_scale_textgenerationwebui' ||
        sliderID == 'guidance_scale') {
        offVal = 1
    }
    if (sliderID == 'guidance_scale_textgenerationwebui') {
        numSteps = 78
    }
    //customize amt gen steps
    // Continuous sliders derive a step size from the value range;
    // 'amount_gen' already set stepScale = 1 above.
    if (sliderID !== 'amount_gen') {
        var stepScale = sliderRange / numSteps
    }
    // The Zen slider is a <div> inserted just before the original input.
    var newSlider = $("<div>")
        .attr('id', `${sliderID}_zenslider`)
        .css("width", "100%")
        .insertBefore(originalSlider);
    newSlider.slider({
        value: sliderValue,
        step: stepScale,
        min: sliderMin,
        max: sliderMax,
        // Positions the handle, writes the initial value text, and (for
        // non-amount_gen sliders) wires up manual keyboard entry.
        create: function () {
            var handle = $(this).find(".ui-slider-handle");
            //handling creaetion of amt_gen
            if (newSlider.attr('id') == 'amount_gen_zenslider') {
                // amount_gen shows the mapped step value, not the raw index.
                var handleText = steps[sliderValue]
                var stepNumber = sliderValue
                // Negative margin keeps the handle inside the track near the
                // right edge.
                var leftMargin = ((stepNumber) / numSteps) * 50 * -1
                handle.text(handleText)
                    .css('margin-left', `${leftMargin}px`)
                //console.log(`initial value:${handleText}, stepNum:${stepNumber}, numSteps:${numSteps}, left-margin:${leftMargin}`)
            } else {
                //handling creation for all other sliders
                var numVal = Number(sliderValue).toFixed(decimals)
                offVal = Number(offVal).toFixed(decimals)
                //console.log(`${sliderID}: offVal ${offVal}`)
                // Show 'Off' (greyed) when the slider sits at the disabled
                // value; string comparison works because both sides went
                // through toFixed(decimals).
                if (numVal === offVal) {
                    handle.text('Off').css('color', 'rgba(128,128,128,0.5');
                } else {
                    handle.text(numVal).css('color', '');
                }
                var stepNumber = ((sliderValue - sliderMin) / stepScale)
                var leftMargin = (stepNumber / numSteps) * 50 * -1
                var isManualInput = false
                var valueBeforeManualInput
                handle.css('margin-left', `${leftMargin}px`)
                    .attr('contenteditable', 'true')
                    .on('click', function () {
                        //this just selects all the text in the handle so user can overwrite easily
                        //needed because JQUery UI uses left/right arrow keys as well as home/end to move the slider..
                        valueBeforeManualInput = newSlider.val()
                        console.log(valueBeforeManualInput)
                        let handleElement = handle.get(0);
                        let range = document.createRange();
                        range.selectNodeContents(handleElement);
                        let selection = window.getSelection();
                        selection.removeAllRanges();
                        selection.addRange(range);
                    })
                    .on('keyup', function () {
                        // Remember the last valid value so invalid manual
                        // input can be rolled back on blur.
                        valueBeforeManualInput = newSlider.val()
                        console.log(valueBeforeManualInput)
                        isManualInput = true
                    })
                    //trigger slider changes when user clicks away
                    .on('mouseup blur', function () {
                        let manualInput = parseFloat(handle.text()).toFixed(decimals)
                        if (isManualInput) {
                            //disallow manual inputs outside acceptable range
                            if (manualInput >= sliderMin && manualInput <= sliderMax) {
                                //if value is ok, assign to slider and update handle text and position
                                newSlider.val(manualInput)
                                handleSlideEvent.call(newSlider, null, { value: parseFloat(manualInput) }, 'manual');
                                valueBeforeManualInput = manualInput
                            } else {
                                //if value not ok, warn and reset to last known valid value
                                toastr.warning(`Invalid value. Must be between ${sliderMin} and ${sliderMax}`)
                                console.log(valueBeforeManualInput)
                                newSlider.val(valueBeforeManualInput)
                                handle.text(valueBeforeManualInput)
                            }
                        }
                        isManualInput = false
                    })
                // NOTE(review): handleText is only assigned in the
                // amount_gen branch above, so it logs as undefined here.
                console.debug(sliderID, sliderValue, handleText, stepNumber, stepScale)
            }
        },
        slide: handleSlideEvent
    });
    /**
     * Shared slide handler: clamps/formats the value, updates handle text
     * ('Off' at the disabled value), repositions the handle, and mirrors the
     * value into the hidden original input (firing 'input'/'change').
     * `type === 'manual'` marks calls that come from typed-in values rather
     * than drags.
     */
    function handleSlideEvent(event, ui, type) {
        var handle = $(this).find(".ui-slider-handle");
        var numVal = Number(ui.value).toFixed(decimals);
        offVal = Number(offVal).toFixed(decimals);
        //console.log(numVal, sliderMin, sliderMax, numVal > sliderMax, numVal < sliderMin)
        if (numVal > sliderMax) {
            //console.log('clamping numVal to sliderMax')
            numVal = sliderMax
        }
        if (numVal < sliderMin) {
            //console.log('clamping numVal to sliderMin')
            numVal = sliderMin
        }
        var sliderValRange = sliderMax - sliderMin
        var stepNumber = ((ui.value - sliderMin) / stepScale).toFixed(0);
        var handleText = (ui.value);
        var leftMargin = (stepNumber / numSteps) * 50 * -1;
        var percentOfMax = Number((ui.value / sliderMax)) //what % our value is of the max
        var perStepPercent = 1 / numSteps //how far in % each step should be on the slider
        var leftPos = newSlider.width() * (stepNumber * perStepPercent) //how big of a left margin to give the slider for manual inputs
        /* console.log(`
        numVal: ${numVal},
        sliderMax: ${sliderMax}
        sliderMin: ${sliderMin}
        sliderValRange: ${sliderValRange}
        stepScale: ${stepScale}
        Step: ${stepNumber} of ${numSteps}
        offVal: ${offVal}
        initial value: ${handleText}
        left-margin: ${leftMargin}
        width: ${newSlider.width()}
        percent of max: ${percentOfMax}
        left: ${leftPos}`) */
        //special handling for response length slider, pulls text aliases for step values from an array
        if (newSlider.attr('id') == 'amount_gen_zenslider') {
            handleText = steps[stepNumber]
            handle.text(handleText);
            newSlider.val(stepNumber)
        }
        //everything else uses the flat slider value
        else {
            //show 'off' if disabled value is set
            if (numVal === offVal) { handle.text('Off').css('color', 'rgba(128,128,128,0.5'); }
            else { handle.text(ui.value.toFixed(decimals)).css('color', ''); }
            newSlider.val(handleText)
        }
        //for manually typed-in values we must adjust left position because JQUI doesn't do it for us
        //if (type === 'manual') {
        handle.css('left', leftPos)
        //}
        //adjust a negative left margin to avoid overflowing right side of slider body
        handle.css('margin-left', `${leftMargin}px`);
        originalSlider.val(handleText);
        originalSlider.trigger('input');
        originalSlider.trigger('change');
    }
    // Keep a back-reference so revertOriginalSliders() can find and remove it.
    originalSlider.data("newSlider", newSlider);
    await delay(1)
    originalSlider.hide();
};
function switchUiMode() {
const fastUi = localStorage.getItem(storage_keys.fast_ui_mode);
power_user.fast_ui_mode = fastUi === null ? true : fastUi == "true";
@@ -739,6 +857,7 @@ function noShadows() {
function applyAvatarStyle() {
power_user.avatar_style = Number(localStorage.getItem(storage_keys.avatar_style) ?? avatar_styles.ROUND);
$("body").toggleClass("big-avatars", power_user.avatar_style === avatar_styles.RECTANGULAR);
$("body").toggleClass("square-avatars", power_user.avatar_style === avatar_styles.SQUARE);
$("#avatar_style").val(power_user.avatar_style).prop("selected", true);
//$(`input[name="avatar_style"][value="${power_user.avatar_style}"]`).prop("checked", true);
@@ -800,6 +919,11 @@ function applyChatWidth(type) {
async function applyThemeColor(type) {
if (type === 'main') {
document.documentElement.style.setProperty('--SmartThemeBodyColor', power_user.main_text_color);
const color = power_user.main_text_color.split('(')[1].split(')')[0].split(',');
document.documentElement.style.setProperty('--SmartThemeCheckboxBgColorR', color[0]);
document.documentElement.style.setProperty('--SmartThemeCheckboxBgColorG', color[1]);
document.documentElement.style.setProperty('--SmartThemeCheckboxBgColorB', color[2]);
document.documentElement.style.setProperty('--SmartThemeCheckboxBgColorA', color[3]);
}
if (type === 'italics') {
document.documentElement.style.setProperty('--SmartThemeEmColor', power_user.italics_text_color);
@@ -1255,6 +1379,10 @@ function loadPowerUserSettings(settings, data) {
$(`#chat_display option[value=${power_user.chat_display}]`).attr("selected", true).trigger('change');
$('#chat_width_slider').val(power_user.chat_width);
$("#token_padding").val(power_user.token_padding);
$("#aux_field").val(power_user.aux_field);
$("#chat_truncation").val(power_user.chat_truncation);
$('#chat_truncation_counter').val(power_user.chat_truncation);
$("#font_scale").val(power_user.font_scale);
$("#font_scale_counter").val(power_user.font_scale);
@@ -1355,16 +1483,17 @@ function loadMaxContextUnlocked() {
}
function switchMaxContextSize() {
const elements = [$('#max_context'), $('#rep_pen_range'), $('#rep_pen_range_textgenerationwebui')];
const elements = [$('#max_context'), $('#max_context_counter'), $('#rep_pen_range'), $('#rep_pen_range_textgenerationwebui')];
const maxValue = power_user.max_context_unlocked ? MAX_CONTEXT_UNLOCKED : MAX_CONTEXT_DEFAULT;
const minValue = power_user.max_context_unlocked ? maxContextMin : maxContextMin;
const steps = power_user.max_context_unlocked ? unlockedMaxContextStep : maxContextStep;
for (const element of elements) {
const id = element.attr('id');
element.attr('max', maxValue);
element.attr('step', steps);
if (element.attr('id') == 'max_context') {
if (typeof id === 'string' && id?.indexOf('max_context') !== -1) {
element.attr('min', minValue);
}
const value = Number(element.val());
@@ -1373,6 +1502,10 @@ function switchMaxContextSize() {
element.val(maxValue).trigger('input');
}
}
if (power_user.enableZenSliders) {
$("#max_context_zenslider").remove()
CreateZenSliders($("#max_context"))
}
}
// Fetch a compiled object of all preset settings
@@ -1550,7 +1683,7 @@ export function fuzzySearchWorldInfo(data, searchValue) {
export function fuzzySearchTags(searchValue) {
const fuse = new Fuse(tags, {
keys: [
{ name: 'name', weight: 1},
{ name: 'name', weight: 1 },
],
includeScore: true,
ignoreLocation: true,
@@ -2199,6 +2332,71 @@ function setAvgBG() {
}
/**
 * Slash-command handler: fuzzy-matches a UI theme by name, applies it,
 * and persists the choice.
 * @param {object} _ Named arguments from the slash command parser (unused)
 * @param {string} text Full or partial theme name to search for
 */
async function setThemeCallback(_, text) {
    const searcher = new Fuse(themes, { keys: [{ name: 'name', weight: 1 }] });
    const matches = searcher.search(text);
    console.debug('Theme fuzzy search results for ' + text, matches);

    // Best fuzzy match wins; bail out with a warning if nothing matched.
    const bestMatch = matches.length > 0 ? matches[0].item : undefined;
    if (!bestMatch) {
        toastr.warning(`Could not find theme with name: ${text}`);
        return;
    }

    const themeName = bestMatch.name;
    power_user.theme = themeName;
    applyTheme(themeName);
    $("#themes").val(themeName);
    saveSettingsDebounced();
}
/**
 * Slash-command handler: fuzzy-matches a MovingUI preset by name,
 * activates it, and persists the choice.
 * @param {object} _ Named arguments from the slash command parser (unused)
 * @param {string} text Full or partial preset name to search for
 */
async function setmovingUIPreset(_, text) {
    const searcher = new Fuse(movingUIPresets, { keys: [{ name: 'name', weight: 1 }] });
    const matches = searcher.search(text);
    console.debug('movingUI preset fuzzy search results for ' + text, matches);

    // Best fuzzy match wins; bail out with a warning if nothing matched.
    const bestMatch = matches.length > 0 ? matches[0].item : undefined;
    if (!bestMatch) {
        toastr.warning(`Could not find preset with name: ${text}`);
        return;
    }

    const presetName = bestMatch.name;
    power_user.movingUIPreset = presetName;
    applyMovingUIPreset(presetName);
    $("#movingUIPresets").val(presetName);
    saveSettingsDebounced();
}
// Stopping strings that apply only to the next generation; cleared after use.
const EPHEMERAL_STOPPING_STRINGS = [];

/**
 * Registers a stopping string that is only used for the next generation.
 * Duplicate values are ignored, so the list never holds the same string twice.
 * @param {string} value The stopping string to add
 */
export function addEphemeralStoppingString(value) {
    const alreadyListed = EPHEMERAL_STOPPING_STRINGS.includes(value);
    if (alreadyListed) {
        return;
    }
    console.debug('Adding ephemeral stopping string:', value);
    EPHEMERAL_STOPPING_STRINGS.push(value);
}

/**
 * Empties the ephemeral stopping string list in place (array identity is
 * preserved because other code may hold a reference). No-op when empty.
 */
export function flushEphemeralStoppingStrings() {
    if (EPHEMERAL_STOPPING_STRINGS.length > 0) {
        console.debug('Flushing ephemeral stopping strings:', EPHEMERAL_STOPPING_STRINGS);
        EPHEMERAL_STOPPING_STRINGS.length = 0;
    }
}
/**
* Gets the custom stopping strings from the power user settings.
@@ -2206,39 +2404,47 @@ function setAvgBG() {
* @returns {string[]} An array of custom stopping strings
*/
export function getCustomStoppingStrings(limit = undefined) {
try {
// If there's no custom stopping strings, return an empty array
if (!power_user.custom_stopping_strings) {
function getPermanent() {
try {
// If there's no custom stopping strings, return an empty array
if (!power_user.custom_stopping_strings) {
return [];
}
// Parse the JSON string
let strings = JSON.parse(power_user.custom_stopping_strings);
// Make sure it's an array
if (!Array.isArray(strings)) {
return [];
}
// Make sure all the elements are strings and non-empty.
strings = strings.filter(s => typeof s === 'string' && s.length > 0);
// Substitute params if necessary
if (power_user.custom_stopping_strings_macro) {
strings = strings.map(x => substituteParams(x));
}
return strings;
} catch (error) {
// If there's an error, return an empty array
console.warn('Error parsing custom stopping strings:', error);
return [];
}
// Parse the JSON string
let strings = JSON.parse(power_user.custom_stopping_strings);
// Make sure it's an array
if (!Array.isArray(strings)) {
return [];
}
// Make sure all the elements are strings and non-empty.
strings = strings.filter(s => typeof s === 'string' && s.length > 0);
// Substitute params if necessary
if (power_user.custom_stopping_strings_macro) {
strings = strings.map(x => substituteParams(x));
}
// Apply the limit. If limit is 0, return all strings.
if (limit > 0) {
strings = strings.slice(0, limit);
}
return strings;
} catch (error) {
// If there's an error, return an empty array
console.warn('Error parsing custom stopping strings:', error);
return [];
}
const permanent = getPermanent();
const ephemeral = EPHEMERAL_STOPPING_STRINGS;
const strings = [...permanent, ...ephemeral];
// Apply the limit. If limit is 0, return all strings.
if (limit > 0) {
return strings.slice(0, limit);
}
return strings;
}
$(document).ready(() => {
@@ -2416,6 +2622,12 @@ $(document).ready(() => {
setHotswapsDebounced();
});
$('#chat_truncation').on('input', function () {
power_user.chat_truncation = Number($('#chat_truncation').val());
$('#chat_truncation_counter').val(power_user.chat_truncation);
saveSettingsDebounced();
});
$(`input[name="font_scale"]`).on('input', async function (e) {
power_user.font_scale = Number(e.target.value);
$("#font_scale_counter").val(power_user.font_scale);
@@ -2690,28 +2902,31 @@ $(document).ready(() => {
});
$("#enableZenSliders").on("input", function () {
if (power_user.enableLabMode) {
const value = !!$(this).prop('checked');
if (power_user.enableLabMode === true && value === true) {
//disallow zenSliders while Lab Mode is active
toastr.warning('ZenSliders not allowed in Mad Lab Mode')
$(this).prop('checked', false);
toastr.warning('Disable Mad Lab Mode before enabling Zen Sliders')
$(this).prop('checked', false).trigger('input');
return
}
const value = !!$(this).prop('checked');
power_user.enableZenSliders = value;
localStorage.setItem(storage_keys.enableZenSliders, Boolean(power_user.enableZenSliders));
saveSettingsDebounced();
switchZenSliders();
});
$("#enableLabMode").on("input", function () {
if (power_user.enableZenSliders) {
const value = !!$(this).prop('checked');
if (power_user.enableZenSliders === true && value === true) {
//disallow Lab Mode if ZenSliders are active
toastr.warning('Mad Lab Mode not allowed while ZenSliders are active')
$(this).prop('checked', false);
toastr.warning('Disable Zen Sliders before enabling Mad Lab Mode')
$(this).prop('checked', false).trigger('input');;
return
}
const value = !!$(this).prop('checked');
power_user.enableLabMode = value;
localStorage.setItem(storage_keys.enableLabMode, Boolean(power_user.enableLabMode));
saveSettingsDebounced();
switchLabMode();
});
@@ -2825,13 +3040,20 @@ $(document).ready(() => {
switchSimpleMode();
});
$('#bogus_folders').on('input', function() {
$('#bogus_folders').on('input', function () {
const value = !!$(this).prop('checked');
power_user.bogus_folders = value;
saveSettingsDebounced();
printCharacters(true);
});
$('#aux_field').on('change', function () {
const value = $(this).find(':selected').val();
power_user.aux_field = String(value);
saveSettingsDebounced();
printCharacters(false);
});
$(document).on('click', '#debug_table [data-debug-function]', function () {
const functionId = $(this).data('debug-function');
const functionRecord = debug_functions.find(f => f.functionId === functionId);
@@ -2858,4 +3080,6 @@ $(document).ready(() => {
registerSlashCommand('cut', doMesCut, [], '<span class="monospace">(number or range)</span> cuts the specified message or continuous chunk from the chat, e.g. <tt>/cut 0-10</tt>. Ranges are inclusive!', true, true);
registerSlashCommand('resetpanels', doResetPanels, ['resetui'], ' resets UI panels to original state.', true, true);
registerSlashCommand('bgcol', setAvgBG, [], ' WIP test of auto-bg avg coloring', true, true);
registerSlashCommand('theme', setThemeCallback, [], '<span class="monospace">(name)</span> sets a UI theme by name', true, true);
registerSlashCommand('movingui', setmovingUIPreset, [], '<span class="monospace">(name)</span> activates a movingUI preset by name', true, true);
});

View File

@@ -4,6 +4,7 @@ export const SECRET_KEYS = {
HORDE: 'api_key_horde',
MANCER: 'api_key_mancer',
APHRODITE: 'api_key_aphrodite',
TABBY: 'api_key_tabby',
OPENAI: 'api_key_openai',
NOVEL: 'api_key_novel',
CLAUDE: 'api_key_claude',
@@ -12,6 +13,7 @@ export const SECRET_KEYS = {
AI21: 'api_key_ai21',
SCALE_COOKIE: 'scale_cookie',
PALM: 'api_key_palm',
SERPAPI: 'api_key_serpapi',
}
const INPUT_MAP = {
@@ -26,6 +28,7 @@ const INPUT_MAP = {
[SECRET_KEYS.SCALE_COOKIE]: '#scale_cookie',
[SECRET_KEYS.PALM]: '#api_key_palm',
[SECRET_KEYS.APHRODITE]: '#api_key_aphrodite',
[SECRET_KEYS.TABBY]: '#api_key_tabby'
}
async function clearSecret() {
@@ -52,7 +55,7 @@ async function viewSecrets() {
});
if (response.status == 403) {
callPopup('<h3>Forbidden</h3><p>To view your API keys here, set the value of allowKeysExposure to true in config.conf file and restart the SillyTavern server.</p>', 'text');
callPopup('<h3>Forbidden</h3><p>To view your API keys here, set the value of allowKeysExposure to true in config.yaml file and restart the SillyTavern server.</p>', 'text');
return;
}
@@ -113,6 +116,23 @@ export async function readSecretState() {
}
}
/**
 * Looks up a secret's value on the server by its key.
 * Best-effort: resolves to undefined (never rejects) when the request fails
 * or the server refuses to expose the key.
 * @param {string} key Secret key to look up (one of the SECRET_KEYS values)
 * @returns {Promise<string|undefined>} The secret value, or undefined if it
 * could not be retrieved.
 */
export async function findSecret(key) {
    try {
        const response = await fetch('/api/secrets/find', {
            method: 'POST',
            headers: getRequestHeaders(),
            body: JSON.stringify({ key }),
        });
        if (!response.ok) {
            // Previously a non-OK response resolved to undefined silently;
            // keep that behavior but leave a trace for debugging.
            console.warn('Secret lookup returned status', response.status, 'for key:', key);
            return undefined;
        }
        const data = await response.json();
        return data.value;
    } catch (error) {
        // Keep the best-effort contract (resolve to undefined), but include
        // the actual error instead of discarding it.
        console.error('Could not find secret value: ', key, error);
    }
}
function authorizeOpenRouter() {
const openRouterUrl = `https://openrouter.ai/auth?callback_url=${encodeURIComponent(location.origin)}`;
location.href = openRouterUrl;

View File

@@ -24,14 +24,22 @@ import {
Generate,
this_chid,
setCharacterName,
generateRaw,
callPopup,
deactivateSendButtons,
activateSendButtons,
main_api,
} from "../script.js";
import { getMessageTimeStamp } from "./RossAscends-mods.js";
import { groups, is_group_generating, resetSelectedGroup, selected_group } from "./group-chats.js";
import { findGroupMemberId, groups, is_group_generating, resetSelectedGroup, saveGroupChat, selected_group } from "./group-chats.js";
import { getRegexedString, regex_placement } from "./extensions/regex/engine.js";
import { chat_styles, power_user } from "./power-user.js";
import { addEphemeralStoppingString, chat_styles, flushEphemeralStoppingStrings, power_user } from "./power-user.js";
import { autoSelectPersona } from "./personas.js";
import { getContext } from "./extensions.js";
import { hideChatMessage, unhideChatMessage } from "./chats.js";
import { delay, isFalseBoolean, isTrueBoolean, stringToRange, trimToEndSentence, trimToStartSentence } from "./utils.js";
import { registerVariableCommands, resolveVariable } from "./variables.js";
import { decodeTextTokens, getFriendlyTokenizerName, getTextTokens, getTokenCount } from "./tokenizers.js";
export {
executeSlashCommands,
registerSlashCommand,
@@ -76,21 +84,22 @@ class SlashCommandParser {
let unnamedArg;
if (args.length > 0) {
const argsArray = args.split(' ');
for (let arg of argsArray) {
const equalsIndex = arg.indexOf('=');
if (equalsIndex !== -1) {
const key = arg.substring(0, equalsIndex);
const value = arg.substring(equalsIndex + 1);
// Replace "wrapping quotes" used for escaping spaces
argObj[key] = value.replace(/(^")|("$)/g, '');
}
else {
break;
}
// Match named arguments
const namedArgPattern = /(\w+)=("(?:\\.|[^"\\])*"|\S+)/g;
let match;
while ((match = namedArgPattern.exec(args)) !== null) {
const key = match[1];
const value = match[2];
// Remove the quotes around the value, if any
argObj[key] = value.replace(/(^")|("$)/g, '');
}
unnamedArg = argsArray.slice(Object.keys(argObj).length).join(' ');
// Match unnamed argument
const unnamedArgPattern = /(?:\w+=(?:"(?:\\.|[^"\\])*"|\S+)\s*)*(.*)/s;
match = unnamedArgPattern.exec(args);
if (match !== null) {
unnamedArg = match[1].trim();
}
// Excluded commands format in their own function
if (!excludedFromRegex.includes(command)) {
@@ -130,8 +139,8 @@ parser.addCommand('?', helpCommandCallback, ['help'], ' get help on macros,
parser.addCommand('name', setNameCallback, ['persona'], '<span class="monospace">(name)</span> sets user name and persona avatar (if set)', true, true);
parser.addCommand('sync', syncCallback, [], ' syncs user name in user-attributed messages in the current chat', true, true);
parser.addCommand('lock', bindCallback, ['bind'], ' locks/unlocks a persona (name and avatar) to the current chat', true, true);
parser.addCommand('bg', setBackgroundCallback, ['background'], '<span class="monospace">(filename)</span> sets a background according to filename, partial names allowed, will set the first one alphabetically if multiple files begin with the provided argument string', false, true);
parser.addCommand('sendas', sendMessageAs, [], ` sends message as a specific character. Uses character avatar if it exists in the characters list. Example that will send "Hello, guys!" from "Chloe": <pre><code>/sendas Chloe&#10;Hello, guys!</code></pre>`, true, true);
parser.addCommand('bg', setBackgroundCallback, ['background'], '<span class="monospace">(filename)</span> sets a background according to filename, partial names allowed', false, true);
parser.addCommand('sendas', sendMessageAs, [], ` sends message as a specific character. Uses character avatar if it exists in the characters list. Example that will send "Hello, guys!" from "Chloe": <tt>/sendas name="Chloe" Hello, guys!</tt>`, true, true);
parser.addCommand('sys', sendNarratorMessage, ['nar'], '<span class="monospace">(text)</span> sends message as a system narrator', false, true);
parser.addCommand('sysname', setNarratorName, [], '<span class="monospace">(name)</span> sets a name for future system narrator messages in this chat (display only). Default: System. Leave empty to reset.', true, true);
parser.addCommand('comment', sendCommentMessage, [], '<span class="monospace">(text)</span> adds a note/comment message not part of the chat', false, true);
@@ -145,16 +154,431 @@ parser.addCommand('ask', askCharacter, [], '<span class="monospace">(prompt)</sp
parser.addCommand('delname', deleteMessagesByNameCallback, ['cancel'], '<span class="monospace">(name)</span> deletes all messages attributed to a specified name', true, true);
parser.addCommand('send', sendUserMessageCallback, ['add'], '<span class="monospace">(text)</span> adds a user message to the chat log without triggering a generation', true, true);
parser.addCommand('trigger', triggerGroupMessageCallback, [], '<span class="monospace">(member index or name)</span> triggers a message generation for the specified group member', true, true);
parser.addCommand('hide', hideMessageCallback, [], '<span class="monospace">(message index)</span> hides a chat message from the prompt', true, true);
parser.addCommand('unhide', unhideMessageCallback, [], '<span class="monospace">(message index)</span> unhides a message from the prompt', true, true);
parser.addCommand('hide', hideMessageCallback, [], '<span class="monospace">(message index or range)</span> hides a chat message from the prompt', true, true);
parser.addCommand('unhide', unhideMessageCallback, [], '<span class="monospace">(message index or range)</span> unhides a message from the prompt', true, true);
parser.addCommand('disable', disableGroupMemberCallback, [], '<span class="monospace">(member index or name)</span> disables a group member from being drafted for replies', true, true);
parser.addCommand('enable', enableGroupMemberCallback, [], '<span class="monospace">(member index or name)</span> enables a group member to be drafted for replies', true, true);
parser.addCommand('memberadd', addGroupMemberCallback, ['addmember'], '<span class="monospace">(character name)</span> adds a new group member to the group chat', true, true);
parser.addCommand('memberremove', removeGroupMemberCallback, ['removemember'], '<span class="monospace">(member index or name)</span> removes a group member from the group chat', true, true);
parser.addCommand('memberup', moveGroupMemberUpCallback, ['upmember'], '<span class="monospace">(member index or name)</span> moves a group member up in the group chat list', true, true);
parser.addCommand('memberdown', moveGroupMemberDownCallback, ['downmember'], '<span class="monospace">(member index or name)</span> moves a group member down in the group chat list', true, true);
parser.addCommand('peek', peekCallback, [], '<span class="monospace">(message index or range)</span> shows a group member character card without switching chats', true, true);
parser.addCommand('delswipe', deleteSwipeCallback, ['swipedel'], '<span class="monospace">(optional 1-based id)</span> deletes a swipe from the last chat message. If swipe id not provided - deletes the current swipe.', true, true);
parser.addCommand('echo', echoCallback, [], '<span class="monospace">(text)</span> echoes the text to toast message. Useful for pipes debugging.', true, true);
parser.addCommand('gen', generateCallback, [], '<span class="monospace">(lock=on/off [prompt])</span> generates text using the provided prompt and passes it to the next command through the pipe, optionally locking user input while generating.', true, true);
parser.addCommand('genraw', generateRawCallback, [], '<span class="monospace">(lock=on/off [prompt])</span> generates text using the provided prompt and passes it to the next command through the pipe, optionally locking user input while generating. Does not include chat history or character card. Use instruct=off to skip instruct formatting, e.g. <tt>/genraw instruct=off Why is the sky blue?</tt>. Use stop=... with a JSON-serialized array to add one-time custom stop strings, e.g. <tt>/genraw stop=["\\n"] Say hi</tt>', true, true);
parser.addCommand('addswipe', addSwipeCallback, ['swipeadd'], '<span class="monospace">(text)</span> adds a swipe to the last chat message.', true, true);
parser.addCommand('abort', abortCallback, [], ' aborts the slash command batch execution', true, true);
parser.addCommand('fuzzy', fuzzyCallback, [], 'list=["a","b","c"] (search value) performs a fuzzy match of the provided search using the provided list of value and passes the closest match to the next command through the pipe.', true, true);
parser.addCommand('pass', (_, arg) => arg, ['return'], '<span class="monospace">(text)</span> passes the text to the next command through the pipe.', true, true);
parser.addCommand('delay', delayCallback, ['wait', 'sleep'], '<span class="monospace">(milliseconds)</span> delays the next command in the pipe by the specified number of milliseconds.', true, true);
parser.addCommand('input', inputCallback, ['prompt'], '<span class="monospace">(prompt)</span> shows a popup with the provided prompt and passes the user input to the next command through the pipe.', true, true);
parser.addCommand('run', runCallback, ['call', 'exec'], '<span class="monospace">(QR label)</span> runs a Quick Reply with the specified name from the current preset.', true, true);
parser.addCommand('messages', getMessagesCallback, ['message'], '<span class="monospace">(names=off/on [message index or range])</span> returns the specified message or range of messages as a string.', true, true);
parser.addCommand('setinput', setInputCallback, [], '<span class="monospace">(text)</span> sets the user input to the specified text and passes it to the next command through the pipe.', true, true);
parser.addCommand('popup', popupCallback, [], '<span class="monospace">(text)</span> shows a blocking popup with the specified text.', true, true);
parser.addCommand('buttons', buttonsCallback, [], '<span class="monospace">labels=["a","b"] (text)</span> shows a blocking popup with the specified text and buttons. Returns the clicked button label into the pipe or empty string if canceled.', true, true);
parser.addCommand('trimtokens', trimTokensCallback, [], '<span class="monospace">limit=number (direction=start/end [text])</span> trims the start or end of text to the specified number of tokens.', true, true);
parser.addCommand('trimstart', trimStartCallback, [], '<span class="monospace">(text)</span> trims the text to the start of the first full sentence.', true, true);
parser.addCommand('trimend', trimEndCallback, [], '<span class="monospace">(text)</span> trims the text to the end of the last full sentence.', true, true);
registerVariableCommands();
// Chat-metadata key under which a custom narrator name is stored (set via /sysname)
const NARRATOR_NAME_KEY = 'narrator_name';
// Display name used for /sys narrator messages when no custom name has been set
const NARRATOR_NAME_DEFAULT = 'System';
// Display name attached to /comment note messages
export const COMMENT_NAME_DEFAULT = 'Note';
/**
 * Replaces the contents of the user input textarea and passes the text down the pipe.
 * @param {object} _ Unused named arguments
 * @param {string} value Text to put into the input field
 * @returns {string} The same text, for the next piped command
 */
function setInputCallback(_, value) {
    const textarea = $('#send_textarea');
    // Trigger 'input' so listeners (autosize, token counters, etc.) react to the change
    textarea.val(value || '').trigger('input');
    return value;
}
/**
 * Trims the text so it begins at the start of the first full sentence.
 * @param {object} _ Unused named arguments
 * @param {string} value Text to trim
 * @returns {string} Trimmed text, or an empty string for empty input
 */
function trimStartCallback(_, value) {
    return value ? trimToStartSentence(value) : '';
}
/**
 * Trims the text so it ends at the end of the last full sentence.
 * @param {object} _ Unused named arguments
 * @param {string} value Text to trim
 * @returns {string} Trimmed text, or an empty string for empty input
 */
function trimEndCallback(_, value) {
    return value ? trimToEndSentence(value) : '';
}
/**
 * Trims the text to at most `limit` tokens.
 * With direction 'start' the START portion of the text is kept (first `limit` tokens);
 * with direction 'end' (default) the END portion is kept (last `limit` tokens),
 * matching the token-slice logic below.
 * @param {object} arg Named arguments: limit (token count), direction ('start'/'end')
 * @param {string} value Text to trim
 * @returns {string} Trimmed text; the original text when already under the limit or on tokenizer failure
 */
function trimTokensCallback(arg, value) {
    if (!value) {
        console.warn('WARN: No argument provided for /trimtokens command');
        return '';
    }

    const limit = Number(resolveVariable(arg.limit));

    if (isNaN(limit)) {
        console.warn(`WARN: Invalid limit provided for /trimtokens command: ${limit}`);
        return value;
    }

    if (limit <= 0) {
        return '';
    }

    const direction = arg.direction || 'end';
    const tokenCount = getTokenCount(value); // fix: missing semicolon

    // Token count is less than the limit, do nothing
    if (tokenCount <= limit) {
        return value;
    }

    const { tokenizerName, tokenizerId } = getFriendlyTokenizerName(main_api);
    console.debug('Requesting tokenization for /trimtokens command', tokenizerName);

    try {
        const textTokens = getTextTokens(tokenizerId, value);

        if (!Array.isArray(textTokens) || !textTokens.length) {
            console.warn('WARN: No tokens returned for /trimtokens command, falling back to estimation');
            // Estimate how many characters correspond to `limit` tokens and KEEP that many,
            // from the same side the token path keeps. (The previous code removed a
            // limit-sized chunk instead, returning the complement of the requested size.)
            const percentage = limit / tokenCount;
            const keepChars = Math.floor(value.length * percentage);
            const trimmedText = direction === 'start'
                ? value.substring(0, keepChars) // keep the head
                : value.substring(value.length - keepChars); // keep the tail
            return trimmedText;
        }

        // 'start': keep the first `limit` tokens; 'end': keep the last `limit` tokens.
        const sliceTokens = direction === 'start' ? textTokens.slice(0, limit) : textTokens.slice(-limit);
        const decodedText = decodeTextTokens(tokenizerId, sliceTokens);
        return decodedText;
    } catch (error) {
        console.warn('WARN: Tokenization failed for /trimtokens command, returning original', error);
        return value;
    }
}
/**
 * Shows a blocking popup with the given text and one clickable button per label.
 * @param {object} args Named arguments: labels - JSON-serialized array of button captions
 * @param {string} text Popup body text (sanitized before rendering)
 * @returns {Promise<string>} The clicked button's label, or '' when cancelled or on invalid labels
 */
async function buttonsCallback(args, text) {
    try {
        const buttons = JSON.parse(resolveVariable(args?.labels));

        if (!Array.isArray(buttons) || !buttons.length) {
            console.warn('WARN: Invalid labels provided for /buttons command');
            return '';
        }

        // The executor is deliberately NOT async: a rejection inside an async
        // executor would be swallowed instead of rejecting the returned promise.
        return new Promise((resolve) => {
            const safeValue = DOMPurify.sanitize(text || '');

            const buttonContainer = document.createElement('div');
            buttonContainer.classList.add('flex-container', 'flexFlowColumn', 'wide100p', 'm-t-1');

            for (const button of buttons) {
                const buttonElement = document.createElement('div');
                buttonElement.classList.add('menu_button', 'wide100p');
                buttonElement.addEventListener('click', () => {
                    // First resolution wins; the popup's resolve('') below becomes a no-op.
                    resolve(button);
                    $('#dialogue_popup_ok').trigger('click');
                });
                buttonElement.innerText = button;
                buttonContainer.appendChild(buttonElement);
            }

            const popupContainer = document.createElement('div');
            popupContainer.innerHTML = safeValue;
            popupContainer.appendChild(buttonContainer);

            // Dismissing the popup without clicking a button yields an empty string.
            callPopup(popupContainer, 'text', '', { okButton: 'Cancel' })
                .then(() => resolve(''))
                .catch(() => resolve(''));
        });
    } catch {
        // JSON.parse failed on the labels argument; previously this was swallowed silently.
        console.warn('WARN: Invalid labels provided for /buttons command');
        return '';
    }
}
/**
 * Shows a blocking text popup with the sanitized value.
 * @param {object} _ Unused named arguments
 * @param {string} value Text to display
 * @returns {Promise<string>} The original (unsanitized) input, passed down the pipe
 */
async function popupCallback(_, value) {
    const safeValue = DOMPurify.sanitize(value || '');
    // NOTE(review): the 1ms delays appear to be required for the popup to render
    // reliably (see the explicit warning on the same pattern in inputCallback) — confirm
    await delay(1);
    await callPopup(safeValue, 'text', '');
    await delay(1);
    return value;
}
/**
 * Returns the chat messages in the given index range as a single string.
 * System (hidden) messages are skipped; missing indices are warned about and skipped.
 * @param {object} args Named arguments: names - 'off' to omit the "Name: " prefixes
 * @param {string} value Message index or range (e.g. "2" or "2-5")
 * @returns {string} Messages joined with blank lines, or '' for an invalid range
 */
function getMessagesCallback(args, value) {
    const includeNames = !isFalseBoolean(args?.names);
    const range = stringToRange(value, 0, chat.length - 1);

    if (!range) {
        console.warn(`WARN: Invalid range provided for /getmessages command: ${value}`);
        return '';
    }

    const collected = [];

    for (let messageId = range.start; messageId <= range.end; messageId++) {
        const message = chat[messageId];

        if (!message) {
            console.warn(`WARN: No message found with ID ${messageId}`);
            continue;
        }

        if (message.is_system) {
            continue;
        }

        collected.push(includeNames ? `${message.name}: ${message.mes}` : message.mes);
    }

    return collected.join('\n\n');
}
/**
 * Runs a Quick Reply with the given label via the Quick Reply extension bridge.
 * @param {object} _ Unused named arguments
 * @param {string} name Label of the Quick Reply to execute
 * @returns {Promise<string>} The Quick Reply's result, or '' on error/missing extension
 */
async function runCallback(_, name) {
    if (!name) {
        toastr.warning('No name provided for /run command');
        return '';
    }

    // The Quick Reply extension exposes its entry point on window when loaded.
    if (typeof window['executeQuickReplyByName'] !== 'function') {
        toastr.warning('Quick Reply extension is not loaded');
        return '';
    }

    name = name.trim();

    try {
        return await window['executeQuickReplyByName'](name);
    } catch (error) {
        toastr.error(`Error running Quick Reply "${name}": ${error.message}`, 'Error');
        return '';
    }
}
/**
 * Aborts the current slash command batch: clears the user input,
 * then throws so the batch executor's loop unwinds.
 * @throws {Error} Always — this is the abort mechanism, not a failure.
 */
function abortCallback() {
    $('#send_textarea').val('').trigger('input');
    throw new Error('/abort command executed');
}
/**
 * Delays the next piped command by the given number of milliseconds.
 * Non-numeric amounts fall back to a zero delay.
 * @param {object} _ Unused named arguments
 * @param {string} amount Delay in milliseconds
 */
async function delayCallback(_, amount) {
    if (!amount) {
        console.warn('WARN: No amount provided for /delay command');
        return;
    }

    const milliseconds = Number(amount);
    await delay(isNaN(milliseconds) ? 0 : milliseconds);
}
/**
 * Shows an input popup with the given prompt and passes the user's entry down the pipe.
 * @param {object} _ Unused named arguments
 * @param {string} prompt Prompt text to display (sanitized before rendering)
 * @returns {Promise<string>} User input, or '' when the popup is cancelled
 */
async function inputCallback(_, prompt) {
    // Do not remove this delay, otherwise the prompt will not show up
    await delay(1);
    const safeValue = DOMPurify.sanitize(prompt || '');
    const result = await callPopup(safeValue, 'input', '', { okButton: 'Ok' });
    await delay(1);
    return result || '';
}
/**
 * Performs a fuzzy match of the search value against a JSON-serialized list of strings.
 * @param {object} args Named arguments: list - JSON array of candidate values
 * @param {string} value Search string
 * @returns {string} The closest matching list item, or '' when there is no match or the list is invalid
 */
function fuzzyCallback(args, value) {
    if (!value) {
        console.warn('WARN: No argument provided for /fuzzy command');
        return '';
    }

    if (!args.list) {
        console.warn('WARN: No list argument provided for /fuzzy command');
        return '';
    }

    try {
        const list = JSON.parse(resolveVariable(args.list));

        if (!Array.isArray(list)) {
            console.warn('WARN: Invalid list argument provided for /fuzzy command');
            return '';
        }

        const fuse = new Fuse(list, {
            includeScore: true,
            findAllMatches: true,
            ignoreLocation: true,
            threshold: 0.7,
        });

        const result = fuse.search(value);
        // Return '' (not undefined) when nothing matches, so the pipe never
        // receives the literal string "undefined" — consistent with the other failure paths.
        return result[0]?.item ?? '';
    } catch {
        console.warn('WARN: Invalid list argument provided for /fuzzy command');
        return '';
    }
}
/**
 * Registers one-time custom stopping strings from a JSON-serialized array.
 * Invalid JSON or non-array payloads are silently ignored (best effort by design).
 * @param {string} value JSON array of stop strings
 */
function setEphemeralStopStrings(value) {
    if (typeof value !== 'string' || !value.length) {
        return;
    }

    let stopStrings;

    try {
        stopStrings = JSON.parse(value);
    } catch {
        // Not valid JSON — deliberately ignored
        return;
    }

    if (!Array.isArray(stopStrings)) {
        return;
    }

    for (const stopString of stopStrings) {
        addEphemeralStoppingString(stopString);
    }
}
/**
 * Generates text from a raw prompt (no chat history or character card) and pipes the result.
 * @param {object} args Named arguments: lock ('on' disables input while generating),
 *                      stop (JSON array of one-time stop strings), instruct ('off' skips instruct formatting)
 * @param {string} value Prompt text
 * @returns {Promise<string|undefined>} Generated text, or undefined when no prompt is given
 */
async function generateRawCallback(args, value) {
    if (!value) {
        console.warn('WARN: No argument provided for /genraw command');
        return;
    }

    // Prevent generate recursion
    $('#send_textarea').val('').trigger('input');

    const shouldLock = isTrueBoolean(args?.lock);

    try {
        if (shouldLock) {
            deactivateSendButtons();
        }

        setEphemeralStopStrings(resolveVariable(args?.stop));
        return await generateRaw(value, '', isFalseBoolean(args?.instruct));
    } finally {
        // Always restore the UI and clear one-time stop strings, even on failure
        if (shouldLock) {
            activateSendButtons();
        }

        flushEphemeralStoppingStrings();
    }
}
/**
 * Generates text from the given prompt using the full quiet-prompt pipeline and pipes the result.
 * @param {object} args Named arguments: lock ('on' disables input while generating),
 *                      stop (JSON array of one-time stop strings)
 * @param {string} value Prompt text
 * @returns {Promise<string|undefined>} Generated text, or undefined when no prompt is given
 */
async function generateCallback(args, value) {
    if (!value) {
        console.warn('WARN: No argument provided for /gen command');
        return;
    }

    // Prevent generate recursion
    $('#send_textarea').val('').trigger('input');

    const shouldLock = isTrueBoolean(args?.lock);

    try {
        if (shouldLock) {
            deactivateSendButtons();
        }

        setEphemeralStopStrings(resolveVariable(args?.stop));
        return await generateQuietPrompt(value, false, false, '');
    } finally {
        // Always restore the UI and clear one-time stop strings, even on failure
        if (shouldLock) {
            activateSendButtons();
        }

        flushEphemeralStoppingStrings();
    }
}
/**
 * Echoes the argument to a toast message (useful for debugging pipes) and passes it on.
 * @param {object} _ Unused named arguments
 * @param {*} arg Value to display
 * @returns {*} The original argument, or undefined for empty input
 */
async function echoCallback(_, arg) {
    // Guard against nullish input explicitly: the old `!String(arg)` check was
    // never true for undefined/null (String(undefined) === 'undefined' is truthy),
    // so a literal "undefined" toast would be shown.
    const text = arg == null ? '' : String(arg);

    if (!text) {
        console.warn('WARN: No argument provided for /echo command');
        return;
    }

    toastr.info(text);
    return arg;
}
/**
 * Appends a new swipe (alternative message text) to the last chat message,
 * then saves and reloads the chat. User/system messages and messages with
 * images cannot receive swipes.
 * @param {object} _ Unused named arguments
 * @param {string} arg Text of the swipe to add
 */
async function addSwipeCallback(_, arg) {
    const lastMessage = chat[chat.length - 1];

    if (!lastMessage) {
        toastr.warning("No messages to add swipes to.");
        return;
    }

    if (!arg) {
        console.warn('WARN: No argument provided for /addswipe command');
        return;
    }

    if (lastMessage.is_user) {
        toastr.warning("Can't add swipes to user messages.");
        return;
    }

    if (lastMessage.is_system) {
        toastr.warning("Can't add swipes to system messages.");
        return;
    }

    if (lastMessage.extra?.image) {
        toastr.warning("Can't add swipes to message containing an image.");
        return;
    }

    // Lazily initialize the swipe arrays from the current message text
    if (!Array.isArray(lastMessage.swipes)) {
        lastMessage.swipes = [lastMessage.mes];
        lastMessage.swipe_info = [{}];
        lastMessage.swipe_id = 0;
    }

    const newSwipeInfo = {
        send_date: getMessageTimeStamp(),
        gen_started: null,
        gen_finished: null,
        extra: {
            bias: extractMessageBias(arg),
            gen_id: Date.now(),
            api: 'manual',
            model: 'slash command',
        },
    };

    lastMessage.swipes.push(arg);
    lastMessage.swipe_info.push(newSwipeInfo);

    await saveChatConditional();
    await reloadCurrentChat();
}
/**
 * Deletes a swipe from the last chat message (1-based id, or the currently
 * shown swipe when no id is given), then saves and reloads the chat.
 * The last remaining swipe cannot be deleted.
 * @param {object} _ Unused named arguments
 * @param {string} arg Optional 1-based swipe id
 */
async function deleteSwipeCallback(_, arg) {
    const lastMessage = chat[chat.length - 1];

    if (!lastMessage || !Array.isArray(lastMessage.swipes) || !lastMessage.swipes.length) {
        toastr.warning("No messages to delete swipes from.");
        return;
    }

    if (lastMessage.swipes.length <= 1) {
        toastr.warning("Can't delete the last swipe.");
        return;
    }

    // Convert the 1-based argument to a 0-based index; fall back to the active swipe
    const hasNumericArg = arg && !isNaN(Number(arg));
    const swipeId = hasNumericArg ? Number(arg) - 1 : lastMessage.swipe_id;

    if (swipeId < 0 || swipeId >= lastMessage.swipes.length) {
        toastr.warning(`Invalid swipe ID: ${swipeId + 1}`);
        return;
    }

    lastMessage.swipes.splice(swipeId, 1);

    if (Array.isArray(lastMessage.swipe_info) && lastMessage.swipe_info.length) {
        lastMessage.swipe_info.splice(swipeId, 1);
    }

    // Clamp the active swipe to the new bounds and sync the displayed text
    const newSwipeId = Math.min(swipeId, lastMessage.swipes.length - 1);
    lastMessage.swipe_id = newSwipeId;
    lastMessage.mes = lastMessage.swipes[newSwipeId];

    await saveChatConditional();
    await reloadCurrentChat();
}
async function askCharacter(_, text) {
// Prevent generate recursion
$('#send_textarea').val('');
$('#send_textarea').val('').trigger('input');
// Not supported in group chats
// TODO: Maybe support group chats?
@@ -240,15 +664,23 @@ async function hideMessageCallback(_, arg) {
return;
}
const messageId = Number(arg);
const messageBlock = $(`.mes[mesid="${messageId}"]`);
const range = stringToRange(arg, 0, chat.length - 1);
if (!messageBlock.length) {
console.warn(`WARN: No message found with ID ${messageId}`);
if (!range) {
console.warn(`WARN: Invalid range provided for /hide command: ${arg}`);
return;
}
await hideChatMessage(messageId, messageBlock);
for (let messageId = range.start; messageId <= range.end; messageId++) {
const messageBlock = $(`.mes[mesid="${messageId}"]`);
if (!messageBlock.length) {
console.warn(`WARN: No message found with ID ${messageId}`);
return;
}
await hideChatMessage(messageId, messageBlock);
}
}
async function unhideMessageCallback(_, arg) {
@@ -257,20 +689,175 @@ async function unhideMessageCallback(_, arg) {
return;
}
const messageId = Number(arg);
const messageBlock = $(`.mes[mesid="${messageId}"]`);
const range = stringToRange(arg, 0, chat.length - 1);
if (!messageBlock.length) {
console.warn(`WARN: No message found with ID ${messageId}`);
if (!range) {
console.warn(`WARN: Invalid range provided for /unhide command: ${arg}`);
return;
}
await unhideChatMessage(messageId, messageBlock);
for (let messageId = range.start; messageId <= range.end; messageId++) {
const messageBlock = $(`.mes[mesid="${messageId}"]`);
if (!messageBlock.length) {
console.warn(`WARN: No message found with ID ${messageId}`);
return;
}
await unhideChatMessage(messageId, messageBlock);
}
}
/**
 * Disables a group member (by index or name) so they are not drafted for replies.
 * @param {object} _ Unused named arguments
 * @param {string} arg Member index or name
 */
async function disableGroupMemberCallback(_, arg) {
    if (!selected_group) {
        toastr.warning("Cannot run /disable command outside of a group chat.");
        return;
    }

    const memberId = findGroupMemberId(arg);

    if (memberId === undefined) {
        console.warn(`WARN: No group member found for argument ${arg}`);
        return;
    }

    // Reuse the existing UI action button instead of duplicating its logic
    const selector = `.group_member[chid="${memberId}"] [data-action="disable"]`;
    $(selector).trigger('click');
}
/**
 * Enables a group member (by index or name) so they can be drafted for replies.
 * @param {object} _ Unused named arguments
 * @param {string} arg Member index or name
 */
async function enableGroupMemberCallback(_, arg) {
    if (!selected_group) {
        toastr.warning("Cannot run /enable command outside of a group chat.");
        return;
    }

    const memberId = findGroupMemberId(arg);

    if (memberId === undefined) {
        console.warn(`WARN: No group member found for argument ${arg}`);
        return;
    }

    // Reuse the existing UI action button instead of duplicating its logic
    const selector = `.group_member[chid="${memberId}"] [data-action="enable"]`;
    $(selector).trigger('click');
}
/**
 * Moves a group member (by index or name) up in the group chat list.
 * @param {object} _ Unused named arguments
 * @param {string} arg Member index or name
 */
async function moveGroupMemberUpCallback(_, arg) {
    if (!selected_group) {
        toastr.warning("Cannot run /memberup command outside of a group chat.");
        return;
    }

    const memberId = findGroupMemberId(arg);

    if (memberId === undefined) {
        console.warn(`WARN: No group member found for argument ${arg}`);
        return;
    }

    // Reuse the existing UI action button instead of duplicating its logic
    const selector = `.group_member[chid="${memberId}"] [data-action="up"]`;
    $(selector).trigger('click');
}
/**
 * Moves a group member (by index or name) down in the group chat list.
 * @param {object} _ Unused named arguments
 * @param {string} arg Member index or name
 */
async function moveGroupMemberDownCallback(_, arg) {
    if (!selected_group) {
        toastr.warning("Cannot run /memberdown command outside of a group chat.");
        return;
    }

    const memberId = findGroupMemberId(arg);

    if (memberId === undefined) {
        console.warn(`WARN: No group member found for argument ${arg}`);
        return;
    }

    // Reuse the existing UI action button instead of duplicating its logic
    const selector = `.group_member[chid="${memberId}"] [data-action="down"]`;
    $(selector).trigger('click');
}
/**
 * Shows a group member's character card (by index or name) without switching chats.
 * Not available while a group reply is being generated.
 * @param {object} _ Unused named arguments
 * @param {string} arg Member index or name
 */
async function peekCallback(_, arg) {
    if (!selected_group) {
        toastr.warning("Cannot run /peek command outside of a group chat.");
        return;
    }

    if (is_group_generating) {
        toastr.warning("Cannot run /peek command while the group reply is generating.");
        return;
    }

    const memberId = findGroupMemberId(arg);

    if (memberId === undefined) {
        console.warn(`WARN: No group member found for argument ${arg}`);
        return;
    }

    // Reuse the existing UI action button instead of duplicating its logic
    const selector = `.group_member[chid="${memberId}"] [data-action="view"]`;
    $(selector).trigger('click');
}
/**
 * Removes a group member (by index or name) from the group chat.
 * Not available while a group reply is being generated.
 * @param {object} _ Unused named arguments
 * @param {string} arg Member index or name
 */
async function removeGroupMemberCallback(_, arg) {
    if (!selected_group) {
        toastr.warning("Cannot run /memberremove command outside of a group chat.");
        return;
    }

    if (is_group_generating) {
        toastr.warning("Cannot run /memberremove command while the group reply is generating.");
        return;
    }

    const memberId = findGroupMemberId(arg);

    if (memberId === undefined) {
        console.warn(`WARN: No group member found for argument ${arg}`);
        return;
    }

    // Reuse the existing UI action button instead of duplicating its logic
    const selector = `.group_member[chid="${memberId}"] [data-action="remove"]`;
    $(selector).trigger('click');
}
/**
 * Adds an existing character (by name) to the current group chat,
 * persists the group, and refreshes the group UI.
 * @param {object} _ Unused named arguments
 * @param {string} arg Character name
 */
async function addGroupMemberCallback(_, arg) {
    if (!selected_group) {
        toastr.warning("Cannot run /memberadd command outside of a group chat.");
        return;
    }

    if (!arg) {
        console.warn('WARN: No argument provided for /memberadd command');
        return;
    }

    arg = arg.trim();
    const characterIndex = findCharacterIndex(arg);

    if (characterIndex === -1) {
        console.warn(`WARN: No character found for argument ${arg}`);
        return;
    }

    const character = characters[characterIndex];
    const group = groups.find(x => x.id === selected_group);

    if (!group || !Array.isArray(group.members)) {
        console.warn(`WARN: No group found for ID ${selected_group}`);
        return;
    }

    // Group membership is tracked by character avatar filename
    const avatar = character.avatar;

    if (group.members.includes(avatar)) {
        toastr.warning(`${character.name} is already a member of this group.`);
        return;
    }

    group.members.push(avatar);
    await saveGroupChat(selected_group, true);

    // Trigger to reload group UI
    $('#rm_button_selected_ch').trigger('click');
}
async function triggerGroupMessageCallback(_, arg) {
if (!selected_group) {
toastr.warning("Cannot run trigger command outside of a group chat.");
toastr.warning("Cannot run /trigger command outside of a group chat.");
return;
}
@@ -279,65 +866,17 @@ async function triggerGroupMessageCallback(_, arg) {
return;
}
arg = arg?.trim();
if (!arg) {
console.warn('WARN: No argument provided for /trigger command');
return;
}
const group = groups.find(x => x.id == selected_group);
if (!group || !Array.isArray(group.members)) {
console.warn('WARN: No group found for selected group ID');
return;
}
// Prevent generate recursion
$('#send_textarea').val('');
$('#send_textarea').val('').trigger('input');
// Index is 1-based
const index = parseInt(arg) - 1;
const searchByName = isNaN(index);
const chid = findGroupMemberId(arg);
if (searchByName) {
const memberNames = group.members.map(x => ({ name: characters.find(y => y.avatar === x)?.name, index: characters.findIndex(y => y.avatar === x) }));
const fuse = new Fuse(memberNames, { keys: ['name'] });
const result = fuse.search(arg);
if (!result.length) {
console.warn(`WARN: No group member found with name ${arg}`);
return;
}
const chid = result[0].item.index;
if (chid === -1) {
console.warn(`WARN: No character found for group member ${arg}`);
return;
}
console.log(`Triggering group member ${chid} (${arg}) from search result`, result[0]);
Generate('normal', { force_chid: chid });
} else {
const memberAvatar = group.members[index];
if (memberAvatar === undefined) {
console.warn(`WARN: No group member found at index ${index}`);
return;
}
const chid = characters.findIndex(x => x.avatar === memberAvatar);
if (chid === -1) {
console.warn(`WARN: No character found for group member ${memberAvatar} at index ${index}`);
return;
}
console.log(`Triggering group member ${memberAvatar} at index ${index}`);
Generate('normal', { force_chid: chid });
if (chid === undefined) {
console.warn(`WARN: No group member found for argument ${arg}`);
return;
}
Generate('normal', { force_chid: chid });
}
async function sendUserMessageCallback(_, text) {
@@ -348,7 +887,7 @@ async function sendUserMessageCallback(_, text) {
text = text.trim();
const bias = extractMessageBias(text);
sendMessageAsUser(text, bias);
await sendMessageAsUser(text, bias);
}
async function deleteMessagesByNameCallback(_, name) {
@@ -428,12 +967,12 @@ function openChat(id) {
function continueChatCallback() {
// Prevent infinite recursion
$('#send_textarea').val('');
$('#send_textarea').val('').trigger('input');
$('#option_continue').trigger('click', { fromSlashCommand: true });
}
export async function generateSystemMessage(_, prompt) {
$('#send_textarea').val('');
$('#send_textarea').val('').trigger('input');
if (!prompt) {
console.warn('WARN: No prompt provided for /sysgen command');
@@ -496,19 +1035,32 @@ async function setNarratorName(_, text) {
await saveChatConditional();
}
export async function sendMessageAs(_, text) {
export async function sendMessageAs(namedArgs, text) {
if (!text) {
return;
}
const parts = text.split('\n');
if (parts.length <= 1) {
toastr.warning('Both character name and message are required. Separate them with a new line.');
return;
}
let name;
let mesText;
const name = parts.shift().trim();
let mesText = parts.join('\n').trim();
if (namedArgs.name) {
name = namedArgs.name.trim();
mesText = text.trim();
if (!name && !text) {
toastr.warning('You must specify a name and text to send as');
return;
}
} else {
const parts = text.split('\n');
if (parts.length <= 1) {
toastr.warning('Both character name and message are required. Separate them with a new line.');
return;
}
name = parts.shift().trim();
mesText = parts.join('\n').trim();
}
// Requires a regex check after the slash command is pushed to output
mesText = getRegexedString(mesText, regex_placement.SLASH_COMMAND, { characterOverride: name });
@@ -692,25 +1244,58 @@ function setBackgroundCallback(_, bg) {
if (!bg) {
return;
}
console.log('Set background to ' + bg);
const bgElement = $(`.bg_example[bgfile^="${bg.trim()}"`);
if (bgElement.length) {
bgElement.get(0).click();
console.log('Set background to ' + bg);
const bgElements = Array.from(document.querySelectorAll(`.bg_example`)).map((x) => ({ element: x, bgfile: x.getAttribute('bgfile') }));
const fuse = new Fuse(bgElements, { keys: ['bgfile'] });
const result = fuse.search(bg);
if (!result.length) {
toastr.error(`No background found with name "${bg}"`);
return;
}
const bgElement = result[0].item.element;
if (bgElement instanceof HTMLElement) {
bgElement.click();
}
}
async function executeSlashCommands(text) {
/**
* Executes slash commands in the provided text
* @param {string} text Slash command text
* @param {boolean} unescape Whether to unescape the batch separator
* @returns {Promise<{interrupt: boolean, newText: string, pipe: string} | boolean>}
*/
async function executeSlashCommands(text, unescape = false) {
if (!text) {
return false;
}
// Unescape the pipe character and macro braces
if (unescape) {
text = text.replace(/\\\|/g, '|');
text = text.replace(/\\\{/g, '{');
text = text.replace(/\\\}/g, '}');
}
// Hack to allow multi-line slash commands
// All slash command messages should begin with a slash
const lines = text.split('|').map(line => line.trim());
const placeholder = '\u200B'; // Use a zero-width space as a placeholder
const chars = text.split('');
for (let i = 1; i < chars.length; i++) {
if (chars[i] === '|' && chars[i - 1] !== '\\') {
chars[i] = placeholder;
}
}
const lines = chars.join('').split(placeholder).map(line => line.trim());
const linesToRemove = [];
let interrupt = false;
let pipeResult = '';
for (let index = 0; index < lines.length; index++) {
const trimmedLine = lines[index].trim();
@@ -730,7 +1315,27 @@ async function executeSlashCommands(text) {
}
console.debug('Slash command executing:', result);
await result.command.callback(result.args, result.value);
let unnamedArg = result.value || pipeResult;
if (typeof result.args === 'object') {
for (let [key, value] of Object.entries(result.args)) {
if (typeof value === 'string') {
value = substituteParams(value.trim());
if (/{{pipe}}/i.test(value)) {
value = value.replace(/{{pipe}}/i, pipeResult || '');
}
result.args[key] = value;
}
}
}
if (typeof unnamedArg === 'string' && /{{pipe}}/i.test(unnamedArg)) {
unnamedArg = unnamedArg.replace(/{{pipe}}/i, pipeResult || '');
}
pipeResult = await result.command.callback(result.args, unnamedArg);
if (result.command.interruptsGeneration) {
interrupt = true;
@@ -743,7 +1348,7 @@ async function executeSlashCommands(text) {
const newText = lines.filter(x => linesToRemove.indexOf(x) === -1).join('\n');
return { interrupt, newText };
return { interrupt, newText, pipe: pipeResult };
}
function setSlashCommandAutocomplete(textarea) {

View File

@@ -10,8 +10,8 @@ import {
} from "../script.js";
import { FILTER_TYPES, FilterHelper } from "./filters.js";
import { groupCandidatesFilter, selected_group } from "./group-chats.js";
import { onlyUnique, uuidv4 } from "./utils.js";
import { groupCandidatesFilter, groups, selected_group } from "./group-chats.js";
import { download, onlyUnique, parseJsonFile, uuidv4 } from "./utils.js";
export {
tags,
@@ -482,9 +482,20 @@ function onViewTagsListClick() {
$(list).append(`
<div class="title_restorable alignItemsBaseline">
<h3>Tag Management</h3>
<div class="menu_button menu_button_icon tag_view_create">
<i class="fa-solid fa-plus"></i>
<span data-i18n="Create">Create</span>
<div class="flex-container alignItemsBaseline">
<div class="menu_button menu_button_icon tag_view_backup" title="Save your tags to a file">
<i class="fa-solid fa-file-export"></i>
<span data-i18n="Backup">Backup</span>
</div>
<div class="menu_button menu_button_icon tag_view_restore" title="Restore tags from a file">
<i class="fa-solid fa-file-import"></i>
<span data-i18n="Restore">Restore</span>
</div>
<div class="menu_button menu_button_icon tag_view_create" title="Create a new tag">
<i class="fa-solid fa-plus"></i>
<span data-i18n="Create">Create</span>
</div>
<input type="file" id="tag_view_restore_input" hidden accept=".json">
</div>
</div>
<div class="justifyLeft m-b-1">
@@ -494,13 +505,110 @@ function onViewTagsListClick() {
</small>
</div>`);
for (const tag of tags.slice().sort((a, b) => a?.name?.toLowerCase()?.localeCompare(b?.name?.toLowerCase()))) {
const sortedTags = tags.slice().sort((a, b) => a?.name?.toLowerCase()?.localeCompare(b?.name?.toLowerCase()));
for (const tag of sortedTags) {
appendViewTagToList(list, tag, everything);
}
callPopup(list, 'text');
}
/**
 * Restores tags and tag assignments from a previously exported JSON backup file.
 * Merges into the current state (never overwrites existing tags), collects
 * per-item problems as warnings, then saves settings and refreshes the UI.
 * @param {Event} e Change event of the hidden file input
 */
async function onTagRestoreFileSelect(e) {
    const file = e.target.files[0];
    if (!file) {
        console.log('Tag restore: No file selected.');
        return;
    }
    const data = await parseJsonFile(file);
    if (!data) {
        toastr.warning('Empty file data', 'Tag restore');
        console.log('Tag restore: File data empty.');
        return;
    }
    // Expected backup shape: { tags: Tag[], tag_map: { [charAvatarOrGroupId]: tagId[] } }
    if (!data.tags || !data.tag_map || !Array.isArray(data.tags) || typeof data.tag_map !== 'object') {
        toastr.warning('Invalid file format', 'Tag restore');
        console.log('Tag restore: Invalid file format.');
        return;
    }
    // Problems are collected (not fatal) and reported at the end
    const warnings = [];
    // Import tags
    for (const tag of data.tags) {
        if (!tag.id || !tag.name) {
            warnings.push(`Tag object is invalid: ${JSON.stringify(tag)}.`);
            continue;
        }
        // Existing tags win; a duplicate id in the backup is skipped
        if (tags.find(x => x.id === tag.id)) {
            warnings.push(`Tag with id ${tag.id} already exists.`);
            continue;
        }
        tags.push(tag);
    }
    // Import tag_map
    for (const key of Object.keys(data.tag_map)) {
        const tagIds = data.tag_map[key];
        if (!Array.isArray(tagIds)) {
            warnings.push(`Tag map for key ${key} is invalid: ${JSON.stringify(tagIds)}.`);
            continue;
        }
        // Verify that the key points to a valid character or group.
        const characterExists = characters.some(x => String(x.avatar) === String(key));
        const groupExists = groups.some(x => String(x.id) === String(key));
        if (!characterExists && !groupExists) {
            warnings.push(`Tag map key ${key} does not exist.`);
            continue;
        }
        // Get existing tag ids for this key or empty array.
        const existingTagIds = tag_map[key] || [];
        // Merge existing and new tag ids. Remove duplicates.
        tag_map[key] = existingTagIds.concat(tagIds).filter(onlyUnique);
        // Verify that all tags exist. Remove tags that don't exist.
        tag_map[key] = tag_map[key].filter(x => tags.some(y => String(y.id) === String(x)));
    }
    if (warnings.length) {
        toastr.success('Tags restored with warnings. Check console for details.');
        console.warn(`TAG RESTORE REPORT\n====================\n${warnings.join('\n')}`);
    } else {
        toastr.success('Tags restored successfully.');
    }
    // Reset the file input so selecting the same file again re-triggers 'change',
    // then persist and refresh the character list and tag management popup
    $('#tag_view_restore_input').val('');
    saveSettingsDebounced();
    printCharacters(true);
    onViewTagsListClick();
}
/**
 * Opens the hidden file picker for restoring a tag backup, (re)binding its
 * change handler first so the handler is never attached twice.
 */
function onBackupRestoreClick() {
    const fileInput = $('#tag_view_restore_input');
    fileInput.off('change');
    fileInput.on('change', onTagRestoreFileSelect);
    fileInput.trigger('click');
}
/**
 * Downloads the current tags and tag assignments as a JSON backup file
 * named tags_YYYYMMDD.json.
 */
function onTagsBackupClick() {
    // Date-stamp the filename, e.g. "tags_20231126.json"
    const timestamp = new Date().toISOString().split('T')[0].replace(/-/g, '');
    const payload = {
        tags: tags,
        tag_map: tag_map,
    };
    const json = JSON.stringify(payload, null, 2);
    const blob = new Blob([json], { type: 'application/json' });
    download(blob, `tags_${timestamp}.json`, 'application/json');
}
function onTagCreateClick() {
const tag = createNewTag('New Tag');
appendViewTagToList($('#tag_view_list'), tag, []);
@@ -609,7 +717,7 @@ function onTagListHintClick() {
$(this).siblings(".innerActionable").toggleClass('hidden');
}
$(document).ready(() => {
jQuery(() => {
createTagInput('#tagInput', '#tagList');
createTagInput('#groupTagInput', '#groupTagList');
@@ -623,4 +731,6 @@ $(document).ready(() => {
$(document).on("click", ".tag_delete", onTagDeleteClick);
$(document).on("input", ".tag_view_name", onTagRenameInput);
$(document).on("click", ".tag_view_create", onTagCreateClick);
$(document).on("click", ".tag_view_backup", onTagsBackupClick);
$(document).on("click", ".tag_view_restore", onBackupRestoreClick);
});

View File

@@ -8,7 +8,7 @@ Text formatting commands:
<pre><code> like this</code></pre>
<ul>
<li><tt>`text`</tt> - displays as <code>inline code</code></li>
<li><tt> text</tt> - displays as a blockquote (note the space after >)</li>
<li><tt>&gt; text</tt> - displays as a blockquote (note the space after &gt;)</li>
<blockquote>like this</blockquote>
<li><tt># text</tt> - displays as a large header (note the space)</li>
<h1>like this</h1>

View File

@@ -1,7 +1,13 @@
System-wide Replacement Macros (in order of evaluation):
<div>
System-wide Replacement Macros (in order of evaluation):
</div>
<ul>
<li><tt>&lcub;&lcub;pipe&rcub;&rcub;</tt> only for slash command batching. Replaced with the returned result of the previous command.</li>
<li><tt>&lcub;&lcub;newline&rcub;&rcub;</tt> just inserts a newline.</li>
<li><tt>&lcub;&lcub;original&rcub;&rcub;</tt> global prompts defined in API settings. Only valid in Advanced Definitions prompt overrides.</li>
<li><tt>&lcub;&lcub;input&rcub;&rcub;</tt> the user input</li>
<li><tt>&lcub;&lcub;charPrompt&rcub;&rcub;</tt> the Character's Main Prompt override</li>
<li><tt>&lcub;&lcub;charJailbreak&rcub;&rcub;</tt> the Character's Jailbreak Prompt override</li>
<li><tt>&lcub;&lcub;description&rcub;&rcub;</tt> the Character's Description</li>
<li><tt>&lcub;&lcub;personality&rcub;&rcub;</tt> the Character's Personality</li>
<li><tt>&lcub;&lcub;scenario&rcub;&rcub;</tt> the Character's Scenario</li>
@@ -9,18 +15,52 @@ System-wide Replacement Macros (in order of evaluation):
<li><tt>&lcub;&lcub;mesExamples&rcub;&rcub;</tt> the Character's Dialogue Examples</li>
<li><tt>&lcub;&lcub;user&rcub;&rcub;</tt> your current Persona username</li>
<li><tt>&lcub;&lcub;char&rcub;&rcub;</tt> the Character's name</li>
<li><tt>&lcub;&lcub;lastMessage&rcub;&rcub;</tt> - the text of the latest chat message.</li>
<li><tt>&lcub;&lcub;lastMessageId&rcub;&rcub;</tt> index # of the latest chat message. Useful for slash command batching.</li>
<li><tt>&lcub;&lcub;// (note)&rcub;&rcub;</tt> you can leave a note here, and the macro will be replaced with blank content. Not visible for the AI.</li>
<li><tt>&lcub;&lcub;time&rcub;&rcub;</tt> the current time</li>
<li><tt>&lcub;&lcub;date&rcub;&rcub;</tt> the current date</li>
<li><tt>&lcub;&lcub;weekday&rcub;&rcub;</tt> the current weekday</li>
<li><tt>&lcub;&lcub;isotime&rcub;&rcub;</tt> the current ISO date (YYYY-MM-DD)</li>
<li><tt>&lcub;&lcub;isodate&rcub;&rcub;</tt> the current ISO time (24-hour clock)</li>
<li><tt>&lcub;&lcub;isotime&rcub;&rcub;</tt> the current ISO time (24-hour clock)</li>
<li><tt>&lcub;&lcub;isodate&rcub;&rcub;</tt> the current ISO date (YYYY-MM-DD)</li>
<li><tt>&lcub;&lcub;datetimeformat &hellip;&rcub;&rcub;</tt> the current date/time in the specified format, e. g. for German date/time: <tt>&lcub;&lcub;datetimeformat DD.MM.YYYY HH:mm&rcub;&rcub;</tt></li>
<li><tt>&lcub;&lcub;time_UTC±#&rcub;&rcub;</tt> the current time in the specified UTC time zone offset, e.g. UTC-4 or UTC+2</li>
<li><tt>&lcub;&lcub;idle_duration&rcub;&rcub;</tt> the time since the last user message was sent</li>
<li><tt>&lcub;&lcub;bias "text here"&rcub;&rcub;</tt> sets a behavioral bias for the AI until the next user input. Quotes around the text are important.</li>
<li><tt>&lcub;&lcub;random:(args)&rcub;&rcub;</tt> returns a random item from the list. (ex: &lcub;&lcub;random:1,2,3,4&rcub;&rcub; will return 1 of the 4 numbers at random. Works with text lists too.</li>
<li><tt>&lcub;&lcub;roll:(formula)&rcub;&rcub;</tt> rolls a dice. (ex: &lcub;&lcub;roll:1d6&rcub;&rcub; will roll a 6- sided dice and return a number between 1 and 6)</li>
<li><tt>&lcub;&lcub;roll:(formula)&rcub;&rcub;</tt> rolls a dice. (ex: <tt>>&lcub;&lcub;roll:1d6&rcub;&rcub</tt> will roll a 6-sided dice and return a number between 1 and 6)</li>
<li><tt>&lcub;&lcub;random:(args)&rcub;&rcub;</tt> returns a random item from the list. (ex: <tt>&lcub;&lcub;random:1,2,3,4&rcub;&rcub;</tt> will return 1 of the 4 numbers at random. Works with text lists too.</li>
<li><tt>&lcub;&lcub;random::(arg1)::(arg2)&rcub;&rcub;</tt> alternative syntax for random that allows to use commas in the list items.</li>
<li><tt>&lcub;&lcub;banned "text here"&rcub;&rcub;</tt> dynamically add text in the quotes to banned words sequences, if Text Generation WebUI backend used. Do nothing for others backends. Can be used anywhere (Character description, WI, AN, etc.) Quotes around the text are important.</li>
</ul>
<div>
Instruct Mode and Context Template Macros:
</div>
<div>
<small>(enabled in the Advanced Formatting settings)</small>
</div>
<ul>
<li><tt>&lcub;&lcub;exampleSeparator&rcub;&rcub;</tt> context template example dialogues separator</li>
<li><tt>&lcub;&lcub;chatStart&rcub;&rcub;</tt> context template chat start line</li>
<li><tt>&lcub;&lcub;instructSystem&rcub;&rcub;</tt> instruct system prompt</li>
<li><tt>&lcub;&lcub;instructSystemPrefix&rcub;&rcub;</tt> instruct system prompt prefix sequence</li>
<li><tt>&lcub;&lcub;instructSystemSuffix&rcub;&rcub;</tt> instruct system prompt suffix sequence</li>
<li><tt>&lcub;&lcub;instructInput&rcub;&rcub;</tt> instruct user input sequence</li>
<li><tt>&lcub;&lcub;instructOutput&rcub;&rcub;</tt> instruct assistant output sequence</li>
<li><tt>&lcub;&lcub;instructFirstOutput&rcub;&rcub;</tt> instruct assistant first output sequence</li>
<li><tt>&lcub;&lcub;instructLastOutput&rcub;&rcub;</tt> instruct assistant last output sequence</li>
<li><tt>&lcub;&lcub;instructSeparator&rcub;&rcub;</tt> instruct turn separator sequence</li>
<li><tt>&lcub;&lcub;instructStop&rcub;&rcub;</tt> instruct stop sequence</li>
</ul>
<div>
Chat variables Macros:
</div>
<div><small>Local variables = unique to the current chat</small></div>
<div><small>Global variables = works in any chat for any character</small></div>
<ul>
<li><tt>&lcub;&lcub;getvar::name&rcub;&rcub;</tt> replaced with the value of the local variable "name"</li>
<li><tt>&lcub;&lcub;setvar::name::value&rcub;&rcub;</tt> replaced with empty string, sets the local variable "name" to "value"</li>
<li><tt>&lcub;&lcub;addvar::name::increment&rcub;&rcub;</tt> replaced with the result of addition numeric value of "increment" to the local variable "name"</li>
<li><tt>&lcub;&lcub;getglobalvar::name&rcub;&rcub;</tt> replaced with the value of the global variable "name"</li>
<li><tt>&lcub;&lcub;setglobalvar::name::value&rcub;&rcub;</tt> replaced with empty string, sets the global variable "name" to "value"</li>
<li><tt>&lcub;&lcub;addglobalvar::name::value&rcub;&rcub;</tt> replaced with the result of addition numeric value of "increment" to the global variable "name"</li>
</ul>

View File

@@ -7,13 +7,15 @@ import {
saveSettingsDebounced,
setGenerationParamsFromPreset,
setOnlineStatus,
substituteParams,
} from "../script.js";
import {
power_user,
registerDebugFunction,
} from "./power-user.js";
import { getTextTokens, tokenizers } from "./tokenizers.js";
import { onlyUnique } from "./utils.js";
import { SENTENCEPIECE_TOKENIZERS, getTextTokens, tokenizers } from "./tokenizers.js";
import { getSortableDelay, onlyUnique } from "./utils.js";
export {
textgenerationwebui_settings,
@@ -26,11 +28,17 @@ export const textgen_types = {
OOBA: 'ooba',
MANCER: 'mancer',
APHRODITE: 'aphrodite',
TABBY: 'tabby',
KOBOLDCPP: 'koboldcpp',
};
// Maybe let it be configurable in the future?
export const MANCER_SERVER = 'https://neuro.mancer.tech';
// (7 days later) The future has come.
const MANCER_SERVER_KEY = 'mancer_server';
const MANCER_SERVER_DEFAULT = 'https://neuro.mancer.tech';
export let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEFAULT;
const KOBOLDCPP_ORDER = [6, 0, 1, 3, 4, 2, 5];
const textgenerationwebui_settings = {
temp: 0.7,
temperature_last: true,
@@ -71,14 +79,15 @@ const textgenerationwebui_settings = {
banned_tokens: '',
//n_aphrodite: 1,
//best_of_aphrodite: 1,
//ignore_eos_token_aphrodite: false,
//spaces_between_special_tokens_aphrodite: true,
ignore_eos_token_aphrodite: false,
spaces_between_special_tokens_aphrodite: true,
//logits_processors_aphrodite: [],
//log_probs_aphrodite: 0,
//prompt_log_probs_aphrodite: 0,
type: textgen_types.OOBA,
mancer_model: 'mytholite',
legacy_api: false,
sampler_order: KOBOLDCPP_ORDER,
};
export let textgenerationwebui_banned_in_macros = [];
@@ -124,11 +133,12 @@ const setting_names = [
"legacy_api",
//'n_aphrodite',
//'best_of_aphrodite',
//'ignore_eos_token_aphrodite',
//'spaces_between_special_tokens_aphrodite',
'ignore_eos_token_aphrodite',
'spaces_between_special_tokens_aphrodite',
//'logits_processors_aphrodite',
//'log_probs_aphrodite',
//'prompt_log_probs_aphrodite'
"sampler_order",
];
async function selectPreset(name) {
@@ -180,6 +190,7 @@ function getCustomTokenBans() {
return '';
}
const tokenizer = SENTENCEPIECE_TOKENIZERS.includes(power_user.tokenizer) ? power_user.tokenizer : tokenizers.LLAMA;
const result = [];
const sequences = textgenerationwebui_settings.banned_tokens
.split('\n')
@@ -211,7 +222,7 @@ function getCustomTokenBans() {
}
} else {
try {
const tokens = getTextTokens(tokenizers.LLAMA, line);
const tokens = getTextTokens(tokenizer, line);
result.push(...tokens);
} catch {
console.log(`Could not tokenize raw text: ${line}`);
@@ -249,6 +260,25 @@ function loadTextGenSettings(data, settings) {
$('#textgen_type').val(textgenerationwebui_settings.type);
showTypeSpecificControls(textgenerationwebui_settings.type);
//this is needed because showTypeSpecificControls() does not handle NOT declarations
if (isAphrodite()) {
$('[data-forAphro=False]').each(function () {
$(this).hide()
})
} else {
$('[data-forAphro=False]').each(function () {
$(this).show()
})
}
registerDebugFunction('change-mancer-url', 'Change Mancer base URL', 'Change Mancer API server base URL', () => {
const result = prompt(`Enter Mancer base URL\nDefault: ${MANCER_SERVER_DEFAULT}`, MANCER_SERVER);
if (result) {
localStorage.setItem(MANCER_SERVER_KEY, result);
MANCER_SERVER = result;
}
});
}
export function isMancer() {
@@ -259,43 +289,97 @@ export function isAphrodite() {
return textgenerationwebui_settings.type === textgen_types.APHRODITE;
}
export function isTabby() {
return textgenerationwebui_settings.type === textgen_types.TABBY;
}
export function isOoba() {
return textgenerationwebui_settings.type === textgen_types.OOBA;
}
export function isKoboldCpp() {
return textgenerationwebui_settings.type === textgen_types.KOBOLDCPP;
}
export function getTextGenUrlSourceId() {
switch (textgenerationwebui_settings.type) {
case textgen_types.OOBA:
return "#textgenerationwebui_api_url_text";
case textgen_types.APHRODITE:
return "#aphrodite_api_url_text";
case textgen_types.TABBY:
return "#tabby_api_url_text";
case textgen_types.KOBOLDCPP:
return "#koboldcpp_api_url_text";
}
}
/**
* Sorts the sampler items by the given order.
* @param {any[]} orderArray Sampler order array.
*/
function sortItemsByOrder(orderArray) {
console.debug('Preset samplers order: ' + orderArray);
const $draggableItems = $("#koboldcpp_order");
for (let i = 0; i < orderArray.length; i++) {
const index = orderArray[i];
const $item = $draggableItems.find(`[data-id="${index}"]`).detach();
$draggableItems.append($item);
}
}
jQuery(function () {
$('#koboldcpp_order').sortable({
delay: getSortableDelay(),
stop: function () {
const order = [];
$('#koboldcpp_order').children().each(function () {
order.push($(this).data('id'));
});
textgenerationwebui_settings.sampler_order = order;
console.log('Samplers reordered:', textgenerationwebui_settings.sampler_order);
saveSettingsDebounced();
},
});
$('#koboldcpp_default_order').on('click', function () {
textgenerationwebui_settings.sampler_order = KOBOLDCPP_ORDER;
sortItemsByOrder(textgenerationwebui_settings.sampler_order);
saveSettingsDebounced();
});
$('#textgen_type').on('change', function () {
const type = String($(this).val());
textgenerationwebui_settings.type = type;
/* if (type === 'aphrodite') {
$('[data-forAphro=False]').each(function () {
$(this).hide()
})
$('[data-forAphro=True]').each(function () {
$(this).show()
})
$('#mirostat_mode_textgenerationwebui').attr('step', 2) //Aphro disallows mode 1
$("#do_sample_textgenerationwebui").prop('checked', true) //Aphro should always do sample; 'otherwise set temp to 0 to mimic no sample'
$("#ban_eos_token_textgenerationwebui").prop('checked', false) //Aphro should not ban EOS, just ignore it; 'add token '2' to ban list do to this'
} else {
$('[data-forAphro=False]').each(function () {
$(this).show()
})
$('[data-forAphro=True]').each(function () {
$(this).hide()
})
$('#mirostat_mode_textgenerationwebui').attr('step', 1)
} */
if (isAphrodite()) {
//this is needed because showTypeSpecificControls() does not handle NOT declarations
$('[data-forAphro=False]').each(function () {
$(this).hide()
})
$('#mirostat_mode_textgenerationwebui').attr('step', 2) //Aphro disallows mode 1
$("#do_sample_textgenerationwebui").prop('checked', true) //Aphro should always do sample; 'otherwise set temp to 0 to mimic no sample'
$("#ban_eos_token_textgenerationwebui").prop('checked', false) //Aphro should not ban EOS, just ignore it; 'add token '2' to ban list do to this'
//special handling for Aphrodite topK -1 disable state
$('#top_k_textgenerationwebui').attr('min', -1)
if ($('#top_k_textgenerationwebui').val() === '0' || textgenerationwebui_settings['top_k'] === 0) {
textgenerationwebui_settings['top_k'] = -1
$('#top_k_textgenerationwebui').val('-1').trigger('input')
}
} else {
//this is needed because showTypeSpecificControls() does not handle NOT declarations
$('[data-forAphro=False]').each(function () {
$(this).show()
})
$('#mirostat_mode_textgenerationwebui').attr('step', 1)
//undo special Aphrodite setup for topK
$('#top_k_textgenerationwebui').attr('min', 0)
if ($('#top_k_textgenerationwebui').val() === '-1' || textgenerationwebui_settings['top_k'] === -1) {
textgenerationwebui_settings['top_k'] = 0
$('#top_k_textgenerationwebui').val('0').trigger('input')
}
}
showTypeSpecificControls(type);
setOnlineStatus('no_connection');
@@ -330,8 +414,12 @@ jQuery(function () {
const value = Number($(this).val());
$(`#${id}_counter_textgenerationwebui`).val(value);
textgenerationwebui_settings[id] = value;
//special handling for aphrodite using -1 as disabled instead of 0
if ($(this).attr('id') === 'top_k_textgenerationwebui' && isAphrodite() && value === 0) {
textgenerationwebui_settings[id] = -1
$(this).val(-1)
}
}
saveSettingsDebounced();
});
}
@@ -348,26 +436,33 @@ function showTypeSpecificControls(type) {
});
}
function setSettingByName(i, value, trigger) {
function setSettingByName(setting, value, trigger) {
if (value === null || value === undefined) {
return;
}
const isCheckbox = $(`#${i}_textgenerationwebui`).attr('type') == 'checkbox';
const isText = $(`#${i}_textgenerationwebui`).attr('type') == 'text' || $(`#${i}_textgenerationwebui`).is('textarea');
if ('sampler_order' === setting) {
value = Array.isArray(value) ? value : KOBOLDCPP_ORDER;
sortItemsByOrder(value);
textgenerationwebui_settings.sampler_order = value;
return;
}
const isCheckbox = $(`#${setting}_textgenerationwebui`).attr('type') == 'checkbox';
const isText = $(`#${setting}_textgenerationwebui`).attr('type') == 'text' || $(`#${setting}_textgenerationwebui`).is('textarea');
if (isCheckbox) {
const val = Boolean(value);
$(`#${i}_textgenerationwebui`).prop('checked', val);
$(`#${setting}_textgenerationwebui`).prop('checked', val);
}
else if (isText) {
$(`#${i}_textgenerationwebui`).val(value);
$(`#${setting}_textgenerationwebui`).val(value);
}
else {
const val = parseFloat(value);
$(`#${i}_textgenerationwebui`).val(val);
$(`#${i}_counter_textgenerationwebui`).val(val);
$(`#${setting}_textgenerationwebui`).val(val);
$(`#${setting}_counter_textgenerationwebui`).val(val);
if (power_user.enableZenSliders) {
let zenSlider = $(`#${i}_textgenerationwebui_zenslider`).slider()
let zenSlider = $(`#${setting}_textgenerationwebui_zenslider`).slider()
zenSlider.slider('option', 'value', val)
zenSlider.slider('option', 'slide')
.call(zenSlider, null, {
@@ -377,7 +472,7 @@ function setSettingByName(i, value, trigger) {
}
if (trigger) {
$(`#${i}_textgenerationwebui`).trigger('input');
$(`#${setting}_textgenerationwebui`).trigger('input');
}
}
@@ -480,37 +575,28 @@ function getModel() {
return undefined;
}
export function getTextGenGenerationData(finalPrompt, this_amount_gen, isImpersonate, cfgValues) {
return {
export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, isContinue, cfgValues) {
let APIflags = {
'prompt': finalPrompt,
'model': getModel(),
'max_new_tokens': this_amount_gen,
'max_tokens': this_amount_gen,
'do_sample': textgenerationwebui_settings.do_sample,
'max_new_tokens': maxTokens,
'max_tokens': maxTokens,
'temperature': textgenerationwebui_settings.temp,
'temperature_last': textgenerationwebui_settings.temperature_last,
'top_p': textgenerationwebui_settings.top_p,
'typical_p': textgenerationwebui_settings.typical_p,
'min_p': textgenerationwebui_settings.min_p,
'repetition_penalty': textgenerationwebui_settings.rep_pen,
'repetition_penalty_range': textgenerationwebui_settings.rep_pen_range,
'encoder_repetition_penalty': textgenerationwebui_settings.encoder_rep_pen,
'frequency_penalty': textgenerationwebui_settings.freq_pen,
'presence_penalty': textgenerationwebui_settings.presence_pen,
'top_k': textgenerationwebui_settings.top_k,
'min_length': textgenerationwebui_settings.min_length,
'min_tokens': textgenerationwebui_settings.min_length,
'no_repeat_ngram_size': textgenerationwebui_settings.no_repeat_ngram_size,
'num_beams': textgenerationwebui_settings.num_beams,
'penalty_alpha': textgenerationwebui_settings.penalty_alpha,
'length_penalty': textgenerationwebui_settings.length_penalty,
'early_stopping': textgenerationwebui_settings.early_stopping,
'guidance_scale': cfgValues?.guidanceScale?.value ?? textgenerationwebui_settings.guidance_scale ?? 1,
'negative_prompt': cfgValues?.negativePrompt ?? textgenerationwebui_settings.negative_prompt ?? '',
'seed': textgenerationwebui_settings.seed,
'add_bos_token': textgenerationwebui_settings.add_bos_token,
'stopping_strings': getStoppingStrings(isImpersonate),
'stop': getStoppingStrings(isImpersonate),
'stopping_strings': getStoppingStrings(isImpersonate, isContinue),
'stop': getStoppingStrings(isImpersonate, isContinue),
'truncation_length': max_context,
'ban_eos_token': textgenerationwebui_settings.ban_eos_token,
'skip_special_tokens': textgenerationwebui_settings.skip_special_tokens,
@@ -521,20 +607,43 @@ export function getTextGenGenerationData(finalPrompt, this_amount_gen, isImperso
'mirostat_mode': textgenerationwebui_settings.mirostat_mode,
'mirostat_tau': textgenerationwebui_settings.mirostat_tau,
'mirostat_eta': textgenerationwebui_settings.mirostat_eta,
'grammar_string': textgenerationwebui_settings.grammar_string,
'custom_token_bans': isAphrodite() ? toIntArray(getCustomTokenBans()) : getCustomTokenBans(),
'use_mancer': isMancer(),
'use_aphrodite': isAphrodite(),
'use_tabby': isTabby(),
'use_koboldcpp': isKoboldCpp(),
'use_ooba': isOoba(),
'api_server': isMancer() ? MANCER_SERVER : api_server_textgenerationwebui,
'legacy_api': textgenerationwebui_settings.legacy_api && !isMancer(),
'sampler_order': isKoboldCpp() ? textgenerationwebui_settings.sampler_order : undefined,
};
let aphroditeExclusionFlags = {
'repetition_penalty_range': textgenerationwebui_settings.rep_pen_range,
'encoder_repetition_penalty': textgenerationwebui_settings.encoder_rep_pen,
'no_repeat_ngram_size': textgenerationwebui_settings.no_repeat_ngram_size,
'penalty_alpha': textgenerationwebui_settings.penalty_alpha,
'temperature_last': textgenerationwebui_settings.temperature_last,
'do_sample': textgenerationwebui_settings.do_sample,
'seed': textgenerationwebui_settings.seed,
'guidance_scale': cfgValues?.guidanceScale?.value ?? textgenerationwebui_settings.guidance_scale ?? 1,
'negative_prompt': cfgValues?.negativePrompt ?? substituteParams(textgenerationwebui_settings.negative_prompt) ?? '',
'grammar_string': textgenerationwebui_settings.grammar_string,
}
let aphroditeFlags = {
//'n': textgenerationwebui_settings.n_aphrodite,
//'best_of': textgenerationwebui_settings.n_aphrodite, //n must always == best_of and vice versa
//'ignore_eos': textgenerationwebui_settings.ignore_eos_token_aphrodite,
//'spaces_between_special_tokens': textgenerationwebui_settings.spaces_between_special_tokens_aphrodite,
// 'logits_processors': textgenerationwebui_settings.logits_processors_aphrodite,
'ignore_eos': textgenerationwebui_settings.ignore_eos_token_aphrodite,
'spaces_between_special_tokens': textgenerationwebui_settings.spaces_between_special_tokens_aphrodite,
//'logits_processors': textgenerationwebui_settings.logits_processors_aphrodite,
//'logprobs': textgenerationwebui_settings.log_probs_aphrodite,
//'prompt_logprobs': textgenerationwebui_settings.prompt_log_probs_aphrodite,
};
}
if (isAphrodite()) {
APIflags = Object.assign(APIflags, aphroditeFlags);
} else {
APIflags = Object.assign(APIflags, aphroditeExclusionFlags);
}
return APIflags
}

View File

@@ -4,7 +4,7 @@ import { chat_completion_sources, model_list, oai_settings } from "./openai.js";
import { groups, selected_group } from "./group-chats.js";
import { getStringHash } from "./utils.js";
import { kai_flags } from "./kai-settings.js";
import { isMancer, textgenerationwebui_settings } from "./textgen-settings.js";
import { isKoboldCpp, isMancer, isTabby, textgenerationwebui_settings } from "./textgen-settings.js";
export const CHARACTERS_PER_TOKEN_RATIO = 3.35;
const TOKENIZER_WARNING_KEY = 'tokenizationWarningShown';
@@ -18,9 +18,19 @@ export const tokenizers = {
NERD2: 5,
API: 6,
MISTRAL: 7,
YI: 8,
BEST_MATCH: 99,
};
export const SENTENCEPIECE_TOKENIZERS = [
tokenizers.LLAMA,
tokenizers.MISTRAL,
tokenizers.YI,
// uncomment when NovelAI releases Kayra and Clio weights, lol
//tokenizers.NERD,
//tokenizers.NERD2,
];
const objectStore = new localforage.createInstance({ name: "SillyTavern_ChatCompletions" });
let tokenCache = {};
@@ -148,6 +158,8 @@ function callTokenizer(type, str, padding) {
return countTokensRemote('/api/tokenize/nerdstash_v2', str, padding);
case tokenizers.MISTRAL:
return countTokensRemote('/api/tokenize/mistral', str, padding);
case tokenizers.YI:
return countTokensRemote('/api/tokenize/yi', str, padding);
case tokenizers.API:
return countTokensRemote('/tokenize_via_api', str, padding);
default:
@@ -229,6 +241,7 @@ export function getTokenizerModel() {
const claudeTokenizer = 'claude';
const llamaTokenizer = 'llama';
const mistralTokenizer = 'mistral';
const yiTokenizer = 'yi';
// Assuming no one would use it for different models.. right?
if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) {
@@ -264,6 +277,9 @@ export function getTokenizerModel() {
else if (model?.architecture?.tokenizer === 'Mistral') {
return mistralTokenizer;
}
else if (model?.architecture?.tokenizer === 'Yi') {
return yiTokenizer;
}
else if (oai_settings.openrouter_model.includes('gpt-4')) {
return gpt4Tokenizer;
}
@@ -369,6 +385,8 @@ function getRemoteTokenizationParams(str) {
api: main_api,
url: getAPIServerUrl(),
legacy_api: main_api === 'textgenerationwebui' && textgenerationwebui_settings.legacy_api && !isMancer(),
use_tabby: main_api === 'textgenerationwebui' && isTabby(),
use_koboldcpp: main_api === 'textgenerationwebui' && isKoboldCpp(),
};
}
@@ -449,7 +467,11 @@ function getTextTokensRemote(endpoint, str, model = '') {
* @param {string} endpoint API endpoint.
* @param {number[]} ids Array of token ids
*/
function decodeTextTokensRemote(endpoint, ids) {
function decodeTextTokensRemote(endpoint, ids, model = '') {
if (model) {
endpoint += `?model=${model}`;
}
let text = '';
jQuery.ajax({
async: false,
@@ -483,6 +505,8 @@ export function getTextTokens(tokenizerType, str) {
return getTextTokensRemote('/api/tokenize/nerdstash_v2', str);
case tokenizers.MISTRAL:
return getTextTokensRemote('/api/tokenize/mistral', str);
case tokenizers.YI:
return getTextTokensRemote('/api/tokenize/yi', str);
case tokenizers.OPENAI:
const model = getTokenizerModel();
return getTextTokensRemote('/api/tokenize/openai-encode', str, model);
@@ -511,6 +535,11 @@ export function decodeTextTokens(tokenizerType, ids) {
return decodeTextTokensRemote('/api/decode/nerdstash_v2', ids);
case tokenizers.MISTRAL:
return decodeTextTokensRemote('/api/decode/mistral', ids);
case tokenizers.YI:
return decodeTextTokensRemote('/api/decode/yi', ids);
case tokenizers.OPENAI:
const model = getTokenizerModel();
return decodeTextTokensRemote('/api/decode/openai', ids, model);
default:
console.warn("Calling decodeTextTokens with unsupported tokenizer type", tokenizerType);
return '';

View File

@@ -512,6 +512,38 @@ export function trimToStartSentence(input) {
return input;
}
/**
* Format bytes as human-readable text.
*
* @param bytes Number of bytes.
* @param si True to use metric (SI) units, aka powers of 1000. False to use
* binary (IEC), aka powers of 1024.
* @param dp Number of decimal places to display.
*
* @return Formatted string.
*/
export function humanFileSize(bytes, si = false, dp = 1) {
const thresh = si ? 1000 : 1024;
if (Math.abs(bytes) < thresh) {
return bytes + ' B';
}
const units = si
? ['kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
: ['KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'];
let u = -1;
const r = 10 ** dp;
do {
bytes /= thresh;
++u;
} while (Math.round(Math.abs(bytes) * r) / r >= thresh && u < units.length - 1);
return bytes.toFixed(dp) + ' ' + units[u];
}
/**
* Counts the number of occurrences of a character in a string.
* @param {string} string The string to count occurrences in.
@@ -533,6 +565,24 @@ export function countOccurrences(string, character) {
return count;
}
/**
* Checks if a string is "true" value.
* @param {string} arg String to check
* @returns {boolean} True if the string is true, false otherwise.
*/
export function isTrueBoolean(arg) {
return ['on', 'true', '1'].includes(arg);
}
/**
* Checks if a string is "false" value.
* @param {string} arg String to check
* @returns {boolean} True if the string is false, false otherwise.
*/
export function isFalseBoolean(arg) {
return ['off', 'false', '0'].includes(arg);
}
/**
* Checks if a number is odd.
* @param {number} number The number to check.
@@ -941,9 +991,10 @@ export function loadFileToDocument(url, type) {
* @param {string} dataUrl The data URL encoded data of the image.
* @param {number} maxWidth The maximum width of the thumbnail.
* @param {number} maxHeight The maximum height of the thumbnail.
* @param {string} [type='image/jpeg'] The type of the thumbnail.
* @returns {Promise<string>} A promise that resolves to the thumbnail data URL.
*/
export function createThumbnail(dataUrl, maxWidth, maxHeight) {
export function createThumbnail(dataUrl, maxWidth, maxHeight, type = 'image/jpeg') {
return new Promise((resolve, reject) => {
const img = new Image();
img.src = dataUrl;
@@ -968,7 +1019,7 @@ export function createThumbnail(dataUrl, maxWidth, maxHeight) {
ctx.drawImage(img, 0, 0, thumbnailWidth, thumbnailHeight);
// Convert the canvas to a data URL and resolve the promise
const thumbnailDataUrl = canvas.toDataURL('image/jpeg');
const thumbnailDataUrl = canvas.toDataURL(type);
resolve(thumbnailDataUrl);
};

View File

@@ -1,11 +1,15 @@
import { chat_metadata, getCurrentChatId, sendSystemMessage, system_message_types } from "../script.js";
import { extension_settings } from "./extensions.js";
import { registerSlashCommand } from "./slash-commands.js";
import { chat_metadata, getCurrentChatId, saveSettingsDebounced, sendSystemMessage, system_message_types } from "../script.js";
import { extension_settings, saveMetadataDebounced } from "./extensions.js";
import { executeSlashCommands, registerSlashCommand } from "./slash-commands.js";
function getLocalVariable(name) {
if (!chat_metadata.variables) {
chat_metadata.variables = {};
}
const localVariable = chat_metadata?.variables[name];
return localVariable || '';
return (localVariable === '' || isNaN(Number(localVariable))) ? (localVariable || '') : Number(localVariable);
}
function setLocalVariable(name, value) {
@@ -14,86 +18,128 @@ function setLocalVariable(name, value) {
}
chat_metadata.variables[name] = value;
saveMetadataDebounced();
return value;
}
function getGlobalVariable(name) {
const globalVariable = extension_settings.variables.global[name];
return globalVariable || '';
return (globalVariable === '' || isNaN(Number(globalVariable))) ? (globalVariable || '') : Number(globalVariable);
}
function setGlobalVariable(name, value) {
extension_settings.variables.global[name] = value;
saveSettingsDebounced();
}
export function replaceVariableMacros(str) {
// Replace {{getvar::name}} with the value of the variable name
str = str.replace(/{{getvar::([^}]+)}}/gi, (_, name) => {
name = name.toLowerCase().trim();
function addLocalVariable(name, value) {
const currentValue = getLocalVariable(name) || 0;
const increment = Number(value);
if (isNaN(increment)) {
const stringValue = String(currentValue || '') + value;
setLocalVariable(name, stringValue);
return stringValue;
}
const newValue = Number(currentValue) + increment;
if (isNaN(newValue)) {
return '';
}
setLocalVariable(name, newValue);
return newValue;
}
function addGlobalVariable(name, value) {
const currentValue = getGlobalVariable(name) || 0;
const increment = Number(value);
if (isNaN(increment)) {
const stringValue = String(currentValue || '') + value;
setGlobalVariable(name, stringValue);
return stringValue;
}
const newValue = Number(currentValue) + increment;
if (isNaN(newValue)) {
return '';
}
setGlobalVariable(name, newValue);
return newValue;
}
export function resolveVariable(name) {
if (existsLocalVariable(name)) {
return getLocalVariable(name);
});
}
// Replace {{setvar::name::value}} with empty string and set the variable name to value
str = str.replace(/{{setvar::([^:]+)::([^}]+)}}/gi, (_, name, value) => {
name = name.toLowerCase().trim();
setLocalVariable(name, value);
return '';
});
// Replace {{addvar::name::value}} with empty string and add value to the variable value
str = str.replace(/{{addvar::([^:]+)::([^}]+)}}/gi, (_, name, value) => {
name = name.toLowerCase().trim();
const currentValue = getLocalVariable(name) || 0;
const increment = Number(value);
if (isNaN(increment)) {
return '';
}
const newValue = Number(currentValue) + increment;
if (isNaN(newValue)) {
return '';
}
setLocalVariable(name, newValue);
return '';
});
// Replace {{getglobalvar::name}} with the value of the global variable name
str = str.replace(/{{getglobalvar::([^}]+)}}/gi, (_, name) => {
name = name.toLowerCase().trim();
if (existsGlobalVariable(name)) {
return getGlobalVariable(name);
});
}
// Replace {{setglobalvar::name::value}} with empty string and set the global variable name to value
str = str.replace(/{{setglobalvar::([^:]+)::([^}]+)}}/gi, (_, name, value) => {
name = name.toLowerCase().trim();
setGlobalVariable(name, value);
return '';
});
return name;
}
// Replace {{addglobalvar::name::value}} with empty string and add value to the global variable value
str = str.replace(/{{addglobalvar::([^:]+)::([^}]+)}}/gi, (_, name, value) => {
name = name.toLowerCase().trim();
const currentValue = getGlobalVariable(name) || 0;
const increment = Number(value);
export function replaceVariableMacros(input) {
const lines = input.split('\n');
if (isNaN(increment)) {
return '';
for (let i = 0; i < lines.length; i++) {
let line = lines[i];
// Skip lines without macros
if (!line || !line.includes('{{')) {
continue;
}
const newValue = Number(currentValue) + increment;
// Replace {{getvar::name}} with the value of the variable name
line = line.replace(/{{getvar::([^}]+)}}/gi, (_, name) => {
name = name.trim();
return getLocalVariable(name);
});
if (isNaN(newValue)) {
// Replace {{setvar::name::value}} with empty string and set the variable name to value
line = line.replace(/{{setvar::([^:]+)::([^}]+)}}/gi, (_, name, value) => {
name = name.trim();
setLocalVariable(name, value);
return '';
}
});
setGlobalVariable(name, newValue);
return '';
});
// Replace {{addvar::name::value}} with empty string and add value to the variable value
line = line.replace(/{{addvar::([^:]+)::([^}]+)}}/gi, (_, name, value) => {
name = name.trim();
addLocalVariable(name, value);;
return '';
});
return str;
// Replace {{getglobalvar::name}} with the value of the global variable name
line = line.replace(/{{getglobalvar::([^}]+)}}/gi, (_, name) => {
name = name.trim();
return getGlobalVariable(name);
});
// Replace {{setglobalvar::name::value}} with empty string and set the global variable name to value
line = line.replace(/{{setglobalvar::([^:]+)::([^}]+)}}/gi, (_, name, value) => {
name = name.trim();
setGlobalVariable(name, value);
return '';
});
// Replace {{addglobalvar::name::value}} with empty string and add value to the global variable value
line = line.replace(/{{addglobalvar::([^:]+)::([^}]+)}}/gi, (_, name, value) => {
name = name.trim();
addGlobalVariable(name, value);
return '';
});
lines[i] = line;
}
return lines.join('\n');
}
function listVariablesCallback() {
@@ -110,11 +156,200 @@ function listVariablesCallback() {
const converter = new showdown.Converter();
const message = `### Local variables (${chatName}):\n${localVariablesString}\n\n### Global variables:\n${globalVariablesString}`;
const htmlMessage = converter.makeHtml(message);
const htmlMessage = DOMPurify.sanitize(converter.makeHtml(message));
sendSystemMessage(system_message_types.GENERIC, htmlMessage);
}
export function registerVariableCommands() {
registerSlashCommand('listvar', listVariablesCallback, [''], ' list registered chat variables', true, true);
/**
 * Implements the /while slash command: repeatedly evaluates the boolean
 * condition built from `args` and executes `command` while it holds.
 * @param {object} args - Named arguments (operands, rule, optional guard).
 * @param {string} command - Sub-command to execute on each iteration.
 * @returns {Promise<string>} Always an empty string (nothing piped).
 */
async function whileCallback(args, command) {
    // Loops are capped at 100 iterations unless guard=off/false/0 is passed.
    const MAX_LOOPS = 100;
    const guardDisabled = ['off', 'false', '0'].includes(args.guard?.toLowerCase());
    const loopLimit = guardDisabled ? Number.MAX_SAFE_INTEGER : MAX_LOOPS;

    let iteration = 0;
    while (iteration < loopLimit) {
        // Operands are re-resolved every pass so variable updates are seen.
        const { a, b, rule } = parseBooleanOperands(args);
        const conditionMet = evalBoolean(rule, a, b);

        if (!conditionMet || !command) {
            break;
        }

        await executeSubCommands(command);
        iteration++;
    }

    return '';
}
/**
 * Implements the /if slash command: evaluates a boolean condition and runs
 * either the main command or the `else=` alternative.
 * @param {object} args - Named arguments (operands, rule, optional else).
 * @param {string} command - Sub-command to execute when the condition holds.
 * @returns {Promise<string>} Pipe result of the executed branch, or ''.
 */
async function ifCallback(args, command) {
    const { a, b, rule } = parseBooleanOperands(args);
    const conditionMet = evalBoolean(rule, a, b);

    if (conditionMet && command) {
        return await executeSubCommands(command);
    }

    // Fall back to the else-branch only if it is a non-empty string.
    if (!conditionMet && typeof args.else === 'string' && args.else !== '') {
        return await executeSubCommands(args.else);
    }

    return '';
}
/**
 * Checks whether a local (chat-scoped) variable with the given name exists.
 * @param {string} name - Variable name to look up.
 * @returns {boolean} True if the variable is defined in chat metadata.
 */
function existsLocalVariable(name) {
    // Optional chaining guards against chat_metadata.variables being unset and
    // always yields a real boolean (the old `&&` form could return undefined).
    return chat_metadata.variables?.[name] !== undefined;
}
/**
 * Checks whether a global (settings-scoped) variable with the given name exists.
 * @param {string} name - Variable name to look up.
 * @returns {boolean} True if the variable is defined in extension settings.
 */
function existsGlobalVariable(name) {
    // Optional chaining guards against the global store being unset and always
    // yields a real boolean (the old `&&` form could return undefined).
    return extension_settings.variables.global?.[name] !== undefined;
}
/**
 * Extracts the left/right operands and comparison rule from slash-command args.
 * @param {object} args - Named arguments (a/left/first/x, b/right/second/y, rule).
 * @returns {{a: (number|string), b: (number|string), rule: string}} Parsed operands.
 */
function parseBooleanOperands(args) {
    // Resolution order: numeric literal, local variable, global variable, string literal.
    function getOperand(operand) {
        if (operand === undefined) {
            return '';
        }

        const asNumber = Number(operand);
        if (!isNaN(asNumber)) {
            return asNumber;
        }

        if (existsLocalVariable(operand)) {
            return getLocalVariable(operand) ?? '';
        }

        if (existsGlobalVariable(operand)) {
            return getGlobalVariable(operand) ?? '';
        }

        return String(operand) || '';
    }

    const left = getOperand(args.a || args.left || args.first || args.x);
    const right = getOperand(args.b || args.right || args.second || args.y);

    return { a: left, b: right, rule: args.rule };
}
/**
 * Evaluates a boolean comparison rule against two operands.
 * String rules (case-insensitive) apply when `a` is a string and `b` is not a
 * number; numeric rules apply when `a` is a number. Any other operand
 * combination yields false.
 * @param {string} rule - Comparison rule (in, nin, eq, neq, gt, gte, lt, lte, not).
 * @param {number|string} a - Left operand.
 * @param {number|string} b - Right operand.
 * @returns {boolean} Result of the comparison.
 * @throws {Error} If the rule is missing or unknown for the operand types.
 */
function evalBoolean(rule, a, b) {
    if (!rule) {
        toastr.warning('The rule must be specified for the boolean comparison.', 'Invalid command');
        throw new Error('Invalid command.');
    }

    if (typeof a === 'string' && typeof b !== 'number') {
        // Case-insensitive string comparison.
        const left = String(a).toLowerCase();
        const right = String(b).toLowerCase();
        const stringRules = {
            'in': () => left.includes(right),
            'nin': () => !left.includes(right),
            'eq': () => left === right,
            'neq': () => left !== right,
        };

        if (!Object.hasOwn(stringRules, rule)) {
            toastr.error('Unknown boolean comparison rule for type string.', 'Invalid /if command');
            throw new Error('Invalid command.');
        }

        return stringRules[rule]();
    }

    if (typeof a === 'number') {
        const left = Number(a);
        const right = Number(b);
        const numberRules = {
            'not': () => !left,
            'gt': () => left > right,
            'gte': () => left >= right,
            'lt': () => left < right,
            'lte': () => left <= right,
            'eq': () => left === right,
            'neq': () => left !== right,
        };

        if (!Object.hasOwn(numberRules, rule)) {
            toastr.error('Unknown boolean comparison rule for type number.', 'Invalid command');
            throw new Error('Invalid command.');
        }

        return numberRules[rule]();
    }

    // Mixed/unsupported operand types fall through to false, as before.
    return false;
}
/**
 * Executes a quoted sub-command string and returns its pipe result.
 * @param {string} command - Sub-command text, possibly wrapped in double quotes.
 * @returns {Promise<string>} Pipe value of the executed command, or ''.
 */
async function executeSubCommands(command) {
    // Strip a single pair of surrounding double quotes, if present.
    let text = command;
    if (text.startsWith('"')) {
        text = text.substring(1);
    }
    if (text.endsWith('"')) {
        text = text.substring(0, text.length - 1);
    }

    // Second argument enables unescaping of the command text.
    const result = await executeSlashCommands(text, true);

    if (!result || typeof result !== 'object') {
        return '';
    }

    return result?.pipe || '';
}
/**
 * Deletes a local (chat-scoped) variable and persists the change.
 * @param {string} name - Variable name to delete.
 * @returns {string} Always an empty string (nothing piped).
 */
function deleteLocalVariable(name) {
    // Deleting a nonexistent variable is a no-op (with a console warning).
    if (existsLocalVariable(name)) {
        delete chat_metadata.variables[name];
        saveMetadataDebounced();
    } else {
        console.warn(`The local variable "${name}" does not exist.`);
    }

    return '';
}
/**
 * Deletes a global (settings-scoped) variable and persists the change.
 * @param {string} name - Variable name to delete.
 * @returns {string} Always an empty string (nothing piped).
 */
function deleteGlobalVariable(name) {
    // Deleting a nonexistent variable is a no-op (with a console warning).
    if (existsGlobalVariable(name)) {
        delete extension_settings.variables.global[name];
        saveSettingsDebounced();
    } else {
        console.warn(`The global variable "${name}" does not exist.`);
    }

    return '';
}
/**
 * Registers all variable-related slash commands (/setvar, /getvar, /if, /while, etc.).
 * Called once at startup; has no return value.
 */
export function registerVariableCommands() {
    registerSlashCommand('listvar', listVariablesCallback, [], ' list registered chat variables', true, true);
    registerSlashCommand('setvar', (args, value) => setLocalVariable(args.key || args.name, value), [], '<span class="monospace">key=varname (value)</span> set a local variable value and pass it down the pipe, e.g. <tt>/setvar key=color green</tt>', true, true);
    registerSlashCommand('getvar', (_, value) => getLocalVariable(value), [], '<span class="monospace">(key)</span> get a local variable value and pass it down the pipe, e.g. <tt>/getvar height</tt>', true, true);
    registerSlashCommand('addvar', (args, value) => addLocalVariable(args.key || args.name, value), [], '<span class="monospace">key=varname (increment)</span> add a value to a local variable and pass the result down the pipe, e.g. <tt>/addvar score 10</tt>', true, true);
    registerSlashCommand('setglobalvar', (args, value) => setGlobalVariable(args.key || args.name, value), [], '<span class="monospace">key=varname (value)</span> set a global variable value and pass it down the pipe, e.g. <tt>/setglobalvar key=color green</tt>', true, true);
    registerSlashCommand('getglobalvar', (_, value) => getGlobalVariable(value), [], '<span class="monospace">(key)</span> get a global variable value and pass it down the pipe, e.g. <tt>/getglobalvar height</tt>', true, true);
    registerSlashCommand('addglobalvar', (args, value) => addGlobalVariable(args.key || args.name, value), [], '<span class="monospace">key=varname (increment)</span> add a value to a global variable and pass the result down the pipe, e.g. <tt>/addglobalvar score 10</tt>', true, true);
    registerSlashCommand('if', ifCallback, [], '<span class="monospace">left=varname1 right=varname2 rule=comparison else="(alt.command)" "(command)"</span> compare the value of the left operand "a" with the value of the right operand "b", and if the condition yields true, then execute any valid slash command enclosed in quotes and pass the result of the command execution down the pipe. Numeric values and string literals for left and right operands supported. Available rules: gt => a > b, gte => a >= b, lt => a < b, lte => a <= b, eq => a == b, neq => a != b, not => !a, in (strings) => a includes b, nin (strings) => a not includes b, e.g. <tt>/if left=score right=10 rule=gte "/speak You win"</tt> triggers a /speak command if the value of "score" is greater or equals 10.', true, true);
    // Fixed the /while example: "rule=let" was not a valid rule name (should be "lt").
    registerSlashCommand('while', whileCallback, [], '<span class="monospace">left=varname1 right=varname2 rule=comparison "(command)"</span> compare the value of the left operand "a" with the value of the right operand "b", and if the condition yields true, then execute any valid slash command enclosed in quotes. Numeric values and string literals for left and right operands supported. Available rules: gt => a > b, gte => a >= b, lt => a < b, lte => a <= b, eq => a == b, neq => a != b, not => !a, in (strings) => a includes b, nin (strings) => a not includes b, e.g. <tt>/setvar key=i 0 | /while left=i right=10 rule=lt "/addvar key=i 1"</tt> adds 1 to the value of "i" until it reaches 10. Loops are limited to 100 iterations by default, pass guard=off to disable.', true, true);
    registerSlashCommand('flushvar', (_, value) => deleteLocalVariable(value), [], '<span class="monospace">(key)</span> delete a local variable, e.g. <tt>/flushvar score</tt>', true, true);
    registerSlashCommand('flushglobalvar', (_, value) => deleteGlobalVariable(value), [], '<span class="monospace">(key)</span> delete a global variable, e.g. <tt>/flushglobalvar score</tt>', true, true);
}

View File

@@ -7,6 +7,7 @@ import { getDeviceInfo } from "./RossAscends-mods.js";
import { FILTER_TYPES, FilterHelper } from "./filters.js";
import { getTokenCount } from "./tokenizers.js";
import { power_user } from "./power-user.js";
import { getTagKeyForCharacter } from "./tags.js";
export {
world_info,
@@ -227,7 +228,7 @@ async function loadWorldInfoData(name) {
}
async function updateWorldInfoList() {
var result = await fetch("/getsettings", {
const result = await fetch("/getsettings", {
method: "POST",
headers: getRequestHeaders(),
body: JSON.stringify({}),
@@ -269,7 +270,15 @@ function sortEntries(data) {
const sortRule = option.data('rule');
const orderSign = sortOrder === 'asc' ? 1 : -1;
if (sortRule === 'priority') {
if (sortRule === 'custom') {
// First by display index, then by order, then by uid
data.sort((a, b) => {
const aValue = a.displayIndex;
const bValue = b.displayIndex;
return (aValue - bValue || b.order - a.order || a.uid - b.uid);
});
} else if (sortRule === 'priority') {
// First constant, then normal, then disabled. Then sort by order
data.sort((a, b) => {
const aValue = a.constant ? 0 : a.disable ? 2 : 1;
@@ -375,7 +384,7 @@ function displayWorldEntries(name, data, navigation = navigation_option.none) {
nextText: '>',
formatNavigator: PAGINATION_TEMPLATE,
showNavigator: true,
callback: function (page) {
callback: function (/** @type {object[]} */ page) {
$("#world_popup_entries_list").empty();
const keywordHeaders = `
<div id="WIEntryHeaderTitlesPC" class="flex-container wide100p spaceBetween justifyCenter textAlignCenter" style="padding:0 2.5em;">
@@ -399,6 +408,12 @@ function displayWorldEntries(name, data, navigation = navigation_option.none) {
</small>
</div>`
const blocks = page.map(entry => getWorldEntry(name, data, entry)).filter(x => x);
const isCustomOrder = $('#world_info_sort_order').find(':selected').data('rule') === 'custom';
if (!isCustomOrder) {
blocks.forEach(block => {
block.find('.drag-handle').remove();
});
}
$("#world_popup_entries_list").append(keywordHeaders);
$("#world_popup_entries_list").append(blocks);
},
@@ -500,6 +515,8 @@ function displayWorldEntries(name, data, navigation = navigation_option.none) {
delay: getSortableDelay(),
handle: ".drag-handle",
stop: async function (event, ui) {
const firstEntryUid = $('#world_popup_entries_list .world_entry').first().data('uid');
const minDisplayIndex = data?.entries[firstEntryUid]?.displayIndex ?? 0;
$('#world_popup_entries_list .world_entry').each(function (index) {
const uid = $(this).data('uid');
@@ -511,8 +528,8 @@ function displayWorldEntries(name, data, navigation = navigation_option.none) {
return;
}
item.displayIndex = index;
setOriginalDataValue(data, uid, 'extensions.display_index', index);
item.displayIndex = minDisplayIndex + index;
setOriginalDataValue(data, uid, 'extensions.display_index', item.displayIndex);
});
console.table(Object.keys(data.entries).map(uid => data.entries[uid]).map(x => ({ uid: x.uid, key: x.key.join(','), displayIndex: x.displayIndex })));
@@ -587,7 +604,7 @@ function getWorldEntry(name, data, entry) {
setOriginalDataValue(data, uid, "keys", data.entries[uid].key);
saveWorldInfo(name, data);
});
keyInput.val(entry.key.join(",")).trigger("input");
keyInput.val(entry.key.join(", ")).trigger("input");
//initScrollHeight(keyInput);
// logic AND/NOT
@@ -624,7 +641,7 @@ function getWorldEntry(name, data, entry) {
const value = $(this).prop("checked");
characterFilterLabel.text(value ? "Exclude Character(s)" : "Filter to Character(s)");
if (data.entries[uid].characterFilter) {
if (!value && data.entries[uid].characterFilter.names.length === 0) {
if (!value && data.entries[uid].characterFilter.names.length === 0 && data.entries[uid].characterFilter.tags.length === 0) {
delete data.entries[uid].characterFilter;
} else {
data.entries[uid].characterFilter.isExclude = value
@@ -635,7 +652,8 @@ function getWorldEntry(name, data, entry) {
{
characterFilter: {
isExclude: true,
names: []
names: [],
tags: [],
}
}
);
@@ -657,13 +675,25 @@ function getWorldEntry(name, data, entry) {
closeOnSelect: false,
});
}
const characters = getContext().characters;
characters.forEach((character) => {
const option = document.createElement('option');
const name = character.avatar.replace(/\.[^/.]+$/, "") ?? character.name
option.innerText = name
option.selected = entry.characterFilter?.names.includes(name)
characterFilter.append(option)
const name = character.avatar.replace(/\.[^/.]+$/, "") ?? character.name;
option.innerText = name;
option.selected = entry.characterFilter?.names?.includes(name);
option.setAttribute('data-type', 'character');
characterFilter.append(option);
});
const tags = getContext().tags;
tags.forEach((tag) => {
const option = document.createElement('option');
option.innerText = `[Tag] ${tag.name}`;
option.selected = entry.characterFilter?.tags?.includes(tag.id);
option.value = tag.id;
option.setAttribute('data-type', 'tag');
characterFilter.append(option);
});
characterFilter.on('mousedown change', async function (e) {
@@ -674,16 +704,19 @@ function getWorldEntry(name, data, entry) {
}
const uid = $(this).data("uid");
const value = $(this).val();
if ((!value || value?.length === 0) && !data.entries[uid].characterFilter?.isExclude) {
const selected = $(this).find(':selected');
if ((!selected || selected?.length === 0) && !data.entries[uid].characterFilter?.isExclude) {
delete data.entries[uid].characterFilter;
} else {
const names = selected.filter('[data-type="character"]').map((_, e) => e instanceof HTMLOptionElement && e.innerText).toArray();
const tags = selected.filter('[data-type="tag"]').map((_, e) => e instanceof HTMLOptionElement && e.value).toArray();
Object.assign(
data.entries[uid],
{
characterFilter: {
isExclude: data.entries[uid].characterFilter?.isExclude ?? false,
names: value
names: names,
tags: tags,
}
}
);
@@ -708,7 +741,7 @@ function getWorldEntry(name, data, entry) {
saveWorldInfo(name, data);
});
keySecondaryInput.val(entry.keysecondary.join(",")).trigger("input");
keySecondaryInput.val(entry.keysecondary.join(", ")).trigger("input");
initScrollHeight(keySecondaryInput);
// comment
@@ -1449,15 +1482,35 @@ async function checkWorldInfo(chat, maxContext) {
for (let entry of sortedEntries) {
// Check if this entry applies to the character or if it's excluded
if (entry.characterFilter && entry.characterFilter?.names.length > 0) {
if (entry.characterFilter && entry.characterFilter?.names?.length > 0) {
const nameIncluded = entry.characterFilter.names.includes(getCharaFilename());
const filtered = entry.characterFilter.isExclude ? nameIncluded : !nameIncluded
if (filtered) {
console.debug(`WI entry ${entry.uid} filtered out by character`);
continue;
}
}
if (entry.characterFilter && entry.characterFilter?.tags?.length > 0) {
const tagKey = getTagKeyForCharacter(this_chid);
if (tagKey) {
const tagMapEntry = context.tagMap[tagKey];
if (Array.isArray(tagMapEntry)) {
// If tag map intersects with the tag exclusion list, skip
const includesTag = tagMapEntry.some((tag) => entry.characterFilter.tags.includes(tag));
const filtered = entry.characterFilter.isExclude ? includesTag : !includesTag;
if (filtered) {
console.debug(`WI entry ${entry.uid} filtered out by tag`);
continue;
}
}
}
}
if (failedProbabilityChecks.has(entry)) {
continue;
}
@@ -1473,6 +1526,9 @@ async function checkWorldInfo(chat, maxContext) {
}
if (Array.isArray(entry.key) && entry.key.length) { //check for keywords existing
// If selectiveLogic isn't found, assume it's AND, only do this once per entry
const selectiveLogic = entry.selectiveLogic ?? 0;
let notFlag = true;
primary: for (let key of entry.key) {
const substituted = substituteParams(key);
console.debug(`${entry.uid}: ${substituted}`)
@@ -1488,10 +1544,6 @@ async function checkWorldInfo(chat, maxContext) {
secondary: for (let keysecondary of entry.keysecondary) {
const secondarySubstituted = substituteParams(keysecondary);
console.debug(`uid:${entry.uid}: filtering ${secondarySubstituted}`);
// If selectiveLogic isn't found, assume it's AND
const selectiveLogic = entry.selectiveLogic ?? 0;
//AND operator
if (selectiveLogic === 0) {
console.debug('saw AND logic, checking..')
@@ -1506,11 +1558,8 @@ async function checkWorldInfo(chat, maxContext) {
console.debug(`uid ${entry.uid}: checking NOT logic for ${secondarySubstituted}`)
if (secondarySubstituted && matchKeys(textToScan, secondarySubstituted.trim())) {
console.debug(`uid ${entry.uid}: canceled; filtered out by ${secondarySubstituted}`)
notFlag = false;
break primary;
} else {
console.debug(`${entry.uid}: activated; passed NOT filter`)
activatedNow.add(entry);
break secondary;
}
}
}
@@ -1522,6 +1571,11 @@ async function checkWorldInfo(chat, maxContext) {
}
} else { console.debug('no active entries for logic checks yet') }
}
//for a NOT all entries must be checked, a single match invalidates activation
if (selectiveLogic === 1 && notFlag) {
console.debug(`${entry.uid}: activated; passed NOT filter`)
activatedNow.add(entry);
}
}
}
@@ -1582,9 +1636,7 @@ async function checkWorldInfo(chat, maxContext) {
over_max = (
world_info_min_activations_depth_max > 0 &&
minActivationMsgIndex > world_info_min_activations_depth_max
) || (
minActivationMsgIndex >= chat.length
)
) || (minActivationMsgIndex >= chat.length)
if (!over_max) {
needsToScan = true
textToScan = transformString(chat.slice(minActivationMsgIndex, minActivationMsgIndex + 1).join(""));
@@ -1879,7 +1931,7 @@ export async function importEmbeddedWorldInfo(skipPopup = false) {
function onWorldInfoChange(_, text) {
if (_ !== '__notSlashCommand__') { // if it's a slash command
if (text !== undefined) { // and args are provided
if (text.trim() !== '') { // and args are provided
const slashInputSplitText = text.trim().toLowerCase().split(",");
slashInputSplitText.forEach((worldName) => {
@@ -1896,7 +1948,7 @@ function onWorldInfoChange(_, text) {
} else { // if no args, unset all worlds
toastr.success('Deactivated all worlds');
selected_world_info = [];
$("#world_info").val("");
$("#world_info").val(null).trigger('change');
}
} else { //if it's a pointer selection
let tempWorldInfo = [];
@@ -2167,11 +2219,9 @@ jQuery(() => {
updateEditor(navigation_option.previous);
});
$('#world_info_sort_order').on('change', function (e) {
if (e.target instanceof HTMLOptionElement) {
localStorage.setItem(SORT_ORDER_KEY, e.target.value);
}
$('#world_info_sort_order').on('change', function () {
const value = String($(this).find(":selected").val());
localStorage.setItem(SORT_ORDER_KEY, value);
updateEditor(navigation_option.none);
})

View File

@@ -3,6 +3,7 @@
@import url(css/promptmanager.css);
@import url(css/loader.css);
@import url(css/character-group-overlay.css);
@import url(css/file-form.css);
:root {
--doc-height: 100%;
@@ -57,6 +58,13 @@
--SmartThemeBlurStrength: calc(var(--blurStrength) * 1px);
--SmartThemeShadowColor: rgba(0, 0, 0, 0.5);
--SmartThemeBorderColor: rgba(0, 0, 0, 0.5);
--SmartThemeCheckboxBgColorR: 220;
--SmartThemeCheckboxBgColorG: 220;
--SmartThemeCheckboxBgColorB: 210;
--SmartThemeCheckboxTickColorValue: calc(((((var(--SmartThemeCheckboxBgColorR) * 299) + (var(--SmartThemeCheckboxBgColorG) * 587) + (var(--SmartThemeCheckboxBgColorB) * 114)) / 1000) - 128) * -1000);
--SmartThemeCheckboxTickColor: rgb(var(--SmartThemeCheckboxTickColorValue),
var(--SmartThemeCheckboxTickColorValue),
var(--SmartThemeCheckboxTickColorValue));
--sheldWidth: 50vw;
@@ -74,8 +82,8 @@
color-scheme: only light;
/* Send form variables */
--bottomFormBlockPadding: calc(var(--mainFontSize) / 3);
--bottomFormIconSize: calc(var(--mainFontSize) * 2);
--bottomFormBlockPadding: calc(var(--mainFontSize) / 2.5);
--bottomFormIconSize: calc(var(--mainFontSize) * 1.9);
--bottomFormBlockSize: calc(var(--bottomFormIconSize) + var(--bottomFormBlockPadding));
/*Top Bar Scaling Variables*/
@@ -118,6 +126,7 @@ body {
font-family: "Noto Sans", "Noto Color Emoji", sans-serif;
font-size: var(--mainFontSize);
color: var(--SmartThemeBodyColor);
overflow: hidden;
}
::-webkit-scrollbar {
@@ -217,7 +226,7 @@ table.responsiveTable {
color: var(--white50a);
}
.mes[is_system="true"] .mes_text br {
.mes[is_system="true"][ch_name="SillyTavern System"] .mes_text br {
display: none;
}
@@ -559,10 +568,9 @@ hr {
background-color: var(--crimson70a) !important;
}
#send_but_sheld {
#nonQRFormItems {
padding: 0;
border: 0;
height: var(--bottomFormBlockSize);
position: relative;
background-position: center;
display: flex;
@@ -570,10 +578,28 @@ hr {
column-gap: 5px;
font-size: var(--bottomFormIconSize);
overflow: hidden;
order: 1003;
order: 25;
width: 100%;
}
#send_but_sheld>div {
#leftSendForm,
#rightSendForm {
display: flex;
flex-wrap: wrap;
}
#leftSendForm {
order: 1;
padding-left: 2px;
}
#rightSendForm {
order: 3;
padding-right: 2px;
}
#send_form>#nonQRFormItems>div>div:not(.mes_stop) {
width: var(--bottomFormBlockSize);
height: var(--bottomFormBlockSize);
margin: 0;
@@ -585,25 +611,26 @@ hr {
display: flex;
align-items: center;
justify-content: center;
transition: all 300ms;
}
#options_button:hover,
#send_but_sheld>div:hover {
#send_form>#nonQRFormItems>div>div:hover {
opacity: 1;
filter: brightness(1.2);
}
#send_but {
order: 99999;
order: 2;
}
#mes_continue {
order: 99998;
order: 1;
}
#send_but_sheld .mes_stop {
#send_form .mes_stop {
display: none;
order: 99997;
order: 2;
padding-right: 2px;
}
#options_button {
@@ -622,7 +649,7 @@ hr {
transition: 0.3s;
display: flex;
align-items: center;
order: 1001;
order: 2;
}
.font-family-reset {
@@ -640,6 +667,7 @@ hr {
backdrop-filter: blur(var(--SmartThemeBlurStrength));
box-shadow: 0 0 10px rgba(0, 0, 0, 0.5);
flex-flow: column;
border-radius: 10px;
}
.options-content,
@@ -670,7 +698,7 @@ hr {
}
#extensionsMenuButton {
order: 100;
order: 4;
padding: 1px;
}
@@ -739,6 +767,10 @@ hr {
/*only affects bubblechat to make it sit nicely at the bottom*/
}
.last_mes .mes_text {
padding-right: 30px;
}
/* SWIPE RELATED STYLES*/
.swipe_right,
@@ -907,6 +939,7 @@ hr {
padding-top: 0;
padding-left: 10px;
width: 100%;
overflow-x: hidden;
}
.mes_text {
@@ -963,12 +996,12 @@ select {
background-color: rgba(255, 0, 0, 0);
border: 0;
box-shadow: none;
padding-top: 6px;
padding: 6px;
font-family: "Noto Sans", "Noto Color Emoji", sans-serif;
margin: 0;
text-shadow: 0px 0px calc(var(--shadowWidth) * 1px) var(--SmartThemeShadowColor);
flex: 1;
order: 1002;
order: 3;
}
.text_pole::placeholder {
@@ -1383,7 +1416,8 @@ select option:not(:checked) {
.menu_button.disabled {
filter: brightness(75%) grayscale(1);
cursor: not-allowed;
opacity: 0.5;
pointer-events: none;
}
.fav_on {
@@ -2217,7 +2251,7 @@ input[type="checkbox"]:not(#nav-toggle):not(#rm_button_panel_pin):not(#lm_button
height: 0.65em;
transform: scale(0);
transition: 120ms transform ease-in-out;
box-shadow: inset 1em 1em var(--SmartThemeBlurTintColor);
box-shadow: inset 1em 1em var(--SmartThemeCheckboxTickColor);
transform-origin: bottom left;
clip-path: polygon(14% 44%, 0 65%, 50% 100%, 100% 16%, 80% 0%, 43% 62%);
}
@@ -2466,14 +2500,6 @@ input[type="range"]::-webkit-slider-thumb {
display: none;
}
.last_mes .mes_buttons {
right: -30px;
}
.last_mes .mes_block {
margin-right: 30px;
}
.mes_buttons .mes_edit,
.mes_buttons .mes_bookmark,
.mes_buttons .mes_create_bookmark,
@@ -3491,14 +3517,6 @@ a {
align-items: center;
}
.prompt_overridden,
.jailbreak_overridden {
color: var(--SmartThemeQuoteColor);
font-weight: bold;
font-style: italic;
font-size: 0.8em;
}
.openai_restorable .right_menu_button img {
height: 20px;
}
@@ -3721,11 +3739,15 @@ a {
#show_more_messages {
text-align: center;
margin: 10px 0;
margin: 10px auto;
font-weight: 500;
text-decoration: underline;
order: -1;
cursor: pointer;
padding: 0.5em 1em;
background-color: var(--SmartThemeBlurTintColor);
width: fit-content;
border-radius: 10px;
outline: 1px solid var(--SmartThemeBorderColor);
}
.draggable img {
@@ -3749,4 +3771,4 @@ a {
height: 100vh;
z-index: 9999;
}
}
}

219
server.js
View File

@@ -55,7 +55,7 @@ const characterCardParser = require('./src/character-card-parser.js');
const contentManager = require('./src/content-manager');
const statsHelpers = require('./statsHelpers.js');
const { readSecret, migrateSecrets, SECRET_KEYS } = require('./src/secrets');
const { delay, getVersion, deepMerge } = require('./src/util');
const { delay, getVersion, deepMerge, getConfigValue, color } = require('./src/util');
const { invalidateThumbnail, ensureThumbnailCache } = require('./src/thumbnails');
const { getTokenizerModel, getTiktokenTokenizer, loadTokenizers, TEXT_COMPLETION_MODELS, getSentencepiceTokenizer, sentencepieceTokenizers } = require('./src/tokenizers');
const { convertClaudePrompt } = require('./src/chat-completion');
@@ -72,7 +72,15 @@ if (process.versions && process.versions.node && process.versions.node.match(/20
dns.setDefaultResultOrder('ipv4first');
const cliArguments = yargs(hideBin(process.argv))
.option('disableCsrf', {
.option('autorun', {
type: 'boolean',
default: null,
describe: 'Automatically launch SillyTavern in the browser.'
}).option('corsProxy', {
type: 'boolean',
default: false,
describe: 'Enables CORS proxy',
}).option('disableCsrf', {
type: 'boolean',
default: false,
describe: 'Disables CSRF protection'
@@ -101,12 +109,10 @@ app.use(responseTime());
// impoort from statsHelpers.js
const config = require(path.join(process.cwd(), './config.conf'));
const server_port = process.env.SILLY_TAVERN_PORT || config.port;
const server_port = process.env.SILLY_TAVERN_PORT || getConfigValue('port', 8000);
const whitelistPath = path.join(process.cwd(), "./whitelist.txt");
let whitelist = config.whitelist;
let whitelist = getConfigValue('whitelist', []);
if (fs.existsSync(whitelistPath)) {
try {
@@ -115,10 +121,10 @@ if (fs.existsSync(whitelistPath)) {
} catch (e) { }
}
const whitelistMode = config.whitelistMode;
const autorun = config.autorun && !cliArguments.ssl;
const enableExtensions = config.enableExtensions;
const listen = config.listen;
const whitelistMode = getConfigValue('whitelistMode', true);
const autorun = getConfigValue('autorun', false) && cliArguments.autorun !== false && !cliArguments.ssl;
const enableExtensions = getConfigValue('enableExtensions', true);
const listen = getConfigValue('listen', false);
const API_OPENAI = "https://api.openai.com/v1";
const API_CLAUDE = "https://api.anthropic.com/v1";
@@ -130,22 +136,6 @@ let main_api = "kobold";
let characters = {};
let response_dw_bg;
let color = {
byNum: (mess, fgNum) => {
mess = mess || '';
fgNum = fgNum === undefined ? 31 : fgNum;
return '\u001b[' + fgNum + 'm' + mess + '\u001b[39m';
},
black: (mess) => color.byNum(mess, 30),
red: (mess) => color.byNum(mess, 31),
green: (mess) => color.byNum(mess, 32),
yellow: (mess) => color.byNum(mess, 33),
blue: (mess) => color.byNum(mess, 34),
magenta: (mess) => color.byNum(mess, 35),
cyan: (mess) => color.byNum(mess, 36),
white: (mess) => color.byNum(mess, 37)
};
function getMancerHeaders() {
const apiKey = readSecret(SECRET_KEYS.MANCER);
@@ -164,8 +154,18 @@ function getAphroditeHeaders() {
}) : {};
}
function getTabbyHeaders() {
const apiKey = readSecret(SECRET_KEYS.TABBY)
return apiKey ? ({
"x-api-key": apiKey,
"Authorization": `Bearer ${apiKey}`,
}) : {};
}
function getOverrideHeaders(urlHost) {
const overrideHeaders = config.requestOverrides?.find((e) => e.hosts?.includes(urlHost))?.headers;
const requestOverrides = getConfigValue('requestOverrides', []);
const overrideHeaders = requestOverrides?.find((e) => e.hosts?.includes(urlHost))?.headers;
if (overrideHeaders && urlHost) {
return overrideHeaders;
} else {
@@ -186,6 +186,8 @@ function setAdditionalHeaders(request, args, server) {
headers = getMancerHeaders();
} else if (request.body.use_aphrodite) {
headers = getAphroditeHeaders();
} else if (request.body.use_tabby) {
headers = getTabbyHeaders();
} else {
headers = server ? getOverrideHeaders((new URL(server))?.host) : {};
}
@@ -258,7 +260,7 @@ const CORS = cors({
app.use(CORS);
if (listen && config.basicAuthMode) app.use(basicAuthMiddleware);
if (listen && getConfigValue('basicAuthMode', false)) app.use(basicAuthMiddleware);
// IP Whitelist //
let knownIPs = new Set();
@@ -297,12 +299,50 @@ app.use(function (req, res, next) {
//clientIp = req.connection.remoteAddress.split(':').pop();
if (whitelistMode === true && !whitelist.some(x => ipMatching.matches(clientIp, ipMatching.getMatch(x)))) {
console.log(color.red('Forbidden: Connection attempt from ' + clientIp + '. If you are attempting to connect, please add your IP address in whitelist or disable whitelist mode in config.conf in root of SillyTavern folder.\n'));
return res.status(403).send('<b>Forbidden</b>: Connection attempt from <b>' + clientIp + '</b>. If you are attempting to connect, please add your IP address in whitelist or disable whitelist mode in config.conf in root of SillyTavern folder.');
console.log(color.red('Forbidden: Connection attempt from ' + clientIp + '. If you are attempting to connect, please add your IP address in whitelist or disable whitelist mode in config.yaml in root of SillyTavern folder.\n'));
return res.status(403).send('<b>Forbidden</b>: Connection attempt from <b>' + clientIp + '</b>. If you are attempting to connect, please add your IP address in whitelist or disable whitelist mode in config.yaml in root of SillyTavern folder.');
}
next();
});
if (getConfigValue('enableCorsProxy', false) === true || cliArguments.corsProxy === true) {
console.log('Enabling CORS proxy');
app.use('/proxy/:url', async (req, res) => {
const url = req.params.url; // get the url from the request path
// Disallow circular requests
const serverUrl = req.protocol + '://' + req.get('host');
if (url.startsWith(serverUrl)) {
return res.status(400).send('Circular requests are not allowed');
}
try {
const headers = JSON.parse(JSON.stringify(req.headers));
delete headers['x-csrf-token'];
delete headers['host'];
delete headers['referer'];
delete headers['origin'];
delete headers['cookie'];
delete headers['sec-fetch-mode'];
delete headers['sec-fetch-site'];
delete headers['sec-fetch-dest'];
const bodyMethods = ['POST', 'PUT', 'PATCH'];
const response = await fetch(url, {
method: req.method,
headers: headers,
body: bodyMethods.includes(req.method) ? JSON.stringify(req.body) : undefined,
});
response.body.pipe(res); // pipe the response to the proxy response
} catch (error) {
res.status(500).send('Error occurred while trying to proxy to: ' + url + ' ' + error);
}
});
}
app.use(express.static(process.cwd() + "/public", {}));
@@ -520,6 +560,12 @@ app.post("/api/textgenerationwebui/status", jsonParser, async function (request,
else if (request.body.use_mancer) {
url += "/oai/v1/models";
}
else if (request.body.use_tabby) {
url += "/v1/model/list"
}
else if (request.body.use_koboldcpp) {
url += "/v1/models";
}
const modelsReply = await fetch(url, args);
@@ -548,7 +594,7 @@ app.post("/api/textgenerationwebui/status", jsonParser, async function (request,
if (request.body.use_ooba) {
try {
const modelInfoUrl = baseUrl + '/v1/internal/model/info';
const modelInfoUrl = baseUrl + "/v1/internal/model/info";
const modelInfoReply = await fetch(modelInfoUrl, args);
if (modelInfoReply.ok) {
@@ -559,7 +605,28 @@ app.post("/api/textgenerationwebui/status", jsonParser, async function (request,
result = modelName || result;
}
} catch (error) {
console.error('Failed to get Ooba model info:', error);
console.error(`Failed to get Ooba model info: ${error}`);
}
}
if (request.body.use_tabby) {
try {
const modelInfoUrl = baseUrl + "/v1/model";
const modelInfoReply = await fetch(modelInfoUrl, args);
if (modelInfoReply.ok) {
const modelInfo = await modelInfoReply.json();
console.log('Tabby model info:', modelInfo);
const modelName = modelInfo?.id;
result = modelName || result;
} else {
// TabbyAPI returns an error 400 if a model isn't loaded
result = "None"
}
} catch (error) {
console.error(`Failed to get TabbyAPI model info: ${error}`);
}
}
@@ -593,7 +660,7 @@ app.post("/api/textgenerationwebui/generate", jsonParser, async function (reques
if (request.body.legacy_api) {
url += "/v1/generate";
}
else if (request.body.use_aphrodite || request.body.use_ooba) {
else if (request.body.use_aphrodite || request.body.use_ooba || request.body.use_tabby || request.body.use_koboldcpp) {
url += "/v1/completions";
}
else if (request.body.use_mancer) {
@@ -1654,7 +1721,7 @@ app.post('/getsettings', jsonParser, (request, response) => {
// OpenAI Settings
const { fileContents: openai_settings, fileNames: openai_setting_names }
= readPresetsFromDirectory(DIRECTORIES.openAI_Settings, {
sortFunction: sortByModifiedDate(DIRECTORIES.openAI_Settings), removeFileExtension: true
sortFunction: sortByName(DIRECTORIES.openAI_Settings), removeFileExtension: true
});
// TextGenerationWebUI Settings
@@ -2457,27 +2524,28 @@ app.post('/uploadimage', jsonParser, async (request, response) => {
return response.status(400).send({ error: "No image data provided" });
}
// Extracting the base64 data and the image format
const match = request.body.image.match(/^data:image\/(png|jpg|webp|jpeg|gif);base64,(.+)$/);
if (!match) {
return response.status(400).send({ error: "Invalid image format" });
}
const [, format, base64Data] = match;
// Constructing filename and path
let filename = `${Date.now()}.${format}`;
if (request.body.filename) {
filename = `${request.body.filename}.${format}`;
}
// if character is defined, save to a sub folder for that character
let pathToNewFile = path.join(DIRECTORIES.userImages, filename);
if (request.body.ch_name) {
pathToNewFile = path.join(DIRECTORIES.userImages, request.body.ch_name, filename);
}
try {
// Extracting the base64 data and the image format
const splitParts = request.body.image.split(',');
const format = splitParts[0].split(';')[0].split('/')[1];
const base64Data = splitParts[1];
const validFormat = ['png', 'jpg', 'webp', 'jpeg', 'gif'].includes(format);
if (!validFormat) {
return response.status(400).send({ error: "Invalid image format" });
}
// Constructing filename and path
let filename = `${Date.now()}.${format}`;
if (request.body.filename) {
filename = `${request.body.filename}.${format}`;
}
// if character is defined, save to a sub folder for that character
let pathToNewFile = path.join(DIRECTORIES.userImages, filename);
if (request.body.ch_name) {
pathToNewFile = path.join(DIRECTORIES.userImages, request.body.ch_name, filename);
}
ensureDirectoryExistence(pathToNewFile);
const imageBuffer = Buffer.from(base64Data, 'base64');
await fs.promises.writeFile(pathToNewFile, imageBuffer);
@@ -2743,7 +2811,7 @@ app.post("/getstatus_openai", jsonParser, async function (request, response_gets
data.data.forEach(model => {
const context_length = model.context_length;
const tokens_dollar = Number(1 / (1000 * model.pricing.prompt));
const tokens_dollar = Number(1 / (1000 * model.pricing?.prompt));
const tokens_rounded = (Math.round(tokens_dollar * 1000) / 1000).toFixed(0);
models[model.id] = {
tokens_per_dollar: tokens_rounded + 'k',
@@ -2764,8 +2832,8 @@ app.post("/getstatus_openai", jsonParser, async function (request, response_gets
}
}
else {
console.log('Access Token is incorrect.');
response_getstatus_openai.send({ error: true });
console.log('OpenAI status check failed. Either Access Token is incorrect or API endpoint is down.');
response_getstatus_openai.send({ error: true, can_bypass: true, data: { data: [] } });
}
} catch (e) {
console.error(e);
@@ -2795,13 +2863,13 @@ app.post("/openai_bias", jsonParser, async function (request, response) {
if (sentencepieceTokenizers.includes(model)) {
const tokenizer = getSentencepiceTokenizer(model);
encodeFunction = (text) => new Uint32Array(tokenizer.encodeIds(text));
const instance = await tokenizer?.get();
encodeFunction = (text) => new Uint32Array(instance?.encodeIds(text));
} else {
const tokenizer = getTiktokenTokenizer(model);
encodeFunction = (tokenizer.encode.bind(tokenizer));
}
for (const entry of request.body) {
if (!entry || !entry.text) {
continue;
@@ -3003,7 +3071,8 @@ async function sendClaudeRequest(request, response) {
controller.abort();
});
let requestPrompt = convertClaudePrompt(request.body.messages, true, !request.body.exclude_assistant);
let doSystemPrompt = request.body.model === 'claude-2' || request.body.model === 'claude-2.1';
let requestPrompt = convertClaudePrompt(request.body.messages, true, !request.body.exclude_assistant, doSystemPrompt);
if (request.body.assistant_prefill && !request.body.exclude_assistant) {
requestPrompt += request.body.assistant_prefill;
@@ -3408,7 +3477,16 @@ app.post("/tokenize_via_api", jsonParser, async function (request, response) {
if (legacyApi) {
url += '/v1/token-count';
args.body = JSON.stringify({ "prompt": text });
} else {
}
else if (request.body.use_tabby) {
url += '/v1/token/encode';
args.body = JSON.stringify({ "text": text });
}
else if (request.body.use_koboldcpp) {
url += '/api/extra/tokencount';
args.body = JSON.stringify({ "prompt": text });
}
else {
url += '/v1/internal/encode';
args.body = JSON.stringify({ "text": text });
}
@@ -3421,8 +3499,8 @@ app.post("/tokenize_via_api", jsonParser, async function (request, response) {
}
const data = await result.json();
const count = legacyApi ? data?.results[0]?.tokens : data?.length;
const ids = legacyApi ? [] : data?.tokens;
const count = legacyApi ? data?.results[0]?.tokens : (data?.length ?? data?.value);
const ids = legacyApi ? [] : (data?.tokens ?? []);
return response.send({ count, ids });
}
@@ -3481,7 +3559,7 @@ async function fetchJSON(url, args = {}) {
// ** END **
// OpenAI API
require('./src/openai').registerEndpoints(app, jsonParser);
require('./src/openai').registerEndpoints(app, jsonParser, urlencodedParser);
// Tokenizers
require('./src/tokenizers').registerEndpoints(app, jsonParser);
@@ -3528,6 +3606,9 @@ require('./src/classify').registerEndpoints(app, jsonParser);
// Image captioning
require('./src/caption').registerEndpoints(app, jsonParser);
// Web search extension
require('./src/serpapi').registerEndpoints(app, jsonParser);
const tavernUrl = new URL(
(cliArguments.ssl ? 'https://' : 'http://') +
(listen ? '0.0.0.0' : '127.0.0.1') +
@@ -3572,12 +3653,12 @@ const setupTasks = async function () {
console.log(color.green('SillyTavern is listening on: ' + tavernUrl));
if (listen) {
console.log('\n0.0.0.0 means SillyTavern is listening on all network interfaces (Wi-Fi, LAN, localhost). If you want to limit it only to internal localhost (127.0.0.1), change the setting in config.conf to "listen=false". Check "access.log" file in the SillyTavern directory if you want to inspect incoming connections.\n');
console.log('\n0.0.0.0 means SillyTavern is listening on all network interfaces (Wi-Fi, LAN, localhost). If you want to limit it only to internal localhost (127.0.0.1), change the setting in config.yaml to "listen: false". Check "access.log" file in the SillyTavern directory if you want to inspect incoming connections.\n');
}
}
if (listen && !config.whitelistMode && !config.basicAuthMode) {
if (config.securityOverride) {
if (listen && !getConfigValue('whitelistMode', true) && !getConfigValue('basicAuthMode', false)) {
if (getConfigValue('securityOverride', false)) {
console.warn(color.red("Security has been overridden. If it's not a trusted network, change the settings."));
}
else {
@@ -3624,7 +3705,7 @@ function generateTimestamp() {
*/
function backupChat(name, chat) {
try {
const isBackupDisabled = config.disableChatBackup;
const isBackupDisabled = getConfigValue('disableChatBackup', false);
if (isBackupDisabled) {
return;
@@ -3637,7 +3718,7 @@ function backupChat(name, chat) {
// replace non-alphanumeric characters with underscores
name = sanitize(name).replace(/[^a-z0-9]/gi, '_').toLowerCase();
const backupFile = path.join(DIRECTORIES.backups, `chat_${name}_${generateTimestamp()}.json`);
const backupFile = path.join(DIRECTORIES.backups, `chat_${name}_${generateTimestamp()}.jsonl`);
writeFileAtomicSync(backupFile, chat, 'utf-8');
removeOldBackups(`chat_${name}_`);

View File

@@ -37,6 +37,25 @@ function checkAssetFileName(inputFilename) {
return path.normalize(inputFilename).replace(/^(\.\.(\/|\\|$))+/, '');;
}
// Recursive function to get files
/**
 * Recursively collects the paths of all files under a directory.
 * @param {string} dir Directory to walk
 * @param {string[]} [files] Accumulator for discovered file paths
 * @returns {string[]} Flat list of file paths, joined with '/'
 */
function getFiles(dir, files = []) {
    for (const entry of fs.readdirSync(dir)) {
        const fullPath = `${dir}/${entry}`;
        // Directories are descended into; everything else is recorded.
        if (fs.statSync(fullPath).isDirectory()) {
            getFiles(fullPath, files);
            continue;
        }
        files.push(fullPath);
    }
    return files;
}
/**
* Registers the endpoints for the asset management.
* @param {import('express').Express} app Express app
@@ -70,16 +89,14 @@ function registerEndpoints(app, jsonParser) {
// Live2d assets
if (folder == "live2d") {
output[folder] = [];
const live2d_folders = fs.readdirSync(path.join(folderPath, folder));
for (let model_folder of live2d_folders) {
const live2d_model_path = path.join(folderPath, folder, model_folder);
if (fs.statSync(live2d_model_path).isDirectory()) {
for (let file of fs.readdirSync(live2d_model_path)) {
if (file.includes("model")) {
//console.debug("Asset live2d model found:",file)
output[folder].push([`${model_folder}`, path.join("assets", folder, model_folder, file)]);
}
}
const live2d_folder = path.normalize(path.join(folderPath, folder));
const files = getFiles(live2d_folder);
//console.debug("FILE FOUND:",files)
for (let file of files) {
file = path.normalize(file.replace('public' + path.sep, ''));
if (file.includes("model") && file.endsWith(".json")) {
//console.debug("Asset live2d model found:",file)
output[folder].push(path.normalize(path.join(file)));
}
}
continue;
@@ -256,8 +273,8 @@ function registerEndpoints(app, jsonParser) {
if (fs.statSync(live2dModelPath).isDirectory()) {
for (let file of fs.readdirSync(live2dModelPath)) {
//console.debug("Character live2d model found:", file)
if (file.includes("model"))
output.push([`${modelFolder}`, path.join("characters", name, category, modelFolder, file)]);
if (file.includes("model") && file.endsWith(".json"))
output.push(path.join("characters", name, category, modelFolder, file));
}
}
}

View File

@@ -3,10 +3,11 @@
* @param {object[]} messages Array of messages
* @param {boolean} addHumanPrefix Add Human prefix
* @param {boolean} addAssistantPostfix Add Assistant postfix
* @param {boolean} withSystemPrompt Build system prompt before "\n\nHuman: "
* @returns {string} Prompt for Claude
* @copyright Prompt Conversion script taken from RisuAI by kwaroran (GPLv3).
*/
function convertClaudePrompt(messages, addHumanPrefix, addAssistantPostfix) {
function convertClaudePrompt(messages, addHumanPrefix, addAssistantPostfix, withSystemPrompt) {
// Claude doesn't support message names, so we'll just add them to the message content.
for (const message of messages) {
if (message.name && message.role !== "system") {
@@ -15,6 +16,24 @@ function convertClaudePrompt(messages, addHumanPrefix, addAssistantPostfix) {
}
}
let systemPrompt = '';
if (withSystemPrompt) {
let lastSystemIdx = -1;
for (let i = 0; i < messages.length - 1; i++) {
const message = messages[i];
if (message.role === "system" && !message.name) {
systemPrompt += message.content + '\n\n';
} else {
lastSystemIdx = i - 1;
break;
}
}
if (lastSystemIdx >= 0) {
messages.splice(0, lastSystemIdx + 1);
}
}
let requestPrompt = messages.map((v) => {
let prefix = '';
switch (v.role) {
@@ -46,6 +65,10 @@ function convertClaudePrompt(messages, addHumanPrefix, addAssistantPostfix) {
requestPrompt = requestPrompt + '\n\nAssistant: ';
}
if (withSystemPrompt) {
requestPrompt = systemPrompt + requestPrompt;
}
return requestPrompt;
}

View File

@@ -23,6 +23,7 @@ const DIRECTORIES = {
backups: 'backups/',
quickreplies: 'public/QuickReplies',
assets: 'public/assets',
comfyWorkflows: 'public/user/workflows',
};
const UNSAFE_EXTENSIONS = [
@@ -103,10 +104,6 @@ const UNSAFE_EXTENSIONS = [
];
const PALM_SAFETY = [
{
category: "HARM_CATEGORY_UNSPECIFIED",
threshold: "BLOCK_NONE"
},
{
category: "HARM_CATEGORY_DEROGATORY",
threshold: "BLOCK_NONE"

View File

@@ -2,14 +2,14 @@ const fs = require('fs');
const path = require('path');
const fetch = require('node-fetch').default;
const sanitize = require('sanitize-filename');
const config = require(path.join(process.cwd(), './config.conf'));
const { getConfigValue } = require('./util');
const contentDirectory = path.join(process.cwd(), 'default/content');
const contentLogPath = path.join(contentDirectory, 'content.log');
const contentIndexPath = path.join(contentDirectory, 'index.json');
function checkForNewContent() {
try {
if (config.skipContentCheck) {
if (getConfigValue('skipContentCheck', false)) {
return;
}
@@ -71,6 +71,8 @@ function getTargetByType(type) {
return 'public/User Avatars';
case 'theme':
return 'public/themes';
case 'workflow':
return 'public/user/workflows';
default:
return null;
}

View File

@@ -268,12 +268,20 @@ function registerEndpoints(app, jsonParser) {
// NAI handholding for prompts
ucPreset: 0,
qualityToggle: false,
add_original_image: false,
controlnet_strength: 1,
dynamic_thresholding: false,
legacy: false,
sm: false,
sm_dyn: false,
uncond_scale: 1,
},
}),
});
if (!generateResult.ok) {
console.log('NovelAI returned an error.', generateResult.statusText);
const text = await generateResult.text();
console.log('NovelAI returned an error.', generateResult.statusText, text);
return response.sendStatus(500);
}

View File

@@ -1,23 +1,34 @@
const { readSecret, SECRET_KEYS } = require("./secrets");
const fetch = require('node-fetch').default;
const FormData = require('form-data');
const fs = require('fs');
/**
* Registers the OpenAI endpoints.
* @param {import("express").Express} app
* @param {any} jsonParser
* @param {import("express").Express} app Express app
* @param {any} jsonParser JSON parser
* @param {any} urlencodedParser Form data parser
*/
function registerEndpoints(app, jsonParser) {
function registerEndpoints(app, jsonParser, urlencodedParser) {
app.post('/api/openai/caption-image', jsonParser, async (request, response) => {
try {
const key = readSecret(SECRET_KEYS.OPENAI);
let key = '';
if (request.body.api === 'openai') {
key = readSecret(SECRET_KEYS.OPENAI);
}
if (request.body.api === 'openrouter') {
key = readSecret(SECRET_KEYS.OPENROUTER);
}
if (!key) {
console.log('No OpenAI key found');
console.log('No key found for API', request.body.api);
return response.sendStatus(401);
}
const body = {
model: "gpt-4-vision-preview",
model: request.body.model,
messages: [
{
role: "user",
@@ -27,15 +38,29 @@ function registerEndpoints(app, jsonParser) {
]
}
],
max_tokens: 300
max_tokens: 500
};
console.log('OpenAI request', body);
const result = await fetch('https://api.openai.com/v1/chat/completions', {
console.log('Multimodal captioning request', body);
let apiUrl = '';
let headers = {};
if (request.body.api === 'openrouter') {
apiUrl = 'https://openrouter.ai/api/v1/chat/completions';
headers['HTTP-Referer'] = request.headers.referer;
}
if (request.body.api === 'openai') {
apiUrl = 'https://api.openai.com/v1/chat/completions';
}
const result = await fetch(apiUrl, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${key}`,
...headers,
},
body: JSON.stringify(body),
timeout: 0,
@@ -43,12 +68,12 @@ function registerEndpoints(app, jsonParser) {
if (!result.ok) {
const text = await result.text();
console.log('OpenAI request failed', result.statusText, text);
console.log('Multimodal captioning request failed', result.statusText, text);
return response.status(500).send(text);
}
const data = await result.json();
console.log('OpenAI response', data);
console.log('Multimodal captioning response', data);
const caption = data?.choices[0]?.message?.content;
if (!caption) {
@@ -63,6 +88,93 @@ function registerEndpoints(app, jsonParser) {
}
});
// Transcribes an uploaded audio file via the OpenAI audio transcription API.
// Expects a multipart upload (request.file) and an optional model/language in the body.
app.post('/api/openai/transcribe-audio', urlencodedParser, async (request, response) => {
    try {
        const key = readSecret(SECRET_KEYS.OPENAI);

        if (!key) {
            console.log('No OpenAI key found');
            return response.sendStatus(401);
        }

        if (!request.file) {
            console.log('No audio file found');
            return response.sendStatus(400);
        }

        console.log('Processing audio file', request.file.path);

        const formData = new FormData();
        formData.append('file', fs.createReadStream(request.file.path), { filename: 'audio.wav', contentType: 'audio/wav' });
        formData.append('model', request.body.model);

        if (request.body.language) {
            formData.append('language', request.body.language);
        }

        try {
            const result = await fetch('https://api.openai.com/v1/audio/transcriptions', {
                method: 'POST',
                headers: {
                    'Authorization': `Bearer ${key}`,
                    ...formData.getHeaders(),
                },
                body: formData,
            });

            if (!result.ok) {
                const text = await result.text();
                console.log('OpenAI request failed', result.statusText, text);
                return response.status(500).send(text);
            }

            const data = await result.json();
            console.log('OpenAI transcription response', data);
            return response.json(data);
        } finally {
            // Bug fix: the uploaded temp file was only removed on the success
            // path, leaking files whenever the upstream call failed or threw.
            fs.rmSync(request.file.path, { force: true });
        }
    } catch (error) {
        console.error('OpenAI transcription failed', error);
        response.status(500).send('Internal server error');
    }
});
// Generates speech audio (MP3) from text via the OpenAI TTS API and streams
// the resulting buffer back with an audio/mpeg content type.
app.post('/api/openai/generate-voice', jsonParser, async (request, response) => {
    try {
        const key = readSecret(SECRET_KEYS.OPENAI);

        if (!key) {
            console.log('No OpenAI key found');
            return response.sendStatus(401);
        }

        // Defaults mirror the OpenAI TTS endpoint's standard voice/model.
        const payload = {
            input: request.body.text,
            response_format: 'mp3',
            voice: request.body.voice ?? 'alloy',
            speed: request.body.speed ?? 1,
            model: request.body.model ?? 'tts-1',
        };

        const result = await fetch('https://api.openai.com/v1/audio/speech', {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                Authorization: `Bearer ${key}`,
            },
            body: JSON.stringify(payload),
        });

        if (!result.ok) {
            const text = await result.text();
            console.log('OpenAI request failed', result.statusText, text);
            return response.status(500).send(text);
        }

        const audio = await result.arrayBuffer();
        response.setHeader('Content-Type', 'audio/mpeg');
        return response.send(Buffer.from(audio));
    } catch (error) {
        console.error('OpenAI TTS generation failed', error);
        response.status(500).send('Internal server error');
    }
});
app.post('/api/openai/generate-image', jsonParser, async (request, response) => {
try {
const key = readSecret(SECRET_KEYS.OPENAI);

View File

@@ -8,6 +8,7 @@ const SECRET_KEYS = {
HORDE: 'api_key_horde',
MANCER: 'api_key_mancer',
APHRODITE: 'api_key_aphrodite',
TABBY: 'api_key_tabby',
OPENAI: 'api_key_openai',
NOVEL: 'api_key_novel',
CLAUDE: 'api_key_claude',
@@ -21,6 +22,7 @@ const SECRET_KEYS = {
ONERING_URL: 'oneringtranslator_url',
DEEPLX_URL: 'deeplx_url',
PALM: 'api_key_palm',
SERPAPI: 'api_key_serpapi',
}
/**
@@ -171,7 +173,7 @@ function registerEndpoints(app, jsonParser) {
const allowKeysExposure = getConfigValue('allowKeysExposure', false);
if (!allowKeysExposure) {
console.error('secrets.json could not be viewed unless the value of allowKeysExposure in config.conf is set to true');
console.error('secrets.json could not be viewed unless the value of allowKeysExposure in config.yaml is set to true');
return response.sendStatus(403);
}
@@ -188,6 +190,30 @@ function registerEndpoints(app, jsonParser) {
return response.sendStatus(500);
}
});
// Looks up a single secret by key and returns its value.
// Guarded by allowKeysExposure so keys are never exposed by default.
app.post('/api/secrets/find', jsonParser, (request, response) => {
    const allowKeysExposure = getConfigValue('allowKeysExposure', false);

    if (!allowKeysExposure) {
        console.error('Cannot fetch secrets unless allowKeysExposure in config.yaml is set to true');
        return response.sendStatus(403);
    }

    const key = request.body.key;

    try {
        const secret = readSecret(key);

        if (!secret) {
            // Bug fix: must return here — without it execution fell through to
            // response.send() after sendStatus(404), attempting a second
            // response on an already-finished request.
            return response.sendStatus(404);
        }

        return response.send({ value: secret });
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});
}
module.exports = {

BIN
src/sentencepiece/yi.model Normal file

Binary file not shown.

39
src/serpapi.js Normal file
View File

@@ -0,0 +1,39 @@
const fetch = require('node-fetch').default;
const { readSecret, SECRET_KEYS } = require('./secrets');
/**
* Registers the SerpApi endpoints.
* @param {import("express").Express} app
* @param {any} jsonParser
*/
function registerEndpoints(app, jsonParser) {
app.post('/api/serpapi/search', jsonParser, async (request, response) => {
try {
const key = readSecret(SECRET_KEYS.SERPAPI);
if (!key) {
console.log('No SerpApi key found');
return response.sendStatus(401);
}
const { query } = request.body;
const result = await fetch(`https://serpapi.com/search.json?q=${encodeURIComponent(query)}&api_key=${key}`);
if (!result.ok) {
const text = await result.text();
console.log('SerpApi request failed', result.statusText, text);
return response.status(500).send(text);
}
const data = await result.json();
return response.json(data);
} catch (error) {
console.log(error);
return response.sendStatus(500);
}
});
}
module.exports = {
registerEndpoints,
};

View File

@@ -1,5 +1,9 @@
const fetch = require('node-fetch').default;
const sanitize = require('sanitize-filename');
const { getBasicAuthHeader, delay } = require('./util');
const fs = require('fs');
const { DIRECTORIES } = require('./constants.js');
const writeFileAtomicSync = require('write-file-atomic').sync;
/**
* Sanitizes a string.
@@ -38,6 +42,13 @@ function removePattern(x, pattern) {
return x;
}
/**
 * Lists the saved ComfyUI workflow files.
 * @returns {string[]} Non-hidden *.json file names, locale-aware sorted
 */
function getComfyWorkflows() {
    return fs
        .readdirSync(DIRECTORIES.comfyWorkflows)
        // Idiom fix: use startsWith with strict semantics instead of loose `!=`
        // indexing; skip dotfiles, keep only JSON workflows.
        .filter(file => !file.startsWith('.') && file.toLowerCase().endsWith('.json'))
        .sort(Intl.Collator().compare);
}
/**
* Registers the endpoints for the Stable Diffusion API extension.
* @param {import("express").Express} app Express app
@@ -347,6 +358,194 @@ function registerEndpoints(app, jsonParser) {
return response.send({ prompt: originalPrompt });
}
});
// Health check: verifies the ComfyUI server at the given URL is reachable.
app.post('/api/sd/comfy/ping', jsonParser, async (request, response) => {
    try {
        const statsUrl = new URL(request.body.url);
        statsUrl.pathname = '/system_stats';

        const pingResult = await fetch(statsUrl);
        if (!pingResult.ok) {
            throw new Error('ComfyUI returned an error.');
        }

        return response.sendStatus(200);
    } catch (error) {
        console.log(error);
        return response.sendStatus(500);
    }
});
// Returns the sampler names supported by the ComfyUI KSampler node.
app.post('/api/sd/comfy/samplers', jsonParser, async (request, response) => {
    try {
        const infoUrl = new URL(request.body.url);
        infoUrl.pathname = '/object_info';

        const infoResult = await fetch(infoUrl);
        if (!infoResult.ok) {
            throw new Error('ComfyUI returned an error.');
        }

        const info = await infoResult.json();
        return response.send(info.KSampler.input.required.sampler_name[0]);
    } catch (error) {
        console.log(error);
        return response.sendStatus(500);
    }
});
// Returns the available checkpoint models as {value, text} options.
app.post('/api/sd/comfy/models', jsonParser, async (request, response) => {
    try {
        const infoUrl = new URL(request.body.url);
        infoUrl.pathname = '/object_info';

        const infoResult = await fetch(infoUrl);
        if (!infoResult.ok) {
            throw new Error('ComfyUI returned an error.');
        }

        const info = await infoResult.json();
        const checkpoints = info.CheckpointLoaderSimple.input.required.ckpt_name[0];
        return response.send(checkpoints.map(it => ({ value: it, text: it })));
    } catch (error) {
        console.log(error);
        return response.sendStatus(500);
    }
});
// Returns the scheduler names supported by the ComfyUI KSampler node.
app.post('/api/sd/comfy/schedulers', jsonParser, async (request, response) => {
    try {
        const infoUrl = new URL(request.body.url);
        infoUrl.pathname = '/object_info';

        const infoResult = await fetch(infoUrl);
        if (!infoResult.ok) {
            throw new Error('ComfyUI returned an error.');
        }

        const info = await infoResult.json();
        return response.send(info.KSampler.input.required.scheduler[0]);
    } catch (error) {
        console.log(error);
        return response.sendStatus(500);
    }
});
// Returns the VAE names known to the ComfyUI VAELoader node.
app.post('/api/sd/comfy/vaes', jsonParser, async (request, response) => {
    try {
        const infoUrl = new URL(request.body.url);
        infoUrl.pathname = '/object_info';

        const infoResult = await fetch(infoUrl);
        if (!infoResult.ok) {
            throw new Error('ComfyUI returned an error.');
        }

        const info = await infoResult.json();
        return response.send(info.VAELoader.input.required.vae_name[0]);
    } catch (error) {
        console.log(error);
        return response.sendStatus(500);
    }
});
// Lists the saved ComfyUI workflow file names.
app.post('/api/sd/comfy/workflows', jsonParser, async (request, response) => {
    try {
        return response.send(getComfyWorkflows());
    } catch (error) {
        console.log(error);
        return response.sendStatus(500);
    }
});
// Reads a single ComfyUI workflow file; falls back to the bundled default
// workflow when the requested file does not exist.
app.post('/api/sd/comfy/workflow', jsonParser, async (request, response) => {
    try {
        // Fix: renamed the local from 'path' — it shadowed the required
        // 'path' module at file scope.
        let filePath = `${DIRECTORIES.comfyWorkflows}/${sanitize(String(request.body.file_name))}`;
        if (!fs.existsSync(filePath)) {
            filePath = `${DIRECTORIES.comfyWorkflows}/Default_Comfy_Workflow.json`;
        }
        const data = fs.readFileSync(filePath, { encoding: 'utf-8' });
        // The workflow is returned as a JSON-encoded string, matching the
        // original response shape the client expects.
        return response.send(JSON.stringify(data));
    } catch (error) {
        console.log(error);
        return response.sendStatus(500);
    }
});
// Persists a ComfyUI workflow file atomically and returns the refreshed list.
app.post('/api/sd/comfy/save-workflow', jsonParser, async (request, response) => {
    try {
        const fileName = sanitize(String(request.body.file_name));
        const filePath = `${DIRECTORIES.comfyWorkflows}/${fileName}`;
        writeFileAtomicSync(filePath, request.body.workflow, 'utf8');
        return response.send(getComfyWorkflows());
    } catch (error) {
        console.log(error);
        return response.sendStatus(500);
    }
});
// Deletes a saved ComfyUI workflow file if it exists (no-op otherwise).
app.post('/api/sd/comfy/delete-workflow', jsonParser, async (request, response) => {
    try {
        // Fix: renamed the local from 'path' — it shadowed the required
        // 'path' module at file scope.
        const filePath = `${DIRECTORIES.comfyWorkflows}/${sanitize(String(request.body.file_name))}`;
        if (fs.existsSync(filePath)) {
            fs.unlinkSync(filePath);
        }
        return response.sendStatus(200);
    } catch (error) {
        console.log(error);
        return response.sendStatus(500);
    }
});
// Runs a ComfyUI generation: submits the prompt to /prompt, polls /history
// until the job id appears, then fetches the first output image from /view
// and returns it base64-encoded.
app.post('/api/sd/comfy/generate', jsonParser, async (request, response) => {
    try {
        const url = new URL(request.body.url);
        url.pathname = '/prompt';

        const promptResult = await fetch(url, {
            method: 'POST',
            body: request.body.prompt,
        });
        if (!promptResult.ok) {
            throw new Error('ComfyUI returned an error.');
        }

        const data = await promptResult.json();
        const id = data.prompt_id;
        let item;
        const historyUrl = new URL(request.body.url);
        historyUrl.pathname = '/history';
        // NOTE(review): this poll has no timeout — if ComfyUI never reports
        // the job, the request hangs; consider a max-attempts cap.
        while (true) {
            const result = await fetch(historyUrl);
            if (!result.ok) {
                throw new Error('ComfyUI returned an error.');
            }
            const history = await result.json();
            item = history[id];
            if (item) {
                break;
            }
            await delay(100);
        }

        // First image across all output nodes of the finished job.
        const imgInfo = Object.keys(item.outputs).map(it => item.outputs[it].images).flat()[0];
        const imgUrl = new URL(request.body.url);
        imgUrl.pathname = '/view';
        imgUrl.search = `?filename=${imgInfo.filename}&subfolder=${imgInfo.subfolder}&type=${imgInfo.type}`;

        const imgResponse = await fetch(imgUrl);
        if (!imgResponse.ok) {
            throw new Error('ComfyUI returned an error.');
        }
        const imgBuffer = await imgResponse.buffer();
        return response.send(imgBuffer.toString('base64'));
    } catch (error) {
        // Bug fix: the error was previously swallowed silently; log it like
        // every sibling ComfyUI endpoint does before replying 500.
        console.log(error);
        return response.sendStatus(500);
    }
});
}
module.exports = {

View File

@@ -1,4 +1,5 @@
const fs = require('fs');
const path = require('path');
const { SentencePieceProcessor } = require("@agnai/sentencepiece-js");
const tiktoken = require('@dqbd/tiktoken');
const { Tokenizer } = require('@agnai/web-tokenizers');
@@ -43,22 +44,40 @@ const TEXT_COMPLETION_MODELS = [
const CHARS_PER_TOKEN = 3.35;
let spp_llama;
let spp_nerd;
let spp_nerd_v2;
let spp_mistral;
let claude_tokenizer;
class SentencePieceTokenizer {
#instance;
#model;
async function loadSentencepieceTokenizer(modelPath) {
try {
const spp = new SentencePieceProcessor();
await spp.load(modelPath);
return spp;
} catch (error) {
console.error("Sentencepiece tokenizer failed to load: " + modelPath, error);
return null;
constructor(model) {
this.#model = model;
}
};
/**
* Gets the Sentencepiece tokenizer instance.
*/
async get() {
if (this.#instance) {
return this.#instance;
}
try {
this.#instance = new SentencePieceProcessor();
await this.#instance.load(this.#model);
console.log('Instantiated the tokenizer for', path.parse(this.#model).name);
return this.#instance;
} catch (error) {
console.error("Sentencepiece tokenizer failed to load: " + this.#model, error);
return null;
}
}
}
const spp_llama = new SentencePieceTokenizer('src/sentencepiece/llama.model');
const spp_nerd = new SentencePieceTokenizer('src/sentencepiece/nerdstash.model');
const spp_nerd_v2 = new SentencePieceTokenizer('src/sentencepiece/nerdstash_v2.model');
const spp_mistral = new SentencePieceTokenizer('src/sentencepiece/mistral.model');
const spp_yi = new SentencePieceTokenizer('src/sentencepiece/yi.model');
let claude_tokenizer;
const sentencepieceTokenizers = [
'llama',
@@ -70,7 +89,7 @@ const sentencepieceTokenizers = [
/**
* Gets the Sentencepiece tokenizer by the model name.
* @param {string} model Sentencepiece model name
* @returns {*} Sentencepiece tokenizer
* @returns {SentencePieceTokenizer|null} Sentencepiece tokenizer
*/
function getSentencepiceTokenizer(model) {
if (model.includes('llama')) {
@@ -88,11 +107,21 @@ function getSentencepiceTokenizer(model) {
if (model.includes('nerdstash_v2')) {
return spp_nerd_v2;
}
return null;
}
async function countSentencepieceTokens(spp, text) {
/**
* Counts the token ids for the given text using the Sentencepiece tokenizer.
* @param {SentencePieceTokenizer} tokenizer Sentencepiece tokenizer
* @param {string} text Text to tokenize
* @returns { Promise<{ids: number[], count: number}> } Tokenization result
*/
async function countSentencepieceTokens(tokenizer, text) {
const instance = await tokenizer?.get();
// Fallback to strlen estimation
if (!spp) {
if (!instance) {
return {
ids: [],
count: Math.ceil(text.length / CHARS_PER_TOKEN)
@@ -101,13 +130,19 @@ async function countSentencepieceTokens(spp, text) {
let cleaned = text; // cleanText(text); <-- cleaning text can result in an incorrect tokenization
let ids = spp.encodeIds(cleaned);
let ids = instance.encodeIds(cleaned);
return {
ids,
count: ids.length
};
}
/**
* Counts the tokens in the given array of objects using the Sentencepiece tokenizer.
* @param {SentencePieceTokenizer} tokenizer
* @param {object[]} array Array of objects to tokenize
* @returns {Promise<number>} Number of tokens
*/
async function countSentencepieceArrayTokens(tokenizer, array) {
const jsonBody = array.flatMap(x => Object.values(x)).join('\n\n');
const result = await countSentencepieceTokens(tokenizer, jsonBody);
@@ -147,18 +182,6 @@ async function getWebTokenizersChunks(tokenizer, ids) {
* @returns {string} Tokenizer model to use
*/
function getTokenizerModel(requestModel) {
if (requestModel.includes('claude')) {
return 'claude';
}
if (requestModel.includes('llama')) {
return 'llama';
}
if (requestModel.includes('mistral')) {
return 'mistral';
}
if (requestModel.includes('gpt-4-32k')) {
return 'gpt-4-32k';
}
@@ -179,6 +202,22 @@ function getTokenizerModel(requestModel) {
return requestModel;
}
if (requestModel.includes('claude')) {
return 'claude';
}
if (requestModel.includes('llama')) {
return 'llama';
}
if (requestModel.includes('mistral')) {
return 'mistral';
}
if (requestModel.includes('yi')) {
return 'yi';
}
// default
return 'gpt-3.5-turbo';
}
@@ -206,7 +245,7 @@ async function loadClaudeTokenizer(modelPath) {
}
function countClaudeTokens(tokenizer, messages) {
const convertedPrompt = convertClaudePrompt(messages, false, false);
const convertedPrompt = convertClaudePrompt(messages, false, false, false);
// Fallback to strlen estimation
if (!tokenizer) {
@@ -219,10 +258,10 @@ function countClaudeTokens(tokenizer, messages) {
/**
* Creates an API handler for encoding Sentencepiece tokens.
* @param {function} getTokenizerFn Tokenizer provider function
* @param {SentencePieceTokenizer} tokenizer Sentencepiece tokenizer
* @returns {any} Handler function
*/
function createSentencepieceEncodingHandler(getTokenizerFn) {
function createSentencepieceEncodingHandler(tokenizer) {
return async function (request, response) {
try {
if (!request.body) {
@@ -230,9 +269,9 @@ function createSentencepieceEncodingHandler(getTokenizerFn) {
}
const text = request.body.text || '';
const tokenizer = getTokenizerFn();
const instance = await tokenizer?.get();
const { ids, count } = await countSentencepieceTokens(tokenizer, text);
const chunks = await tokenizer.encodePieces(text);
const chunks = await instance?.encodePieces(text);
return response.send({ ids, count, chunks });
} catch (error) {
console.log(error);
@@ -243,10 +282,10 @@ function createSentencepieceEncodingHandler(getTokenizerFn) {
/**
* Creates an API handler for decoding Sentencepiece tokens.
* @param {function} getTokenizerFn Tokenizer provider function
* @param {SentencePieceTokenizer} tokenizer Sentencepiece tokenizer
* @returns {any} Handler function
*/
function createSentencepieceDecodingHandler(getTokenizerFn) {
function createSentencepieceDecodingHandler(tokenizer) {
return async function (request, response) {
try {
if (!request.body) {
@@ -254,8 +293,8 @@ function createSentencepieceDecodingHandler(getTokenizerFn) {
}
const ids = request.body.ids || [];
const tokenizer = getTokenizerFn();
const text = await tokenizer.decodeIds(ids);
const instance = await tokenizer?.get();
const text = await instance?.decodeIds(ids);
return response.send({ text });
} catch (error) {
console.log(error);
@@ -317,13 +356,7 @@ function createTiktokenDecodingHandler(modelId) {
* @returns {Promise<void>} Promise that resolves when the tokenizers are loaded
*/
async function loadTokenizers() {
[spp_llama, spp_nerd, spp_nerd_v2, spp_mistral, claude_tokenizer] = await Promise.all([
loadSentencepieceTokenizer('src/sentencepiece/llama.model'),
loadSentencepieceTokenizer('src/sentencepiece/nerdstash.model'),
loadSentencepieceTokenizer('src/sentencepiece/nerdstash_v2.model'),
loadSentencepieceTokenizer('src/sentencepiece/mistral.model'),
loadClaudeTokenizer('src/claude.json'),
]);
claude_tokenizer = await loadClaudeTokenizer('src/claude.json');
}
/**
@@ -354,15 +387,17 @@ function registerEndpoints(app, jsonParser) {
}
});
app.post("/api/tokenize/llama", jsonParser, createSentencepieceEncodingHandler(() => spp_llama));
app.post("/api/tokenize/nerdstash", jsonParser, createSentencepieceEncodingHandler(() => spp_nerd));
app.post("/api/tokenize/nerdstash_v2", jsonParser, createSentencepieceEncodingHandler(() => spp_nerd_v2));
app.post("/api/tokenize/mistral", jsonParser, createSentencepieceEncodingHandler(() => spp_mistral));
app.post("/api/tokenize/llama", jsonParser, createSentencepieceEncodingHandler(spp_llama));
app.post("/api/tokenize/nerdstash", jsonParser, createSentencepieceEncodingHandler(spp_nerd));
app.post("/api/tokenize/nerdstash_v2", jsonParser, createSentencepieceEncodingHandler(spp_nerd_v2));
app.post("/api/tokenize/mistral", jsonParser, createSentencepieceEncodingHandler(spp_mistral));
app.post("/api/tokenize/yi", jsonParser, createSentencepieceEncodingHandler(spp_yi));
app.post("/api/tokenize/gpt2", jsonParser, createTiktokenEncodingHandler('gpt2'));
app.post("/api/decode/llama", jsonParser, createSentencepieceDecodingHandler(() => spp_llama));
app.post("/api/decode/nerdstash", jsonParser, createSentencepieceDecodingHandler(() => spp_nerd));
app.post("/api/decode/nerdstash_v2", jsonParser, createSentencepieceDecodingHandler(() => spp_nerd_v2));
app.post("/api/decode/mistral", jsonParser, createSentencepieceDecodingHandler(() => spp_mistral));
app.post("/api/decode/llama", jsonParser, createSentencepieceDecodingHandler(spp_llama));
app.post("/api/decode/nerdstash", jsonParser, createSentencepieceDecodingHandler(spp_nerd));
app.post("/api/decode/nerdstash_v2", jsonParser, createSentencepieceDecodingHandler(spp_nerd_v2));
app.post("/api/decode/mistral", jsonParser, createSentencepieceDecodingHandler(spp_mistral));
app.post("/api/decode/yi", jsonParser, createSentencepieceDecodingHandler(spp_yi));
app.post("/api/decode/gpt2", jsonParser, createTiktokenDecodingHandler('gpt2'));
app.post("/api/tokenize/openai-encode", jsonParser, async function (req, res) {
@@ -370,12 +405,17 @@ function registerEndpoints(app, jsonParser) {
const queryModel = String(req.query.model || '');
if (queryModel.includes('llama')) {
const handler = createSentencepieceEncodingHandler(() => spp_llama);
const handler = createSentencepieceEncodingHandler(spp_llama);
return handler(req, res);
}
if (queryModel.includes('mistral')) {
const handler = createSentencepieceEncodingHandler(() => spp_mistral);
const handler = createSentencepieceEncodingHandler(spp_mistral);
return handler(req, res);
}
if (queryModel.includes('yi')) {
const handler = createSentencepieceEncodingHandler(spp_yi);
return handler(req, res);
}
@@ -395,6 +435,40 @@ function registerEndpoints(app, jsonParser) {
}
});
app.post('/api/decode/openai', jsonParser, async function (req, res) {
try {
const queryModel = String(req.query.model || '');
if (queryModel.includes('llama')) {
const handler = createSentencepieceDecodingHandler(spp_llama);
return handler(req, res);
}
if (queryModel.includes('mistral')) {
const handler = createSentencepieceDecodingHandler(spp_mistral);
return handler(req, res);
}
if (queryModel.includes('yi')) {
const handler = createSentencepieceDecodingHandler(spp_yi);
return handler(req, res);
}
if (queryModel.includes('claude')) {
const ids = req.body.ids || [];
const chunkText = await claude_tokenizer.decode(new Uint32Array(ids));
return res.send({ text: chunkText });
}
const model = getTokenizerModel(queryModel);
const handler = createTiktokenDecodingHandler(model);
return handler(req, res);
} catch (error) {
console.log(error);
return res.send({ text: '' });
}
});
app.post("/api/tokenize/openai", jsonParser, async function (req, res) {
try {
if (!req.body) return res.sendStatus(400);
@@ -403,21 +477,26 @@ function registerEndpoints(app, jsonParser) {
const queryModel = String(req.query.model || '');
const model = getTokenizerModel(queryModel);
if (model == 'claude') {
if (model === 'claude') {
num_tokens = countClaudeTokens(claude_tokenizer, req.body);
return res.send({ "token_count": num_tokens });
}
if (model == 'llama') {
if (model === 'llama') {
num_tokens = await countSentencepieceArrayTokens(spp_llama, req.body);
return res.send({ "token_count": num_tokens });
}
if (model == 'mistral') {
if (model === 'mistral') {
num_tokens = await countSentencepieceArrayTokens(spp_mistral, req.body);
return res.send({ "token_count": num_tokens });
}
if (model === 'yi') {
num_tokens = await countSentencepieceArrayTokens(spp_yi, req.body);
return res.send({ "token_count": num_tokens });
}
const tokensPerName = queryModel.includes('gpt-3.5-turbo-0301') ? -1 : 1;
const tokensPerMessage = queryModel.includes('gpt-3.5-turbo-0301') ? 4 : 3;
const tokensPadding = 3;
@@ -462,9 +541,6 @@ module.exports = {
TEXT_COMPLETION_MODELS,
getTokenizerModel,
getTiktokenTokenizer,
loadSentencepieceTokenizer,
loadClaudeTokenizer,
countSentencepieceTokens,
countClaudeTokens,
loadTokenizers,
registerEndpoints,

View File

@@ -65,7 +65,7 @@ function getModelForTask(task) {
const model = getConfigValue(tasks[task].configField, null);
return model || defaultModel;
} catch (error) {
console.warn('Failed to read config.conf, using default classification model.');
console.warn('Failed to read config.yaml, using default classification model.');
return defaultModel;
}
}

View File

@@ -58,34 +58,44 @@ function registerEndpoints(app, jsonParser) {
});
app.post('/api/translate/google', jsonParser, async (request, response) => {
const { generateRequestUrl, normaliseResponse } = require('google-translate-api-browser');
const text = request.body.text;
const lang = request.body.lang;
try {
const { generateRequestUrl, normaliseResponse } = require('google-translate-api-browser');
const text = request.body.text;
const lang = request.body.lang;
if (!text || !lang) {
return response.sendStatus(400);
}
if (!text || !lang) {
return response.sendStatus(400);
}
console.log('Input text: ' + text);
console.log('Input text: ' + text);
const url = generateRequestUrl(text, { to: lang });
const url = generateRequestUrl(text, { to: lang });
https.get(url, (resp) => {
let data = '';
https.get(url, (resp) => {
let data = '';
resp.on('data', (chunk) => {
data += chunk;
resp.on('data', (chunk) => {
data += chunk;
});
resp.on('end', () => {
try {
const result = normaliseResponse(JSON.parse(data));
console.log('Translated text: ' + result.text);
return response.send(result.text);
} catch (error) {
console.log("Translation error", error);
return response.sendStatus(500);
}
});
}).on("error", (err) => {
console.log("Translation error: " + err.message);
return response.sendStatus(500);
});
resp.on('end', () => {
const result = normaliseResponse(JSON.parse(data));
console.log('Translated text: ' + result.text);
return response.send(result.text);
});
}).on("error", (err) => {
console.log("Translation error: " + err.message);
} catch (error) {
console.log("Translation error", error);
return response.sendStatus(500);
});
}
});
app.post('/api/translate/deepl', jsonParser, async (request, response) => {

View File

@@ -4,20 +4,46 @@ const commandExistsSync = require('command-exists').sync;
const _ = require('lodash');
const yauzl = require('yauzl');
const mime = require('mime-types');
const yaml = require('yaml');
const { default: simpleGit } = require('simple-git');
/**
* Returns the config object from the config.conf file.
* Returns the config object from the config.yaml file.
* @returns {object} Config object
*/
function getConfig() {
try {
const config = require(path.join(process.cwd(), './config.conf'));
return config;
} catch (error) {
console.warn('Failed to read config.conf');
return {};
function getNewConfig() {
try {
const config = yaml.parse(fs.readFileSync(path.join(process.cwd(), './config.yaml'), 'utf8'));
return config;
} catch (error) {
console.warn('Failed to read config.yaml');
return {};
}
}
function getLegacyConfig() {
try {
console.log(color.yellow('WARNING: config.conf is deprecated. Please run "npm run postinstall" to convert to config.yaml'));
const config = require(path.join(process.cwd(), './config.conf'));
return config;
} catch (error) {
console.warn('Failed to read config.conf');
return {};
}
}
if (fs.existsSync('./config.yaml')) {
return getNewConfig();
}
if (fs.existsSync('./config.conf')) {
return getLegacyConfig();
}
console.error(color.red('No config file found. Please create a config.yaml file. The default config file can be found in the /default folder.'));
console.error(color.red('The program will now exit.'));
process.exit(1);
}
/**
@@ -217,6 +243,22 @@ function deepMerge(target, source) {
return output;
}
const color = {
byNum: (mess, fgNum) => {
mess = mess || '';
fgNum = fgNum === undefined ? 31 : fgNum;
return '\u001b[' + fgNum + 'm' + mess + '\u001b[39m';
},
black: (mess) => color.byNum(mess, 30),
red: (mess) => color.byNum(mess, 31),
green: (mess) => color.byNum(mess, 32),
yellow: (mess) => color.byNum(mess, 33),
blue: (mess) => color.byNum(mess, 34),
magenta: (mess) => color.byNum(mess, 35),
cyan: (mess) => color.byNum(mess, 36),
white: (mess) => color.byNum(mess, 37)
};
module.exports = {
getConfig,
getConfigValue,
@@ -227,4 +269,5 @@ module.exports = {
readAllChunks,
delay,
deepMerge,
color,
};