mirror of
https://github.com/SillyTavern/SillyTavern.git
synced 2025-06-05 21:59:27 +02:00
Merge branch 'staging' into plugin-router
This commit is contained in:
@@ -25,6 +25,9 @@ autorun: true
|
||||
disableThumbnails: false
|
||||
# Thumbnail quality (0-100)
|
||||
thumbnailsQuality: 95
|
||||
# Generate avatar thumbnails as PNG instead of JPG (preserves transparency but increases filesize by about 100%)
|
||||
# Changing this only affects new thumbnails. To recreate the old ones, clear out your ST/thumbnails/ folder.
|
||||
avatarThumbnailsPng: false
|
||||
# Allow secret keys exposure via API
|
||||
allowKeysExposure: false
|
||||
# Skip new default content checks
|
||||
@@ -54,6 +57,10 @@ extras:
|
||||
openai:
|
||||
# Will send a random user ID to OpenAI completion API
|
||||
randomizeUserId: false
|
||||
# If not empty, will add this as a system message to the start of every caption completion prompt
|
||||
# Example: "Perform the instructions to the best of your ability.\n" (for LLaVA)
|
||||
# Not used in image inlining mode
|
||||
captionSystemPrompt: ""
|
||||
# -- DEEPL TRANSLATION CONFIGURATION --
|
||||
deepl:
|
||||
# Available options: default, more, less, prefer_more, prefer_less
|
||||
|
@@ -2,8 +2,6 @@
|
||||
"firstRun": true,
|
||||
"username": "User",
|
||||
"api_server": "http://127.0.0.1:5000/api",
|
||||
"api_server_textgenerationwebui": "http://127.0.0.1:5000/api",
|
||||
"api_use_mancer_webui": false,
|
||||
"preset_settings": "RecoveredRuins",
|
||||
"user_avatar": "user-default.png",
|
||||
"amount_gen": 250,
|
||||
|
@@ -121,7 +121,7 @@
|
||||
}
|
||||
|
||||
/* Add the custom checkbox */
|
||||
.select2-results__option:before {
|
||||
.select2-results__option::before {
|
||||
content: '';
|
||||
display: inline-block;
|
||||
position: absolute;
|
||||
@@ -141,11 +141,19 @@
|
||||
}
|
||||
|
||||
/* Add the custom checkbox checkmark */
|
||||
.select2-results__option--selected.select2-results__option:before {
|
||||
.select2-results__option--selected.select2-results__option::before {
|
||||
content: '\2713';
|
||||
font-weight: bold;
|
||||
color: var(--SmartThemeBodyColor);
|
||||
background-color: var(--SmartThemeBlurTintColor);
|
||||
text-align: center;
|
||||
line-height: 14px;
|
||||
}
|
||||
}
|
||||
|
||||
.select2-results__option.select2-results__message {
|
||||
background-color: inherit;
|
||||
}
|
||||
|
||||
.select2-results__option.select2-results__message::before {
|
||||
display: none;
|
||||
}
|
||||
|
39
public/img/llamacpp.svg
Normal file
39
public/img/llamacpp.svg
Normal file
@@ -0,0 +1,39 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
version="1.0"
|
||||
width="350.95343pt"
|
||||
height="433.92468pt"
|
||||
viewBox="0 0 350.95343 433.92468"
|
||||
preserveAspectRatio="xMidYMid"
|
||||
id="svg3"
|
||||
sodipodi:docname="llamacpp.svg"
|
||||
inkscape:version="1.3 (0e150ed, 2023-07-21)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<defs
|
||||
id="defs3" />
|
||||
<sodipodi:namedview
|
||||
id="namedview3"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#000000"
|
||||
borderopacity="0.25"
|
||||
inkscape:showpageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="true"
|
||||
inkscape:deskcolor="#d1d1d1"
|
||||
inkscape:document-units="pt"
|
||||
inkscape:zoom="0.61795062"
|
||||
inkscape:cx="360.87026"
|
||||
inkscape:cy="319.60482"
|
||||
inkscape:window-width="1280"
|
||||
inkscape:window-height="688"
|
||||
inkscape:window-x="0"
|
||||
inkscape:window-y="25"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="svg3" />
|
||||
<path
|
||||
id="path15"
|
||||
d="M 115.66411,7.7769089e-4 C 108.03646,-0.04647231 97.66356,2.0614588 89.22605,5.7471588 51.629188,22.170371 29.279858,72.255744 26.302778,146.75404 l -1.08171,27.05939 10.19027,-9.11895 c 16.68028,-14.92501 43.7359,-30.80127 65.967952,-38.71307 3.63963,-1.29525 7.39727,-3.00875 8.34819,-3.80665 1.00889,-0.84654 -0.84203,-6.76797 -4.44134,-14.21878 -5.75466,-11.912432 -6.12062,-13.824142 -5.45304,-28.480056 0.68369,-15.00947 1.27807,-16.84384 13.33674,-41.2326 C 128.87131,6.4869918 129.50802,4.3066778 123.92323,1.4548327 122.03009,0.48812169 119.13122,0.02222669 115.66411,7.7769089e-4 Z M 204.3319,24.868452 c -7.90831,-0.07627 -17.36177,1.199451 -23.54292,3.870384 -18.58511,8.030767 -38.06958,36.609918 -47.25132,69.305902 -2.22908,7.937702 -4.5161,15.970742 -5.08401,17.852392 -0.86974,2.88178 -0.32873,3.22525 3.43601,2.17653 2.45813,-0.68477 18.29522,-1.73488 35.1935,-2.33437 16.89826,-0.59952 30.72354,-1.40131 30.72354,-1.78192 0,-0.38061 -1.78758,-5.74168 -3.97051,-11.9117 -6.54342,-18.495036 -4.8829,-25.966506 11.1988,-50.400166 7.46265,-11.33831 13.56896,-21.480943 13.56896,-22.542378 0,-2.73047 -6.36368,-4.158497 -14.27205,-4.234674 z M 168.50212,145.23018 c -45.12449,0.0128 -76.75805,10.98462 -110.460932,38.31236 -22.62195,18.34285 -45.99259,54.10069 -54.3650997,83.1786 -4.94441,17.17201 -4.88874,65.42308 0.0924,79.37804 16.4963297,46.21663 57.3528097,79.08349 107.4639617,86.44794 32.21284,4.73407 74.8601,-2.95259 109.24245,-19.68893 l 7.20925,-3.50917 -4.64502,-17.64293 c -2.55479,-9.70397 -5.46337,-20.62804 -6.46485,-24.27571 l -1.82292,-6.63282 -14.30391,6.30496 c -22.86829,10.08133 -41.37356,13.8047 -63.89044,12.8558 -13.70887,-0.57772 -22.19455,-1.94878 -30.04268,-4.85697 -14.96555,-5.54563 -31.436082,-20.30658 -37.827792,-33.90468 -16.63575,-35.39192 -7.26602,-83.4333 21.984032,-112.712 34.5434,-34.57726 78.91103,-41.04325 127.6377,-18.6022 9.71534,4.47445 18.40283,7.701 19.30836,7.16708 1.84426,-1.08761 26.365,-41.92583 26.365,-43.91001 0,-1.77105 
-17.98211,-11.91179 -29.15193,-16.43783 -20.81281,-8.43331 -38.421,-11.4793 -66.32745,-11.47153 z m -4.7277,92.6254 v 17.13902 17.13905 h -17.96261 -17.96264 v 15.33281 15.33588 h 17.96264 17.96261 v 17.13903 17.13599 h 16.06964 16.07283 v -17.13599 -17.13903 h 17.01451 17.0178 V 287.46646 272.13365 H 212.9314 195.91689 V 254.9946 237.85558 h -16.07283 z m 121.00426,0 v 17.13902 17.13905 h -17.95945 -17.96254 v 15.33281 15.33588 h 17.96254 17.95945 v 17.13903 17.13599 h 15.12793 15.12482 v -17.13599 -17.13903 h 17.96254 17.95945 V 287.46646 272.13365 H 332.99397 315.03143 V 254.9946 237.85558 h -15.12482 z" />
|
||||
</svg>
|
After Width: | Height: | Size: 3.7 KiB |
@@ -5,8 +5,7 @@
|
||||
width="64.000000pt" height="64.000000pt" viewBox="0 0 53.000000 60.000000"
|
||||
preserveAspectRatio="xMidYMid meet">
|
||||
|
||||
<g transform="translate(0.000000,63.000000) scale(0.100000,-0.100000)"
|
||||
fill="#000000" stroke="none">
|
||||
<g transform="translate(0.000000,63.000000) scale(0.100000,-0.100000)" stroke="none">
|
||||
<path d="M40 320 l0 -240 70 0 70 0 0 95 c0 95 0 95 25 95 23 0 25 -3 25 -50
|
||||
l0 -50 70 0 70 0 0 50 c0 47 2 50 25 50 25 0 25 0 25 -95 l0 -95 70 0 70 0 0
|
||||
240 0 240 -70 0 -70 0 0 -44 0 -45 -47 -3 -48 -3 -3 -47 c-3 -43 -5 -48 -28
|
||||
|
Before Width: | Height: | Size: 731 B After Width: | Height: | Size: 716 B |
56
public/img/ollama.svg
Normal file
56
public/img/ollama.svg
Normal file
@@ -0,0 +1,56 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
version="1.0"
|
||||
width="467.388pt"
|
||||
height="618.89093pt"
|
||||
viewBox="0 0 467.388 618.89093"
|
||||
preserveAspectRatio="xMidYMid"
|
||||
id="svg5"
|
||||
sodipodi:docname="ollama.svg"
|
||||
inkscape:version="1.3 (0e150ed, 2023-07-21)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<defs
|
||||
id="defs5" />
|
||||
<sodipodi:namedview
|
||||
id="namedview5"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#000000"
|
||||
borderopacity="0.25"
|
||||
inkscape:showpageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="false"
|
||||
inkscape:deskcolor="#d1d1d1"
|
||||
inkscape:document-units="pt"
|
||||
inkscape:zoom="0.20971564"
|
||||
inkscape:cx="309.9435"
|
||||
inkscape:cy="278.94915"
|
||||
inkscape:window-width="1280"
|
||||
inkscape:window-height="688"
|
||||
inkscape:window-x="0"
|
||||
inkscape:window-y="25"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="svg5" />
|
||||
<g
|
||||
transform="matrix(0.1,0,0,-0.1,-188.01849,632.89095)"
|
||||
stroke="none"
|
||||
id="g5">
|
||||
<path
|
||||
d="m 2849,6312 c -219,-73 -378,-347 -444,-768 -34,-213 -29,-629 9,-774 l 13,-49 -105,-103 c -143,-140 -201,-210 -265,-320 -85,-145 -143,-312 -167,-477 -20,-135 -8,-404 23,-522 34,-129 78,-237 138,-337 l 50,-83 -50,-117 c -96,-227 -130,-376 -138,-618 -12,-345 48,-589 208,-854 l 21,-35 -35,-57 c -43,-72 -100,-243 -122,-368 -26,-149 -31,-393 -11,-523 10,-59 22,-121 28,-138 l 10,-29 177,2 176,3 -1,40 c 0,22 -11,76 -23,120 -42,149 -26,433 34,610 13,39 51,120 84,179 33,60 63,122 67,138 10,46 -4,109 -34,154 -15,22 -46,69 -69,103 -171,254 -206,664 -88,1017 27,80 77,185 130,274 63,105 56,178 -25,260 -138,138 -221,394 -207,634 21,357 227,680 532,833 130,66 183,77 375,78 96,0 183,4 193,9 10,5 36,45 58,90 121,242 304,391 594,484 72,23 96,26 235,26 148,0 160,-1 250,-32 281,-94 469,-249 577,-478 50,-105 54,-107 215,-99 153,8 244,-6 365,-57 143,-59 293,-181 389,-314 62,-87 130,-236 161,-351 22,-84 26,-119 26,-243 0,-124 -4,-159 -26,-242 -31,-118 -101,-257 -167,-332 -83,-95 -88,-166 -19,-277 128,-206 190,-431 191,-689 1,-277 -53,-446 -217,-684 -36,-52 -51,-114 -41,-164 4,-16 34,-78 67,-138 33,-59 71,-140 84,-178 60,-182 76,-461 34,-611 -12,-44 -23,-98 -23,-120 l -1,-40 176,-3 177,-2 11,31 c 46,134 52,474 11,683 -25,129 -78,281 -121,351 l -31,50 21,35 c 159,261 219,507 208,848 -8,252 -53,444 -155,663 l -40,86 31,49 c 59,94 119,235 150,352 29,112 31,126 31,317 1,224 -9,294 -70,472 -19,55 -34,106 -34,113 0,21 -109,198 -159,257 -26,32 -98,107 -159,167 -61,60 -109,113 -106,118 16,25 35,205 41,368 8,260 -15,478 -72,675 -88,303 -214,474 -393,534 -207,70 -405,-47 -542,-318 -75,-151 -139,-379 -156,-558 l -7,-72 -99,50 c -189,95 -399,149 -578,149 -173,0 -383,-52 -560,-138 -52,-26 -98,-48 -101,-50 -3,-1 -9,28 -13,65 -29,288 -146,595 -282,742 -121,130 -274,179 -415,133 z m 153,-374 c 119,-127 208,-471 208,-804 0,-85 -4,-112 -20,-144 -17,-34 -25,-40 -53,-40 -51,0 -267,-30 -326,-45 -30,-8 -56,-13 -58,-12 -1,2 -7,67 -14,145 -16,215 7,467 62,657 39,133 121,275 159,275 7,0 25,-14 42,-32 z m 2529,1 
c 124,-133 208,-558 179,-909 -6,-74 -13,-136 -15,-138 -2,-2 -25,3 -52,11 -39,12 -122,24 -352,50 -7,1 -22,18 -33,37 -18,32 -19,50 -15,200 8,255 53,468 132,635 34,71 93,145 115,145 7,0 25,-14 41,-31 z"
|
||||
id="path1" />
|
||||
<path
|
||||
d="m 4115,3729 c -390,-29 -735,-284 -824,-609 -26,-93 -28,-244 -5,-334 38,-149 171,-324 306,-404 85,-50 204,-99 288,-117 99,-22 453,-32 584,-16 350,41 626,253 700,538 20,78 21,240 1,318 -36,140 -144,303 -266,401 -218,174 -474,247 -784,223 z m 329,-258 c 291,-76 497,-291 500,-521 3,-227 -192,-414 -479,-460 -80,-13 -403,-13 -485,1 -212,34 -390,160 -452,319 -29,77 -29,194 1,272 79,206 278,353 544,404 97,18 269,11 371,-15 z"
|
||||
id="path2" />
|
||||
<path
|
||||
d="m 4038,3151 c -58,-52 -40,-123 47,-177 43,-27 45,-31 40,-64 -19,-120 -19,-127 8,-154 22,-22 35,-26 85,-26 91,0 123,41 103,130 -17,74 -15,83 33,113 56,35 76,66 76,116 0,32 -6,44 -31,65 -39,33 -81,33 -136,1 l -43,-24 -42,24 c -58,33 -100,32 -140,-4 z"
|
||||
id="path3" />
|
||||
<path
|
||||
d="m 2932,3664 c -107,-53 -169,-209 -128,-319 44,-115 194,-177 303,-124 89,43 153,148 153,250 0,171 -171,271 -328,193 z"
|
||||
id="path4" />
|
||||
<path
|
||||
d="m 5320,3675 c -119,-54 -165,-193 -104,-320 27,-58 88,-118 141,-141 68,-29 162,-10 227,47 86,76 97,174 35,297 -45,89 -101,125 -198,129 -44,2 -78,-2 -101,-12 z"
|
||||
id="path5" />
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 4.5 KiB |
23
public/img/tabby.svg
Normal file
23
public/img/tabby.svg
Normal file
@@ -0,0 +1,23 @@
|
||||
<?xml version="1.0" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN"
|
||||
"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
|
||||
<svg version="1.0" xmlns="http://www.w3.org/2000/svg"
|
||||
width="176.000000pt" height="176.000000pt" viewBox="0 0 176.000000 176.000000"
|
||||
preserveAspectRatio="xMidYMid meet">
|
||||
|
||||
<g transform="translate(0.000000,176.000000) scale(0.100000,-0.100000)" stroke="none">
|
||||
<path d="M197 1670 c-16 -19 -32 -58 -43 -107 -19 -87 -16 -222 11 -422 21
|
||||
-162 19 -218 -10 -306 -49 -144 -43 -332 14 -443 54 -106 160 -180 297 -207
|
||||
164 -33 202 -44 270 -77 59 -28 80 -33 144 -33 66 0 84 4 154 38 53 25 110 43
|
||||
170 53 122 21 177 38 241 74 158 90 225 282 180 515 -8 42 -21 90 -30 107 -20
|
||||
41 -19 144 1 284 9 60 17 177 17 259 1 134 -1 156 -21 206 -31 77 -50 93 -104
|
||||
85 -84 -13 -183 -89 -319 -243 l-54 -62 -75 19 c-100 26 -224 26 -321 0 l-74
|
||||
-20 -54 63 c-95 109 -182 186 -244 217 -79 39 -117 39 -150 0z m1121 -897 c2
|
||||
-18 -5 -52 -16 -76 -25 -55 -61 -73 -171 -83 l-84 -7 5 51 c7 74 45 114 138
|
||||
146 8 3 40 4 70 3 54 -2 55 -2 58 -34z m-693 16 c24 -7 55 -27 78 -51 33 -34
|
||||
37 -45 37 -88 0 -57 5 -56 -119 -40 -96 13 -136 48 -141 125 -5 64 -4 65 53
|
||||
65 28 0 70 -5 92 -11z m391 -384 c21 -28 18 -33 -31 -63 -32 -19 -48 -36 -53
|
||||
-57 -6 -23 -14 -30 -32 -30 -18 0 -26 7 -32 32 -6 24 -19 38 -48 53 -31 16
|
||||
-40 26 -40 46 0 34 27 42 134 40 73 -2 91 -6 102 -21z"/>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 1.3 KiB |
55
public/img/togetherai.svg
Normal file
55
public/img/togetherai.svg
Normal file
@@ -0,0 +1,55 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
width="32"
|
||||
height="32"
|
||||
viewBox="0 0 32 32"
|
||||
version="1.1"
|
||||
id="svg4"
|
||||
sodipodi:docname="togetherai.svg"
|
||||
inkscape:version="1.3 (0e150ed, 2023-07-21)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<sodipodi:namedview
|
||||
id="namedview4"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#000000"
|
||||
borderopacity="0.25"
|
||||
inkscape:showpageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
inkscape:deskcolor="#d1d1d1"
|
||||
inkscape:zoom="7.375"
|
||||
inkscape:cx="15.932203"
|
||||
inkscape:cy="15.932203"
|
||||
inkscape:window-width="1280"
|
||||
inkscape:window-height="688"
|
||||
inkscape:window-x="0"
|
||||
inkscape:window-y="25"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="g4" />
|
||||
<g
|
||||
clip-path="url(#clip0_542_18748)"
|
||||
id="g4">
|
||||
<path
|
||||
id="rect1"
|
||||
d="M 5.6464844 0 C 2.5180256 -1.1842331e-15 3.5527101e-15 2.5180256 0 5.6464844 L 0 26.353516 C -1.1842331e-15 29.481971 2.5180256 32 5.6464844 32 L 26.353516 32 C 29.481971 32 32 29.481971 32 26.353516 L 32 5.6464844 C 32 2.5180256 29.481971 3.5527101e-15 26.353516 0 L 5.6464844 0 z M 9.6464844 4 A 5.6470599 5.6470599 0 0 1 15.294922 9.6464844 A 5.6470599 5.6470599 0 0 1 9.6464844 15.294922 A 5.6470599 5.6470599 0 0 1 4 9.6464844 A 5.6470599 5.6470599 0 0 1 9.6464844 4 z M 22.824219 4 A 5.6470599 5.6470599 0 0 1 28.470703 9.6464844 A 5.6470599 5.6470599 0 0 1 22.824219 15.294922 A 5.6470599 5.6470599 0 0 1 17.175781 9.6464844 A 5.6470599 5.6470599 0 0 1 22.824219 4 z M 9.6464844 17.175781 A 5.6470599 5.6470599 0 0 1 15.294922 22.824219 A 5.6470599 5.6470599 0 0 1 9.6464844 28.470703 A 5.6470599 5.6470599 0 0 1 4 22.824219 A 5.6470599 5.6470599 0 0 1 9.6464844 17.175781 z M 22.824219 17.175781 A 5.6470599 5.6470599 0 0 1 28.470703 22.824219 A 5.6470599 5.6470599 0 0 1 22.824219 28.470703 A 5.6470599 5.6470599 0 0 1 17.175781 22.824219 A 5.6470599 5.6470599 0 0 1 22.824219 17.175781 z " />
|
||||
<circle
|
||||
cx="9.64706"
|
||||
cy="9.64706"
|
||||
r="5.64706"
|
||||
opacity="0.45"
|
||||
id="circle9" />
|
||||
</g>
|
||||
<defs
|
||||
id="defs4">
|
||||
<clipPath
|
||||
id="clip0_542_18748">
|
||||
<rect
|
||||
width="32"
|
||||
height="32"
|
||||
id="rect4" />
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
After Width: | Height: | Size: 2.4 KiB |
@@ -69,7 +69,7 @@
|
||||
<script type="module" src="scripts/group-chats.js"></script>
|
||||
<script type="module" src="scripts/kai-settings.js"></script>
|
||||
<script type="module" src="scripts/textgen-settings.js"></script>
|
||||
<script type="module" src="scripts/mancer-settings.js"></script>
|
||||
<script type="module" src="scripts/textgen-models.js"></script>
|
||||
<script type="module" src="scripts/bookmarks.js"></script>
|
||||
<script type="module" src="scripts/horde.js"></script>
|
||||
<script type="module" src="scripts/RossAscends-mods.js"></script>
|
||||
@@ -437,14 +437,15 @@
|
||||
Streaming</span>
|
||||
</label>
|
||||
<div class="toggle-description justifyLeft">
|
||||
<span data-i18n="Display the response bit by bit as it is generated.">Display
|
||||
the response bit by bit as it is generated.</span><br>
|
||||
<span data-i18n="When this is off, responses will be displayed all at once when they are complete.">When
|
||||
this is off, responses will be displayed all at once when they are
|
||||
complete.</span>
|
||||
<span data-i18n="Display the response bit by bit as it is generated.">
|
||||
Display the response bit by bit as it is generated.
|
||||
</span><br>
|
||||
<span data-i18n="When this is off, responses will be displayed all at once when they are complete.">
|
||||
When this is off, responses will be displayed all at once when they are complete.
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="range-block" data-source="openai,claude,windowai,openrouter,ai21,scale,makersuite,mistralai">
|
||||
<div class="range-block" data-source="openai,claude,windowai,openrouter,ai21,scale,makersuite,mistralai,custom">
|
||||
<div class="range-block-title" data-i18n="Temperature">
|
||||
Temperature
|
||||
</div>
|
||||
@@ -457,7 +458,7 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,ai21">
|
||||
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,ai21,custom">
|
||||
<div class="range-block-title" data-i18n="Frequency Penalty">
|
||||
Frequency Penalty
|
||||
</div>
|
||||
@@ -470,7 +471,7 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,ai21">
|
||||
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,ai21,custom">
|
||||
<div class="range-block-title" data-i18n="Presence Penalty">
|
||||
Presence Penalty
|
||||
</div>
|
||||
@@ -509,7 +510,7 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div data-newbie-hidden class="range-block" data-source="openai,claude,openrouter,ai21,scale,makersuite,mistralai">
|
||||
<div data-newbie-hidden class="range-block" data-source="openai,claude,openrouter,ai21,scale,makersuite,mistralai,custom">
|
||||
<div class="range-block-title" data-i18n="Top-p">
|
||||
Top P
|
||||
</div>
|
||||
@@ -546,10 +547,6 @@
|
||||
<textarea id="jailbreak_prompt_quick_edit_textarea" class="text_pole textarea_compact autoSetHeight" rows="6" placeholder="—" data-pm-prompt="jailbreak"></textarea>
|
||||
</div>
|
||||
</div>
|
||||
<div id="claude_assistant_prefill_block" data-source="claude" class="range-block">
|
||||
<span id="claude_assistant_prefill_text" data-i18n="Assistant Prefill">Assistant Prefill</span>
|
||||
<textarea id="claude_assistant_prefill" class="text_pole textarea_compact" name="assistant_prefill autoSetHeight" rows="3" maxlength="10000" data-i18n="[placeholder]Start Claude's answer with..." placeholder="Start Claude's answer with..."></textarea>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div data-newbie-hidden class="inline-drawer wide100p">
|
||||
@@ -581,7 +578,7 @@
|
||||
</div>
|
||||
<div class="toggle-description justifyLeft" data-i18n="Wraps activated World Info entries before inserting into the prompt.">
|
||||
Wraps activated World Info entries before inserting into the prompt. Use
|
||||
<tt>{0}</tt> to mark a place where the content is inserted.
|
||||
<code>{0}</code> to mark a place where the content is inserted.
|
||||
</div>
|
||||
<div class="wide100p">
|
||||
<textarea id="wi_format_textarea" class="text_pole textarea_compact autoSetHeight" rows="3" placeholder="—"></textarea>
|
||||
@@ -595,7 +592,7 @@
|
||||
</div>
|
||||
</div>
|
||||
<div class="toggle-description justifyLeft" data-i18n="Use scenario to mark a place where the content is inserted.">
|
||||
Use <tt>{{scenario}}</tt> to mark a place where the content is inserted.
|
||||
Use <code>{{scenario}}</code> to mark a place where the content is inserted.
|
||||
</div>
|
||||
<div class="wide100p">
|
||||
<textarea id="scenario_format_textarea" class="text_pole textarea_compact autoSetHeight" rows="3" placeholder="—"></textarea>
|
||||
@@ -609,7 +606,7 @@
|
||||
</div>
|
||||
</div>
|
||||
<div class="toggle-description justifyLeft" data-i18n="Use personality to mark a place where the content is inserted.">
|
||||
Use <tt>{{personality}}</tt> to mark a place where the content is inserted.
|
||||
Use <code>{{personality}}</code> to mark a place where the content is inserted.
|
||||
</div>
|
||||
<div class="wide100p">
|
||||
<textarea id="personality_format_textarea" class="text_pole textarea_compact autoSetHeight" rows="3" placeholder="—"></textarea>
|
||||
@@ -733,6 +730,9 @@
|
||||
</div>
|
||||
<div class="wide100p">
|
||||
<input id="openai_reverse_proxy" type="text" class="text_pole" placeholder="https://api.openai.com/v1" maxlength="500" />
|
||||
<small class="reverse_proxy_warning">
|
||||
Doesn't work? Try adding <code>/v1</code> at the end!
|
||||
</small>
|
||||
</div>
|
||||
</div>
|
||||
<div class="range-block" data-source="openai,claude">
|
||||
@@ -749,7 +749,7 @@
|
||||
<div id="openai_proxy_password_show" title="Peek a password" class="menu_button fa-solid fa-eye-slash fa-fw"></div>
|
||||
</div>
|
||||
</div>
|
||||
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,mistralai">
|
||||
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,mistralai,custom">
|
||||
<div class="range-block-title justifyLeft" data-i18n="Seed">
|
||||
Seed
|
||||
</div>
|
||||
@@ -978,7 +978,7 @@
|
||||
Helps to ban or reinforce the usage of certain tokens.
|
||||
</div>
|
||||
<div class="flex-container flexFlowColumn wide100p">
|
||||
<div class="novelai_logit_bias_list"></div>
|
||||
<div class="logit_bias_list"></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="range-block">
|
||||
@@ -1383,6 +1383,21 @@
|
||||
<textarea id="banned_tokens_textgenerationwebui" class="text_pole textarea_compact" name="banned_tokens_textgenerationwebui" rows="3" placeholder="Example: some text [42, 69, 1337]"></textarea>
|
||||
</div>
|
||||
</div>
|
||||
<div class="range-block wide100p">
|
||||
<div class="range-block-title title_restorable">
|
||||
<span data-i18n="Logit Bias">Logit Bias</span>
|
||||
<div id="textgen_logit_bias_new_entry" class="menu_button menu_button_icon">
|
||||
<i class="fa-xs fa-solid fa-plus"></i>
|
||||
<small data-i18n="Add">Add</small>
|
||||
</div>
|
||||
</div>
|
||||
<div class="toggle-description justifyLeft" data-i18n="Helps to ban or reenforce the usage of certain words">
|
||||
Helps to ban or reinforce the usage of certain tokens.
|
||||
</div>
|
||||
<div class="flex-container flexFlowColumn wide100p">
|
||||
<div class="logit_bias_list"></div>
|
||||
</div>
|
||||
</div>
|
||||
<div data-newbie-hidden data-forAphro=False class="wide100p">
|
||||
<hr class="width100p">
|
||||
<h4 data-i18n="CFG" class="textAlignCenter">CFG
|
||||
@@ -1480,7 +1495,18 @@
|
||||
<input id="names_in_completion" type="checkbox" /><span data-i18n="Add character names">Add character names</span>
|
||||
</label>
|
||||
<div class="toggle-description justifyLeft">
|
||||
<span data-i18n="Send names in the ChatML objects.">Send names in the ChatML objects. Helps the model to associate messages with characters.</span>
|
||||
<span data-i18n="Send names in the message objects.">Send names in the message objects. Helps the model to associate messages with characters.</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="range-block">
|
||||
<label for="continue_prefill" class="checkbox_label widthFreeExpand">
|
||||
<input id="continue_prefill" type="checkbox" />
|
||||
<span data-i18n="Continue prefill">Continue prefill</span>
|
||||
</label>
|
||||
<div class="toggle-description justifyLeft">
|
||||
<span data-i18n="Continue sends the last message.">
|
||||
Continue sends the last message as assistant role instead of system message with instruction.
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="range-block">
|
||||
@@ -1497,7 +1523,7 @@
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="range-block" data-source="openai,openrouter,makersuite">
|
||||
<div class="range-block" data-source="openai,openrouter,makersuite,custom">
|
||||
<label for="openai_image_inlining" class="checkbox_label flexWrap widthFreeExpand">
|
||||
<input id="openai_image_inlining" type="checkbox" />
|
||||
<span data-i18n="Send inline images">Send inline images</span>
|
||||
@@ -1526,10 +1552,38 @@
|
||||
</div>
|
||||
<div data-newbie-hidden class="range-block" data-source="claude">
|
||||
<label for="exclude_assistant" title="Exclude Assistant suffix" class="checkbox_label widthFreeExpand">
|
||||
<input id="exclude_assistant" type="checkbox" /><span data-i18n="Exclude Assistant suffix">Exclude Assistant suffix</span>
|
||||
<input id="exclude_assistant" type="checkbox" />
|
||||
<span data-i18n="Exclude Assistant suffix">Exclude Assistant suffix</span>
|
||||
</label>
|
||||
<div class="toggle-description justifyLeft">
|
||||
<span data-i18n="Exclude the assistant suffix from being added to the end of prompt.">Exclude the assistant suffix from being added to the end of prompt (Requires jailbreak with 'Assistant:' in it).</span>
|
||||
<span data-i18n="Exclude the assistant suffix from being added to the end of prompt.">
|
||||
Exclude the assistant suffix from being added to the end of prompt (Requires jailbreak with 'Assistant:' in it).
|
||||
</span>
|
||||
</div>
|
||||
<div id="claude_assistant_prefill_block" class="wide100p">
|
||||
<span id="claude_assistant_prefill_text" data-i18n="Assistant Prefill">Assistant Prefill</span>
|
||||
<textarea id="claude_assistant_prefill" class="text_pole textarea_compact" name="assistant_prefill autoSetHeight" rows="3" maxlength="10000" data-i18n="[placeholder]Start Claude's answer with..." placeholder="Start Claude's answer with..."></textarea>
|
||||
</div>
|
||||
<label for="claude_use_sysprompt" class="checkbox_label widthFreeExpand">
|
||||
<input id="claude_use_sysprompt" type="checkbox" />
|
||||
<span data-i18n="Use system prompt (Claude 2.1+ only)">
|
||||
Use system prompt (Claude 2.1+ only)
|
||||
</span>
|
||||
</label>
|
||||
<div class="toggle-description justifyLeft">
|
||||
<span data-i18n="Exclude the 'Human: ' prefix from being added to the beginning of the prompt.">
|
||||
Exclude the 'Human: ' prefix from being added to the beginning of the prompt.
|
||||
Instead, place it between the system prompt and the first message with the role 'assistant' (right before 'Chat History' by default).
|
||||
</span>
|
||||
</div>
|
||||
<div id="claude_human_sysprompt_message_block" class="wide100p">
|
||||
<div class="range-block-title openai_restorable">
|
||||
<span data-i18n="Human: first message">Human: first message</span>
|
||||
<div id="claude_human_sysprompt_message_restore" title="Restore Human: first message" class="right_menu_button">
|
||||
<div class="fa-solid fa-clock-rotate-left"></div>
|
||||
</div>
|
||||
</div>
|
||||
<textarea id="claude_human_sysprompt_textarea" class="text_pole textarea_compact" rows="4" maxlength="10000" data-i18n="[placeholder]Human message" placeholder="Human message, instruction, etc. Adds nothing when empty, i.e. requires a new prompt with the role 'user' or manually adding the 'Human: ' prefix."></textarea>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -1592,8 +1646,8 @@
|
||||
<option value="kobold"><span data-i18n="KoboldAI">KoboldAI Classic</span></option>
|
||||
<option value="koboldhorde"><span data-i18n="KoboldAI Horde">KoboldAI Horde</span></option>
|
||||
<option value="novel"><span data-i18n="NovelAI">NovelAI</span></option>
|
||||
<option value="textgenerationwebui"><span data-i18n="Text Completion">Text Completion (ooba, Mancer, Aphrodite, TabbyAPI, KoboldCpp)</span></option>
|
||||
<option value="openai"><span data-i18n="Chat Completion (OpenAI, Claude, Window/OpenRouter, Scale, AI21, Google MakerSuite, MistralAI)">Chat Completion (OpenAI, Claude, Window, OpenRouter, Scale, AI21, Google MakerSuite, MistralAI)</span></option>
|
||||
<option value="textgenerationwebui"><span data-i18n="Text Completion">Text Completion</span></option>
|
||||
<option value="openai"><span data-i18n="Chat Completion">Chat Completion</span></option>
|
||||
</select>
|
||||
</div>
|
||||
<div id="kobold_horde" style="position: relative;"> <!-- shows the kobold settings -->
|
||||
@@ -1723,8 +1777,29 @@
|
||||
<option value="aphrodite">Aphrodite</option>
|
||||
<option value="tabby">TabbyAPI</option>
|
||||
<option value="koboldcpp">KoboldCpp</option>
|
||||
<option value="llamacpp">llama.cpp</option>
|
||||
<option value="ollama">Ollama</option>
|
||||
<option value="togetherai">TogetherAI</option>
|
||||
</select>
|
||||
</div>
|
||||
<div data-tg-type="togetherai" class="flex-container flexFlowColumn">
|
||||
<h4 data-i18n="TogetherAI API Key">TogetherAI API Key</h4>
|
||||
<div class="flex-container">
|
||||
<input id="api_key_togetherai" name="api_key_togetherai" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
|
||||
<div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_togetherai"></div>
|
||||
</div>
|
||||
<div data-for="api_key_togetherai" class="neutral_warning">
|
||||
For privacy reasons, your API key will be hidden after you reload the page.
|
||||
</div>
|
||||
<div>
|
||||
<h4 data-i18n="TogetherAI Model">TogetherAI Model</h4>
|
||||
<select id="model_togetherai_select">
|
||||
<option>
|
||||
-- Connect to the API --
|
||||
</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<div data-tg-type="mancer" class="flex-container flexFlowColumn">
|
||||
<div class="flex-container flexFlowColumn">
|
||||
</div>
|
||||
@@ -1744,7 +1819,11 @@
|
||||
</div>
|
||||
<div class="flex1">
|
||||
<h4>Mancer Model</h4>
|
||||
<select id="mancer_model"></select>
|
||||
<select id="mancer_model">
|
||||
<option>
|
||||
-- Connect to the API --
|
||||
</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<div data-tg-type="ooba" class="flex-container flexFlowColumn">
|
||||
@@ -1753,7 +1832,7 @@
|
||||
oobabooga/text-generation-webui
|
||||
</a>
|
||||
<span data-i18n="Make sure you run it with">
|
||||
Make sure you run it with <tt>--api</tt> flag
|
||||
Make sure you run it with <code>--api</code> flag
|
||||
</span>
|
||||
</div>
|
||||
<div class="flex1">
|
||||
@@ -1783,6 +1862,44 @@
|
||||
<input id="aphrodite_api_url_text" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" data-server-history="aphrodite">
|
||||
</div>
|
||||
</div>
|
||||
<div data-tg-type="llamacpp">
|
||||
<div class="flex-container flexFlowColumn">
|
||||
<a href="https://github.com/ggerganov/llama.cpp" target="_blank">
|
||||
ggerganov/llama.cpp (inference server)
|
||||
</a>
|
||||
</div>
|
||||
<div class="flex1">
|
||||
<h4 data-i18n="API url">API URL</h4>
|
||||
<small data-i18n="Example: http://127.0.0.1:8080">Example: http://127.0.0.1:8080</small>
|
||||
<input id="llamacpp_api_url_text" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" data-server-history="llamacpp">
|
||||
</div>
|
||||
</div>
|
||||
<div data-tg-type="ollama">
|
||||
<div class="flex-container flexFlowColumn">
|
||||
<a href="https://github.com/jmorganca/ollama" target="_blank">
|
||||
jmorganca/ollama
|
||||
</a>
|
||||
</div>
|
||||
<div class="flex1">
|
||||
<h4 data-i18n="API url">API URL</h4>
|
||||
<small data-i18n="Example: http://127.0.0.1:11434">Example: http://127.0.0.1:11434</small>
|
||||
<input id="ollama_api_url_text" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" data-server-history="ollama">
|
||||
</div>
|
||||
<div class="flex1">
|
||||
<h4>
|
||||
<span data-i18n="Ollama Model">Ollama Model</h4>
|
||||
</h4>
|
||||
<select id="ollama_model">
|
||||
<option>
|
||||
-- Connect to the API --
|
||||
</option>
|
||||
</select>
|
||||
<div id="ollama_download_model" class="menu_button menu_button_icon">
|
||||
<i class="fa-solid fa-download"></i>
|
||||
<span data-i18n="Download">Download</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div data-tg-type="tabby">
|
||||
<div class="flex-container flexFlowColumn">
|
||||
<a href="https://github.com/theroyallab/tabbyAPI" target="_blank">
|
||||
@@ -1820,7 +1937,7 @@
|
||||
<div id="api_button_textgenerationwebui" class="api_button menu_button" type="submit" data-i18n="Connect" data-server-connect="ooba_blocking,aphrodite,tabby,koboldcpp">Connect</div>
|
||||
<div class="api_loading menu_button" data-i18n="Cancel">Cancel</div>
|
||||
</div>
|
||||
<label class="checkbox_label margin-bot-10px" for="legacy_api_textgenerationwebui">
|
||||
<label data-tg-type="ooba,aphrodite" class="checkbox_label margin-bot-10px" for="legacy_api_textgenerationwebui">
|
||||
<input type="checkbox" id="legacy_api_textgenerationwebui" />
|
||||
<span data-i18n="Legacy API (pre-OAI, no streaming)">Legacy API (pre-OAI, no streaming)</span>
|
||||
</label>
|
||||
@@ -1843,6 +1960,7 @@
|
||||
<option value="ai21">AI21</option>
|
||||
<option value="makersuite">Google MakerSuite</option>
|
||||
<option value="mistralai">MistralAI</option>
|
||||
<option value="custom">Custom (OpenAI-compatible)</option>
|
||||
</select>
|
||||
<form id="openai_form" data-source="openai" action="javascript:void(null);" method="post" enctype="multipart/form-data">
|
||||
<h4><span data-i18n="OpenAI API key">OpenAI API key</span></h4>
|
||||
@@ -2147,9 +2265,40 @@
|
||||
</select>
|
||||
</div>
|
||||
</form>
|
||||
<form id="custom_form" data-source="custom">
|
||||
<h4 data-i18n="Custom Endpoint (Base URL)">Custom Endpoint (Base URL)</h4>
|
||||
<div class="flex-container">
|
||||
<input id="custom_api_url_text" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" placeholder="Example: http://localhost:1234/v1">
|
||||
</div>
|
||||
<div>
|
||||
<small>
|
||||
Doesn't work? Try adding <code>/v1</code> at the end of the URL!
|
||||
</small>
|
||||
</div>
|
||||
<h4>
|
||||
<span data-i18n="Custom API Key">Custom API Key</span>
|
||||
<small>(Optional)</small>
|
||||
</h4>
|
||||
<div class="flex-container">
|
||||
<input id="api_key_custom" name="api_key_custom" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
|
||||
<div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_custom"></div>
|
||||
</div>
|
||||
<div data-for="api_key_custom" class="neutral_warning">
|
||||
For privacy reasons, your API key will be hidden after you reload the page.
|
||||
</div>
|
||||
<h4>Enter a Model ID</h4>
|
||||
<div class="flex-container">
|
||||
<input id="custom_model_id" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" placeholder="Example: gpt-3.5-turbo">
|
||||
</div>
|
||||
<h4 data-i18n="Available Models">Available Models</h4>
|
||||
<div class="flex-container">
|
||||
<select id="model_custom_select" class="text_pole"></select>
|
||||
</div>
|
||||
</form>
|
||||
<div class="flex-container flex">
|
||||
<div id="api_button_openai" class="api_button menu_button menu_button_icon" type="submit" data-i18n="Connect">Connect</div>
|
||||
<div class="api_loading menu_button" data-i18n="Cancel">Cancel</div>
|
||||
<div data-source="custom" id="customize_additional_parameters" class="menu_button menu_button_icon">Additional Parameters</div>
|
||||
<div data-source="openrouter" id="openrouter_authorize" class="menu_button menu_button_icon" title="Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai" data-i18n="[title]Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai">Authorize</div>
|
||||
<div id="test_api_button" class="menu_button menu_button_icon" title="Verifies your API connection by sending a short test message. Be aware that you'll be credited for it!" data-i18n="[title]Verifies your API connection by sending a short test message. Be aware that you'll be credited for it!">Test Message</div>
|
||||
</div>
|
||||
@@ -2266,11 +2415,15 @@
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
</h4>
|
||||
<div>
|
||||
<label for="instruct_enabled" class="checkbox_label">
|
||||
<div class="flex-container">
|
||||
<label for="instruct_enabled" class="checkbox_label flex1">
|
||||
<input id="instruct_enabled" type="checkbox" />
|
||||
<span data-i18n="Enabled">Enabled</span>
|
||||
</label>
|
||||
<label for="instruct_bind_to_context" class="checkbox_label flex1" title="If enabled, Context templates will be automatically selected based on selected Instruct template name or by preference.">
|
||||
<input id="instruct_bind_to_context" type="checkbox" />
|
||||
<span data-i18n="Bind to Context">Bind to Context</span>
|
||||
</label>
|
||||
</div>
|
||||
<label for="instruct_presets">
|
||||
<span data-i18n="Presets">Presets</span>
|
||||
@@ -3696,7 +3849,7 @@
|
||||
</div>
|
||||
<div id="rm_character_import" class="right_menu" style="display: none;">
|
||||
<form id="form_import" action="javascript:void(null);" method="post" enctype="multipart/form-data">
|
||||
<input multiple type="file" id="character_import_file" accept=".json, image/png" name="avatar">
|
||||
<input multiple type="file" id="character_import_file" accept=".json, image/png, .yaml, .yml" name="avatar">
|
||||
<input id="character_import_file_type" name="file_type" class="text_pole" maxlength="999" size="2" value="" autocomplete="off">
|
||||
</form>
|
||||
</div>
|
||||
@@ -4253,11 +4406,11 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div id="novelai_logit_bias_template" class="template_element">
|
||||
<div class="novelai_logit_bias_form">
|
||||
<input class="novelai_logit_bias_text text_pole" data-i18n="[placeholder]Type here..." placeholder="type here..." />
|
||||
<input class="novelai_logit_bias_value text_pole" type="number" min="-2" value="0" max="2" step="0.01" />
|
||||
<i class="menu_button fa-solid fa-xmark novelai_logit_bias_remove"></i>
|
||||
<div id="logit_bias_template" class="template_element">
|
||||
<div class="logit_bias_form">
|
||||
<input class="logit_bias_text text_pole" data-i18n="[placeholder]Type here..." placeholder="type here..." />
|
||||
<input class="logit_bias_value text_pole" type="number" min="-2" value="0" max="2" step="0.01" />
|
||||
<i class="menu_button fa-solid fa-xmark logit_bias_remove"></i>
|
||||
</div>
|
||||
</div>
|
||||
<div id="completion_prompt_manager_popup" class="drawer-content" style="display:none;">
|
||||
|
@@ -8,7 +8,7 @@
|
||||
"system_sequence_prefix": "[INST] <<SYS>>\n",
|
||||
"system_sequence_suffix": "\n<</SYS>>\n",
|
||||
"stop_sequence": "",
|
||||
"separator_sequence": "\n",
|
||||
"separator_sequence": " ",
|
||||
"wrap": false,
|
||||
"macro": true,
|
||||
"names": false,
|
||||
|
1
public/lib/css-parser.map
Normal file
1
public/lib/css-parser.map
Normal file
File diff suppressed because one or more lines are too long
765
public/lib/css-parser.mjs
Normal file
765
public/lib/css-parser.mjs
Normal file
@@ -0,0 +1,765 @@
|
||||
|
||||
function $parcel$defineInteropFlag(a) {
|
||||
Object.defineProperty(a, '__esModule', {value: true, configurable: true});
|
||||
}
|
||||
|
||||
function $parcel$export(e, n, v, s) {
|
||||
Object.defineProperty(e, n, {get: v, set: s, enumerable: true, configurable: true});
|
||||
}
|
||||
var $009ddb00d3ec72b8$exports = {};
|
||||
|
||||
$parcel$defineInteropFlag($009ddb00d3ec72b8$exports);
|
||||
|
||||
$parcel$export($009ddb00d3ec72b8$exports, "default", () => $009ddb00d3ec72b8$export$2e2bcd8739ae039);
|
||||
class $009ddb00d3ec72b8$export$2e2bcd8739ae039 extends Error {
|
||||
constructor(filename, msg, lineno, column, css){
|
||||
super(filename + ":" + lineno + ":" + column + ": " + msg);
|
||||
this.reason = msg;
|
||||
this.filename = filename;
|
||||
this.line = lineno;
|
||||
this.column = column;
|
||||
this.source = css;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
var $0865a9fb4cc365fe$exports = {};
|
||||
|
||||
$parcel$defineInteropFlag($0865a9fb4cc365fe$exports);
|
||||
|
||||
$parcel$export($0865a9fb4cc365fe$exports, "default", () => $0865a9fb4cc365fe$export$2e2bcd8739ae039);
|
||||
/**
|
||||
* Store position information for a node
|
||||
*/ class $0865a9fb4cc365fe$export$2e2bcd8739ae039 {
|
||||
constructor(start, end, source){
|
||||
this.start = start;
|
||||
this.end = end;
|
||||
this.source = source;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
var $b2e137848b48cf4f$exports = {};
|
||||
|
||||
$parcel$export($b2e137848b48cf4f$exports, "CssTypes", () => $b2e137848b48cf4f$export$9be5dd6e61d5d73a);
|
||||
var $b2e137848b48cf4f$export$9be5dd6e61d5d73a;
|
||||
(function(CssTypes) {
|
||||
CssTypes["stylesheet"] = "stylesheet";
|
||||
CssTypes["rule"] = "rule";
|
||||
CssTypes["declaration"] = "declaration";
|
||||
CssTypes["comment"] = "comment";
|
||||
CssTypes["container"] = "container";
|
||||
CssTypes["charset"] = "charset";
|
||||
CssTypes["document"] = "document";
|
||||
CssTypes["customMedia"] = "custom-media";
|
||||
CssTypes["fontFace"] = "font-face";
|
||||
CssTypes["host"] = "host";
|
||||
CssTypes["import"] = "import";
|
||||
CssTypes["keyframes"] = "keyframes";
|
||||
CssTypes["keyframe"] = "keyframe";
|
||||
CssTypes["layer"] = "layer";
|
||||
CssTypes["media"] = "media";
|
||||
CssTypes["namespace"] = "namespace";
|
||||
CssTypes["page"] = "page";
|
||||
CssTypes["supports"] = "supports";
|
||||
})($b2e137848b48cf4f$export$9be5dd6e61d5d73a || ($b2e137848b48cf4f$export$9be5dd6e61d5d73a = {}));
|
||||
|
||||
|
||||
// http://www.w3.org/TR/CSS21/grammar.html
|
||||
// https://github.com/visionmedia/css-parse/pull/49#issuecomment-30088027
|
||||
// New rule => https://www.w3.org/TR/CSS22/syndata.html#comments
|
||||
// [^] is equivalent to [.\n\r]
|
||||
const $d708735ed1303b43$var$commentre = /\/\*[^]*?(?:\*\/|$)/g;
|
||||
const $d708735ed1303b43$export$98e6a39c04603d36 = (css, options)=>{
|
||||
options = options || {};
|
||||
/**
|
||||
* Positional.
|
||||
*/ let lineno = 1;
|
||||
let column = 1;
|
||||
/**
|
||||
* Update lineno and column based on `str`.
|
||||
*/ function updatePosition(str) {
|
||||
const lines = str.match(/\n/g);
|
||||
if (lines) lineno += lines.length;
|
||||
const i = str.lastIndexOf("\n");
|
||||
column = ~i ? str.length - i : column + str.length;
|
||||
}
|
||||
/**
|
||||
* Mark position and patch `node.position`.
|
||||
*/ function position() {
|
||||
const start = {
|
||||
line: lineno,
|
||||
column: column
|
||||
};
|
||||
return function(node) {
|
||||
node.position = new (0, $0865a9fb4cc365fe$export$2e2bcd8739ae039)(start, {
|
||||
line: lineno,
|
||||
column: column
|
||||
}, options?.source || "");
|
||||
whitespace();
|
||||
return node;
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Error `msg`.
|
||||
*/ const errorsList = [];
|
||||
function error(msg) {
|
||||
const err = new (0, $009ddb00d3ec72b8$export$2e2bcd8739ae039)(options?.source || "", msg, lineno, column, css);
|
||||
if (options?.silent) errorsList.push(err);
|
||||
else throw err;
|
||||
}
|
||||
/**
|
||||
* Parse stylesheet.
|
||||
*/ function stylesheet() {
|
||||
const rulesList = rules();
|
||||
const result = {
|
||||
type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).stylesheet,
|
||||
stylesheet: {
|
||||
source: options?.source,
|
||||
rules: rulesList,
|
||||
parsingErrors: errorsList
|
||||
}
|
||||
};
|
||||
return result;
|
||||
}
|
||||
/**
|
||||
* Opening brace.
|
||||
*/ function open() {
|
||||
return match(/^{\s*/);
|
||||
}
|
||||
/**
|
||||
* Closing brace.
|
||||
*/ function close() {
|
||||
return match(/^}/);
|
||||
}
|
||||
/**
|
||||
* Parse ruleset.
|
||||
*/ function rules() {
|
||||
let node;
|
||||
const rules = [];
|
||||
whitespace();
|
||||
comments(rules);
|
||||
while(css.length && css.charAt(0) !== "}" && (node = atrule() || rule()))if (node) {
|
||||
rules.push(node);
|
||||
comments(rules);
|
||||
}
|
||||
return rules;
|
||||
}
|
||||
/**
|
||||
* Match `re` and return captures.
|
||||
*/ function match(re) {
|
||||
const m = re.exec(css);
|
||||
if (!m) return;
|
||||
const str = m[0];
|
||||
updatePosition(str);
|
||||
css = css.slice(str.length);
|
||||
return m;
|
||||
}
|
||||
/**
|
||||
* Parse whitespace.
|
||||
*/ function whitespace() {
|
||||
match(/^\s*/);
|
||||
}
|
||||
/**
|
||||
* Parse comments;
|
||||
*/ function comments(rules) {
|
||||
let c;
|
||||
rules = rules || [];
|
||||
while(c = comment())if (c) rules.push(c);
|
||||
return rules;
|
||||
}
|
||||
/**
|
||||
* Parse comment.
|
||||
*/ function comment() {
|
||||
const pos = position();
|
||||
if ("/" !== css.charAt(0) || "*" !== css.charAt(1)) return;
|
||||
const m = match(/^\/\*[^]*?\*\//);
|
||||
if (!m) return error("End of comment missing");
|
||||
return pos({
|
||||
type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).comment,
|
||||
comment: m[0].slice(2, -2)
|
||||
});
|
||||
}
|
||||
function findClosingParenthese(str, start, depth) {
|
||||
let ptr = start + 1;
|
||||
let found = false;
|
||||
let closeParentheses = str.indexOf(")", ptr);
|
||||
while(!found && closeParentheses !== -1){
|
||||
const nextParentheses = str.indexOf("(", ptr);
|
||||
if (nextParentheses !== -1 && nextParentheses < closeParentheses) {
|
||||
const nextSearch = findClosingParenthese(str, nextParentheses + 1, depth + 1);
|
||||
ptr = nextSearch + 1;
|
||||
closeParentheses = str.indexOf(")", ptr);
|
||||
} else found = true;
|
||||
}
|
||||
if (found && closeParentheses !== -1) return closeParentheses;
|
||||
else return -1;
|
||||
}
|
||||
/**
|
||||
* Parse selector.
|
||||
*/ function selector() {
|
||||
const m = match(/^([^{]+)/);
|
||||
if (!m) return;
|
||||
// remove comment in selector;
|
||||
let res = $d708735ed1303b43$var$trim(m[0]).replace($d708735ed1303b43$var$commentre, "");
|
||||
// Optimisation: If there is no ',' no need to split or post-process (this is less costly)
|
||||
if (res.indexOf(",") === -1) return [
|
||||
res
|
||||
];
|
||||
// Replace all the , in the parentheses by \u200C
|
||||
let ptr = 0;
|
||||
let startParentheses = res.indexOf("(", ptr);
|
||||
while(startParentheses !== -1){
|
||||
const closeParentheses = findClosingParenthese(res, startParentheses, 0);
|
||||
if (closeParentheses === -1) break;
|
||||
ptr = closeParentheses + 1;
|
||||
res = res.substring(0, startParentheses) + res.substring(startParentheses, closeParentheses).replace(/,/g, "\u200C") + res.substring(closeParentheses);
|
||||
startParentheses = res.indexOf("(", ptr);
|
||||
}
|
||||
// Replace all the , in ' and " by \u200C
|
||||
res = res/**
|
||||
* replace ',' by \u200C for data selector (div[data-lang="fr,de,us"])
|
||||
*
|
||||
* Examples:
|
||||
* div[data-lang="fr,\"de,us"]
|
||||
* div[data-lang='fr,\'de,us']
|
||||
*
|
||||
* Regex logic:
|
||||
* ("|')(?:\\\1|.)*?\1 => Handle the " and '
|
||||
*
|
||||
* Optimization 1:
|
||||
* No greedy capture (see docs about the difference between .* and .*?)
|
||||
*
|
||||
* Optimization 2:
|
||||
* ("|')(?:\\\1|.)*?\1 this use reference to capture group, it work faster.
|
||||
*/ .replace(/("|')(?:\\\1|.)*?\1/g, (m)=>m.replace(/,/g, "\u200C"));
|
||||
// Split all the left , and replace all the \u200C by ,
|
||||
return res// Split the selector by ','
|
||||
.split(",")// Replace back \u200C by ','
|
||||
.map((s)=>{
|
||||
return $d708735ed1303b43$var$trim(s.replace(/\u200C/g, ","));
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Parse declaration.
|
||||
*/ function declaration() {
|
||||
const pos = position();
|
||||
// prop
|
||||
const propMatch = match(/^(\*?[-#/*\\\w]+(\[[0-9a-z_-]+\])?)\s*/);
|
||||
if (!propMatch) return;
|
||||
const propValue = $d708735ed1303b43$var$trim(propMatch[0]);
|
||||
// :
|
||||
if (!match(/^:\s*/)) return error("property missing ':'");
|
||||
// val
|
||||
const val = match(/^((?:'(?:\\'|.)*?'|"(?:\\"|.)*?"|\([^)]*?\)|[^};])+)/);
|
||||
const ret = pos({
|
||||
type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).declaration,
|
||||
property: propValue.replace($d708735ed1303b43$var$commentre, ""),
|
||||
value: val ? $d708735ed1303b43$var$trim(val[0]).replace($d708735ed1303b43$var$commentre, "") : ""
|
||||
});
|
||||
// ;
|
||||
match(/^[;\s]*/);
|
||||
return ret;
|
||||
}
|
||||
/**
|
||||
* Parse declarations.
|
||||
*/ function declarations() {
|
||||
const decls = [];
|
||||
if (!open()) return error("missing '{'");
|
||||
comments(decls);
|
||||
// declarations
|
||||
let decl;
|
||||
while(decl = declaration())if (decl) {
|
||||
decls.push(decl);
|
||||
comments(decls);
|
||||
}
|
||||
if (!close()) return error("missing '}'");
|
||||
return decls;
|
||||
}
|
||||
/**
|
||||
* Parse keyframe.
|
||||
*/ function keyframe() {
|
||||
let m;
|
||||
const vals = [];
|
||||
const pos = position();
|
||||
while(m = match(/^((\d+\.\d+|\.\d+|\d+)%?|[a-z]+)\s*/)){
|
||||
vals.push(m[1]);
|
||||
match(/^,\s*/);
|
||||
}
|
||||
if (!vals.length) return;
|
||||
return pos({
|
||||
type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).keyframe,
|
||||
values: vals,
|
||||
declarations: declarations() || []
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Parse keyframes.
|
||||
*/ function atkeyframes() {
|
||||
const pos = position();
|
||||
const m1 = match(/^@([-\w]+)?keyframes\s*/);
|
||||
if (!m1) return;
|
||||
const vendor = m1[1];
|
||||
// identifier
|
||||
const m2 = match(/^([-\w]+)\s*/);
|
||||
if (!m2) return error("@keyframes missing name");
|
||||
const name = m2[1];
|
||||
if (!open()) return error("@keyframes missing '{'");
|
||||
let frame;
|
||||
let frames = comments();
|
||||
while(frame = keyframe()){
|
||||
frames.push(frame);
|
||||
frames = frames.concat(comments());
|
||||
}
|
||||
if (!close()) return error("@keyframes missing '}'");
|
||||
return pos({
|
||||
type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).keyframes,
|
||||
name: name,
|
||||
vendor: vendor,
|
||||
keyframes: frames
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Parse supports.
|
||||
*/ function atsupports() {
|
||||
const pos = position();
|
||||
const m = match(/^@supports *([^{]+)/);
|
||||
if (!m) return;
|
||||
const supports = $d708735ed1303b43$var$trim(m[1]);
|
||||
if (!open()) return error("@supports missing '{'");
|
||||
const style = comments().concat(rules());
|
||||
if (!close()) return error("@supports missing '}'");
|
||||
return pos({
|
||||
type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).supports,
|
||||
supports: supports,
|
||||
rules: style
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Parse host.
|
||||
*/ function athost() {
|
||||
const pos = position();
|
||||
const m = match(/^@host\s*/);
|
||||
if (!m) return;
|
||||
if (!open()) return error("@host missing '{'");
|
||||
const style = comments().concat(rules());
|
||||
if (!close()) return error("@host missing '}'");
|
||||
return pos({
|
||||
type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).host,
|
||||
rules: style
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Parse container.
|
||||
*/ function atcontainer() {
|
||||
const pos = position();
|
||||
const m = match(/^@container *([^{]+)/);
|
||||
if (!m) return;
|
||||
const container = $d708735ed1303b43$var$trim(m[1]);
|
||||
if (!open()) return error("@container missing '{'");
|
||||
const style = comments().concat(rules());
|
||||
if (!close()) return error("@container missing '}'");
|
||||
return pos({
|
||||
type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).container,
|
||||
container: container,
|
||||
rules: style
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Parse container.
|
||||
*/ function atlayer() {
|
||||
const pos = position();
|
||||
const m = match(/^@layer *([^{;@]+)/);
|
||||
if (!m) return;
|
||||
const layer = $d708735ed1303b43$var$trim(m[1]);
|
||||
if (!open()) {
|
||||
match(/^[;\s]*/);
|
||||
return pos({
|
||||
type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).layer,
|
||||
layer: layer
|
||||
});
|
||||
}
|
||||
const style = comments().concat(rules());
|
||||
if (!close()) return error("@layer missing '}'");
|
||||
return pos({
|
||||
type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).layer,
|
||||
layer: layer,
|
||||
rules: style
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Parse media.
|
||||
*/ function atmedia() {
|
||||
const pos = position();
|
||||
const m = match(/^@media *([^{]+)/);
|
||||
if (!m) return;
|
||||
const media = $d708735ed1303b43$var$trim(m[1]);
|
||||
if (!open()) return error("@media missing '{'");
|
||||
const style = comments().concat(rules());
|
||||
if (!close()) return error("@media missing '}'");
|
||||
return pos({
|
||||
type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).media,
|
||||
media: media,
|
||||
rules: style
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Parse custom-media.
|
||||
*/ function atcustommedia() {
|
||||
const pos = position();
|
||||
const m = match(/^@custom-media\s+(--\S+)\s*([^{;\s][^{;]*);/);
|
||||
if (!m) return;
|
||||
return pos({
|
||||
type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).customMedia,
|
||||
name: $d708735ed1303b43$var$trim(m[1]),
|
||||
media: $d708735ed1303b43$var$trim(m[2])
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Parse paged media.
|
||||
*/ function atpage() {
|
||||
const pos = position();
|
||||
const m = match(/^@page */);
|
||||
if (!m) return;
|
||||
const sel = selector() || [];
|
||||
if (!open()) return error("@page missing '{'");
|
||||
let decls = comments();
|
||||
// declarations
|
||||
let decl;
|
||||
while(decl = declaration()){
|
||||
decls.push(decl);
|
||||
decls = decls.concat(comments());
|
||||
}
|
||||
if (!close()) return error("@page missing '}'");
|
||||
return pos({
|
||||
type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).page,
|
||||
selectors: sel,
|
||||
declarations: decls
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Parse document.
|
||||
*/ function atdocument() {
|
||||
const pos = position();
|
||||
const m = match(/^@([-\w]+)?document *([^{]+)/);
|
||||
if (!m) return;
|
||||
const vendor = $d708735ed1303b43$var$trim(m[1]);
|
||||
const doc = $d708735ed1303b43$var$trim(m[2]);
|
||||
if (!open()) return error("@document missing '{'");
|
||||
const style = comments().concat(rules());
|
||||
if (!close()) return error("@document missing '}'");
|
||||
return pos({
|
||||
type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).document,
|
||||
document: doc,
|
||||
vendor: vendor,
|
||||
rules: style
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Parse font-face.
|
||||
*/ function atfontface() {
|
||||
const pos = position();
|
||||
const m = match(/^@font-face\s*/);
|
||||
if (!m) return;
|
||||
if (!open()) return error("@font-face missing '{'");
|
||||
let decls = comments();
|
||||
// declarations
|
||||
let decl;
|
||||
while(decl = declaration()){
|
||||
decls.push(decl);
|
||||
decls = decls.concat(comments());
|
||||
}
|
||||
if (!close()) return error("@font-face missing '}'");
|
||||
return pos({
|
||||
type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).fontFace,
|
||||
declarations: decls
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Parse import
|
||||
*/ const atimport = _compileAtrule("import");
|
||||
/**
|
||||
* Parse charset
|
||||
*/ const atcharset = _compileAtrule("charset");
|
||||
/**
|
||||
* Parse namespace
|
||||
*/ const atnamespace = _compileAtrule("namespace");
|
||||
/**
|
||||
* Parse non-block at-rules
|
||||
*/ function _compileAtrule(name) {
|
||||
const re = new RegExp("^@" + name + "\\s*((?::?[^;'\"]|\"(?:\\\\\"|[^\"])*?\"|'(?:\\\\'|[^'])*?')+)(?:;|$)");
|
||||
// ^@import\s*([^;"']|("|')(?:\\\2|.)*?\2)+(;|$)
|
||||
return function() {
|
||||
const pos = position();
|
||||
const m = match(re);
|
||||
if (!m) return;
|
||||
const ret = {
|
||||
type: name
|
||||
};
|
||||
ret[name] = m[1].trim();
|
||||
return pos(ret);
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Parse at rule.
|
||||
*/ function atrule() {
|
||||
if (css[0] !== "@") return;
|
||||
return atkeyframes() || atmedia() || atcustommedia() || atsupports() || atimport() || atcharset() || atnamespace() || atdocument() || atpage() || athost() || atfontface() || atcontainer() || atlayer();
|
||||
}
|
||||
/**
|
||||
* Parse rule.
|
||||
*/ function rule() {
|
||||
const pos = position();
|
||||
const sel = selector();
|
||||
if (!sel) return error("selector missing");
|
||||
comments();
|
||||
return pos({
|
||||
type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).rule,
|
||||
selectors: sel,
|
||||
declarations: declarations() || []
|
||||
});
|
||||
}
|
||||
return $d708735ed1303b43$var$addParent(stylesheet());
|
||||
};
|
||||
/**
|
||||
* Trim `str`.
|
||||
*/ function $d708735ed1303b43$var$trim(str) {
|
||||
return str ? str.trim() : "";
|
||||
}
|
||||
/**
|
||||
* Adds non-enumerable parent node reference to each node.
|
||||
*/ function $d708735ed1303b43$var$addParent(obj, parent) {
|
||||
const isNode = obj && typeof obj.type === "string";
|
||||
const childParent = isNode ? obj : parent;
|
||||
for(const k in obj){
|
||||
const value = obj[k];
|
||||
if (Array.isArray(value)) value.forEach((v)=>{
|
||||
$d708735ed1303b43$var$addParent(v, childParent);
|
||||
});
|
||||
else if (value && typeof value === "object") $d708735ed1303b43$var$addParent(value, childParent);
|
||||
}
|
||||
if (isNode) Object.defineProperty(obj, "parent", {
|
||||
configurable: true,
|
||||
writable: true,
|
||||
enumerable: false,
|
||||
value: parent || null
|
||||
});
|
||||
return obj;
|
||||
}
|
||||
var $d708735ed1303b43$export$2e2bcd8739ae039 = $d708735ed1303b43$export$98e6a39c04603d36;
|
||||
|
||||
|
||||
|
||||
class $de9540138ed1fd01$var$Compiler {
|
||||
constructor(options){
|
||||
this.level = 0;
|
||||
this.indentation = " ";
|
||||
this.compress = false;
|
||||
if (typeof options?.indent === "string") this.indentation = options?.indent;
|
||||
if (options?.compress) this.compress = true;
|
||||
}
|
||||
// We disable no-unused-vars for _position. We keep position for potential reintroduction of source-map
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
emit(str, _position) {
|
||||
return str;
|
||||
}
|
||||
/**
|
||||
* Increase, decrease or return current indentation.
|
||||
*/ indent(level) {
|
||||
this.level = this.level || 1;
|
||||
if (level) {
|
||||
this.level += level;
|
||||
return "";
|
||||
}
|
||||
return Array(this.level).join(this.indentation);
|
||||
}
|
||||
visit(node) {
|
||||
switch(node.type){
|
||||
case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).stylesheet:
|
||||
return this.stylesheet(node);
|
||||
case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).rule:
|
||||
return this.rule(node);
|
||||
case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).declaration:
|
||||
return this.declaration(node);
|
||||
case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).comment:
|
||||
return this.comment(node);
|
||||
case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).container:
|
||||
return this.container(node);
|
||||
case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).charset:
|
||||
return this.charset(node);
|
||||
case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).document:
|
||||
return this.document(node);
|
||||
case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).customMedia:
|
||||
return this.customMedia(node);
|
||||
case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).fontFace:
|
||||
return this.fontFace(node);
|
||||
case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).host:
|
||||
return this.host(node);
|
||||
case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).import:
|
||||
return this.import(node);
|
||||
case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).keyframes:
|
||||
return this.keyframes(node);
|
||||
case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).keyframe:
|
||||
return this.keyframe(node);
|
||||
case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).layer:
|
||||
return this.layer(node);
|
||||
case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).media:
|
||||
return this.media(node);
|
||||
case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).namespace:
|
||||
return this.namespace(node);
|
||||
case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).page:
|
||||
return this.page(node);
|
||||
case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).supports:
|
||||
return this.supports(node);
|
||||
}
|
||||
}
|
||||
mapVisit(nodes, delim) {
|
||||
let buf = "";
|
||||
delim = delim || "";
|
||||
for(let i = 0, length = nodes.length; i < length; i++){
|
||||
buf += this.visit(nodes[i]);
|
||||
if (delim && i < length - 1) buf += this.emit(delim);
|
||||
}
|
||||
return buf;
|
||||
}
|
||||
compile(node) {
|
||||
if (this.compress) return node.stylesheet.rules.map(this.visit, this).join("");
|
||||
return this.stylesheet(node);
|
||||
}
|
||||
/**
|
||||
* Visit stylesheet node.
|
||||
*/ stylesheet(node) {
|
||||
return this.mapVisit(node.stylesheet.rules, "\n\n");
|
||||
}
|
||||
/**
|
||||
* Visit comment node.
|
||||
*/ comment(node) {
|
||||
if (this.compress) return this.emit("", node.position);
|
||||
return this.emit(this.indent() + "/*" + node.comment + "*/", node.position);
|
||||
}
|
||||
/**
|
||||
* Visit container node.
|
||||
*/ container(node) {
|
||||
if (this.compress) return this.emit("@container " + node.container, node.position) + this.emit("{") + this.mapVisit(node.rules) + this.emit("}");
|
||||
return this.emit(this.indent() + "@container " + node.container, node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.rules, "\n\n") + this.emit("\n" + this.indent(-1) + this.indent() + "}");
|
||||
}
|
||||
/**
|
||||
* Visit container node.
|
||||
*/ layer(node) {
|
||||
if (this.compress) return this.emit("@layer " + node.layer, node.position) + (node.rules ? this.emit("{") + this.mapVisit(node.rules) + this.emit("}") : ";");
|
||||
return this.emit(this.indent() + "@layer " + node.layer, node.position) + (node.rules ? this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.rules, "\n\n") + this.emit("\n" + this.indent(-1) + this.indent() + "}") : ";");
|
||||
}
|
||||
/**
|
||||
* Visit import node.
|
||||
*/ import(node) {
|
||||
return this.emit("@import " + node.import + ";", node.position);
|
||||
}
|
||||
/**
|
||||
* Visit media node.
|
||||
*/ media(node) {
|
||||
if (this.compress) return this.emit("@media " + node.media, node.position) + this.emit("{") + this.mapVisit(node.rules) + this.emit("}");
|
||||
return this.emit(this.indent() + "@media " + node.media, node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.rules, "\n\n") + this.emit("\n" + this.indent(-1) + this.indent() + "}");
|
||||
}
|
||||
/**
|
||||
* Visit document node.
|
||||
*/ document(node) {
|
||||
const doc = "@" + (node.vendor || "") + "document " + node.document;
|
||||
if (this.compress) return this.emit(doc, node.position) + this.emit("{") + this.mapVisit(node.rules) + this.emit("}");
|
||||
return this.emit(doc, node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.rules, "\n\n") + this.emit(this.indent(-1) + "\n}");
|
||||
}
|
||||
/**
|
||||
* Visit charset node.
|
||||
*/ charset(node) {
|
||||
return this.emit("@charset " + node.charset + ";", node.position);
|
||||
}
|
||||
/**
|
||||
* Visit namespace node.
|
||||
*/ namespace(node) {
|
||||
return this.emit("@namespace " + node.namespace + ";", node.position);
|
||||
}
|
||||
/**
|
||||
* Visit supports node.
|
||||
*/ supports(node) {
|
||||
if (this.compress) return this.emit("@supports " + node.supports, node.position) + this.emit("{") + this.mapVisit(node.rules) + this.emit("}");
|
||||
return this.emit(this.indent() + "@supports " + node.supports, node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.rules, "\n\n") + this.emit("\n" + this.indent(-1) + this.indent() + "}");
|
||||
}
|
||||
/**
|
||||
* Visit keyframes node.
|
||||
*/ keyframes(node) {
|
||||
if (this.compress) return this.emit("@" + (node.vendor || "") + "keyframes " + node.name, node.position) + this.emit("{") + this.mapVisit(node.keyframes) + this.emit("}");
|
||||
return this.emit("@" + (node.vendor || "") + "keyframes " + node.name, node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.keyframes, "\n") + this.emit(this.indent(-1) + "}");
|
||||
}
|
||||
/**
|
||||
* Visit keyframe node.
|
||||
*/ keyframe(node) {
|
||||
const decls = node.declarations;
|
||||
if (this.compress) return this.emit(node.values.join(","), node.position) + this.emit("{") + this.mapVisit(decls) + this.emit("}");
|
||||
return this.emit(this.indent()) + this.emit(node.values.join(", "), node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(decls, "\n") + this.emit(this.indent(-1) + "\n" + this.indent() + "}\n");
|
||||
}
|
||||
/**
|
||||
* Visit page node.
|
||||
*/ page(node) {
|
||||
if (this.compress) {
|
||||
const sel = node.selectors.length ? node.selectors.join(", ") : "";
|
||||
return this.emit("@page " + sel, node.position) + this.emit("{") + this.mapVisit(node.declarations) + this.emit("}");
|
||||
}
|
||||
const sel = node.selectors.length ? node.selectors.join(", ") + " " : "";
|
||||
return this.emit("@page " + sel, node.position) + this.emit("{\n") + this.emit(this.indent(1)) + this.mapVisit(node.declarations, "\n") + this.emit(this.indent(-1)) + this.emit("\n}");
|
||||
}
|
||||
/**
|
||||
* Visit font-face node.
|
||||
*/ fontFace(node) {
|
||||
if (this.compress) return this.emit("@font-face", node.position) + this.emit("{") + this.mapVisit(node.declarations) + this.emit("}");
|
||||
return this.emit("@font-face ", node.position) + this.emit("{\n") + this.emit(this.indent(1)) + this.mapVisit(node.declarations, "\n") + this.emit(this.indent(-1)) + this.emit("\n}");
|
||||
}
|
||||
/**
|
||||
* Visit host node.
|
||||
*/ host(node) {
|
||||
if (this.compress) return this.emit("@host", node.position) + this.emit("{") + this.mapVisit(node.rules) + this.emit("}");
|
||||
return this.emit("@host", node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.rules, "\n\n") + this.emit(this.indent(-1) + "\n}");
|
||||
}
|
||||
/**
|
||||
* Visit custom-media node.
|
||||
*/ customMedia(node) {
|
||||
return this.emit("@custom-media " + node.name + " " + node.media + ";", node.position);
|
||||
}
|
||||
/**
|
||||
* Visit rule node.
|
||||
*/ rule(node) {
|
||||
const decls = node.declarations;
|
||||
if (!decls.length) return "";
|
||||
if (this.compress) return this.emit(node.selectors.join(","), node.position) + this.emit("{") + this.mapVisit(decls) + this.emit("}");
|
||||
const indent = this.indent();
|
||||
return this.emit(node.selectors.map((s)=>{
|
||||
return indent + s;
|
||||
}).join(",\n"), node.position) + this.emit(" {\n") + this.emit(this.indent(1)) + this.mapVisit(decls, "\n") + this.emit(this.indent(-1)) + this.emit("\n" + this.indent() + "}");
|
||||
}
|
||||
/**
|
||||
* Visit declaration node.
|
||||
*/ declaration(node) {
|
||||
if (this.compress) return this.emit(node.property + ":" + node.value, node.position) + this.emit(";");
|
||||
return this.emit(this.indent()) + this.emit(node.property + ": " + node.value, node.position) + this.emit(";");
|
||||
}
|
||||
}
|
||||
var $de9540138ed1fd01$export$2e2bcd8739ae039 = $de9540138ed1fd01$var$Compiler;
|
||||
|
||||
|
||||
var $fdf773ab87e20450$export$2e2bcd8739ae039 = (node, options)=>{
|
||||
const compiler = new (0, $de9540138ed1fd01$export$2e2bcd8739ae039)(options || {});
|
||||
return compiler.compile(node);
|
||||
};
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
const $149c1bd638913645$export$98e6a39c04603d36 = (0, $d708735ed1303b43$export$2e2bcd8739ae039);
|
||||
const $149c1bd638913645$export$fac44ee5b035f737 = (0, $fdf773ab87e20450$export$2e2bcd8739ae039);
|
||||
var $149c1bd638913645$export$2e2bcd8739ae039 = {
|
||||
parse: $149c1bd638913645$export$98e6a39c04603d36,
|
||||
stringify: $149c1bd638913645$export$fac44ee5b035f737
|
||||
};
|
||||
|
||||
|
||||
export {$149c1bd638913645$export$98e6a39c04603d36 as parse, $149c1bd638913645$export$fac44ee5b035f737 as stringify, $149c1bd638913645$export$2e2bcd8739ae039 as default, $b2e137848b48cf4f$export$9be5dd6e61d5d73a as CssTypes};
|
||||
//# sourceMappingURL=index.mjs.map
|
@@ -69,6 +69,27 @@ EventEmitter.prototype.emit = async function (event) {
|
||||
}
|
||||
};
|
||||
|
||||
EventEmitter.prototype.emitAndWait = function (event) {
|
||||
console.debug('Event emitted: ' + event);
|
||||
|
||||
var i, listeners, length, args = [].slice.call(arguments, 1);
|
||||
|
||||
if (typeof this.events[event] === 'object') {
|
||||
listeners = this.events[event].slice();
|
||||
length = listeners.length;
|
||||
|
||||
for (i = 0; i < length; i++) {
|
||||
try {
|
||||
listeners[i].apply(this, args);
|
||||
}
|
||||
catch (err) {
|
||||
console.error(err);
|
||||
console.trace('Error in event listener');
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
EventEmitter.prototype.once = function (event, listener) {
|
||||
this.on(event, function g () {
|
||||
this.removeListener(event, g);
|
||||
|
1548
public/script.js
1548
public/script.js
File diff suppressed because it is too large
Load Diff
@@ -4,7 +4,6 @@ import {
|
||||
online_status,
|
||||
main_api,
|
||||
api_server,
|
||||
api_server_textgenerationwebui,
|
||||
is_send_press,
|
||||
max_context,
|
||||
saveSettingsDebounced,
|
||||
@@ -18,6 +17,7 @@ import {
|
||||
eventSource,
|
||||
menu_type,
|
||||
substituteParams,
|
||||
callPopup,
|
||||
} from '../script.js';
|
||||
|
||||
import {
|
||||
@@ -34,7 +34,7 @@ import {
|
||||
import { debounce, delay, getStringHash, isValidUrl } from './utils.js';
|
||||
import { chat_completion_sources, oai_settings } from './openai.js';
|
||||
import { getTokenCount } from './tokenizers.js';
|
||||
import { textgen_types, textgenerationwebui_settings as textgen_settings } from './textgen-settings.js';
|
||||
import { textgen_types, textgenerationwebui_settings as textgen_settings, getTextGenServer } from './textgen-settings.js';
|
||||
|
||||
import Bowser from '../lib/bowser.min.js';
|
||||
|
||||
@@ -381,10 +381,12 @@ function RA_autoconnect(PrevApi) {
|
||||
}
|
||||
break;
|
||||
case 'textgenerationwebui':
|
||||
if (textgen_settings.type === textgen_types.MANCER && secret_state[SECRET_KEYS.MANCER]) {
|
||||
if ((textgen_settings.type === textgen_types.MANCER && secret_state[SECRET_KEYS.MANCER]) ||
|
||||
(textgen_settings.type === textgen_types.TOGETHERAI && secret_state[SECRET_KEYS.TOGETHERAI])
|
||||
) {
|
||||
$('#api_button_textgenerationwebui').trigger('click');
|
||||
}
|
||||
else if (api_server_textgenerationwebui && isValidUrl(api_server_textgenerationwebui)) {
|
||||
else if (isValidUrl(getTextGenServer())) {
|
||||
$('#api_button_textgenerationwebui').trigger('click');
|
||||
}
|
||||
break;
|
||||
@@ -397,6 +399,7 @@ function RA_autoconnect(PrevApi) {
|
||||
|| (secret_state[SECRET_KEYS.AI21] && oai_settings.chat_completion_source == chat_completion_sources.AI21)
|
||||
|| (secret_state[SECRET_KEYS.MAKERSUITE] && oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE)
|
||||
|| (secret_state[SECRET_KEYS.MISTRALAI] && oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI)
|
||||
|| (isValidUrl(oai_settings.custom_url) && oai_settings.chat_completion_source == chat_completion_sources.CUSTOM)
|
||||
) {
|
||||
$('#api_button_openai').trigger('click');
|
||||
}
|
||||
@@ -995,9 +998,31 @@ export function initRossMods() {
|
||||
console.debug('Accepting edits with Ctrl+Enter');
|
||||
editMesDone.trigger('click');
|
||||
} else if (is_send_press == false) {
|
||||
console.debug('Regenerating with Ctrl+Enter');
|
||||
$('#option_regenerate').click();
|
||||
$('#options').hide();
|
||||
const skipConfirmKey = 'RegenerateWithCtrlEnter';
|
||||
const skipConfirm = LoadLocalBool(skipConfirmKey);
|
||||
function doRegenerate() {
|
||||
console.debug('Regenerating with Ctrl+Enter');
|
||||
$('#option_regenerate').trigger('click');
|
||||
$('#options').hide();
|
||||
}
|
||||
if (skipConfirm) {
|
||||
doRegenerate();
|
||||
} else {
|
||||
const popupText = `
|
||||
<div class="marginBot10">Are you sure you want to regenerate the latest message?</div>
|
||||
<label class="checkbox_label justifyCenter" for="regenerateWithCtrlEnter">
|
||||
<input type="checkbox" id="regenerateWithCtrlEnter">
|
||||
Don't ask again
|
||||
</label>`;
|
||||
callPopup(popupText, 'confirm').then(result =>{
|
||||
if (!result) {
|
||||
return;
|
||||
}
|
||||
const regenerateWithCtrlEnter = $('#regenerateWithCtrlEnter').prop('checked');
|
||||
SaveLocal(skipConfirmKey, regenerateWithCtrlEnter);
|
||||
doRegenerate();
|
||||
});
|
||||
}
|
||||
} else {
|
||||
console.debug('Ctrl+Enter ignored');
|
||||
}
|
||||
|
@@ -237,6 +237,12 @@ async function convertSoloToGroupChat() {
|
||||
return;
|
||||
}
|
||||
|
||||
const confirm = await callPopup('Are you sure you want to convert this chat to a group chat?', 'confirm');
|
||||
|
||||
if (!confirm) {
|
||||
return;
|
||||
}
|
||||
|
||||
const character = characters[this_chid];
|
||||
|
||||
// Populate group required fields
|
||||
|
@@ -1,5 +1,6 @@
|
||||
// Move chat functions here from script.js (eventually)
|
||||
|
||||
import css from '../lib/css-parser.mjs';
|
||||
import {
|
||||
addCopyToCodeBlocks,
|
||||
appendMediaToMessage,
|
||||
@@ -360,6 +361,61 @@ export async function appendFileContent(message, messageText) {
|
||||
return messageText;
|
||||
}
|
||||
|
||||
/**
|
||||
* Replaces style tags in the message text with custom tags with encoded content.
|
||||
* @param {string} text
|
||||
* @returns {string} Encoded message text
|
||||
* @copyright https://github.com/kwaroran/risuAI
|
||||
*/
|
||||
export function encodeStyleTags(text) {
|
||||
const styleRegex = /<style>(.+?)<\/style>/gms;
|
||||
return text.replaceAll(styleRegex, (_, match) => {
|
||||
return `<custom-style>${escape(match)}</custom-style>`;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitizes custom style tags in the message text to prevent DOM pollution.
|
||||
* @param {string} text Message text
|
||||
* @returns {string} Sanitized message text
|
||||
* @copyright https://github.com/kwaroran/risuAI
|
||||
*/
|
||||
export function decodeStyleTags(text) {
|
||||
const styleDecodeRegex = /<custom-style>(.+?)<\/custom-style>/gms;
|
||||
|
||||
return text.replaceAll(styleDecodeRegex, (_, style) => {
|
||||
try {
|
||||
const ast = css.parse(unescape(style));
|
||||
const rules = ast?.stylesheet?.rules;
|
||||
if (rules) {
|
||||
for (const rule of rules) {
|
||||
|
||||
if (rule.type === 'rule') {
|
||||
if (rule.selectors) {
|
||||
for (let i = 0; i < rule.selectors.length; i++) {
|
||||
let selector = rule.selectors[i];
|
||||
if (selector) {
|
||||
let selectors = (selector.split(' ') ?? []).map((v) => {
|
||||
if (v.startsWith('.')) {
|
||||
return '.custom-' + v.substring(1);
|
||||
}
|
||||
return v;
|
||||
}).join(' ');
|
||||
|
||||
rule.selectors[i] = '.mes_text ' + selectors;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return `<style>${css.stringify(ast)}</style>`;
|
||||
} catch (error) {
|
||||
return `CSS ERROR: ${error}`;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
jQuery(function () {
|
||||
$(document).on('click', '.mes_hide', async function () {
|
||||
const messageBlock = $(this).closest('.mes');
|
||||
|
@@ -47,8 +47,6 @@ export function saveMetadataDebounced() {
|
||||
}, 1000);
|
||||
}
|
||||
|
||||
export const extensionsHandlebars = Handlebars.create();
|
||||
|
||||
/**
|
||||
* Provides an ability for extensions to render HTML templates.
|
||||
* Templates sanitation and localization is forced.
|
||||
@@ -61,40 +59,6 @@ export function renderExtensionTemplate(extensionName, templateId, templateData
|
||||
return renderTemplate(`scripts/extensions/${extensionName}/${templateId}.html`, templateData, sanitize, localize, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a Handlebars helper for use in extensions.
|
||||
* @param {string} name Handlebars helper name
|
||||
* @param {function} helper Handlebars helper function
|
||||
*/
|
||||
export function registerExtensionHelper(name, helper) {
|
||||
extensionsHandlebars.registerHelper(name, helper);
|
||||
}
|
||||
|
||||
/**
|
||||
* Applies handlebars extension helpers to a message.
|
||||
* @param {number} messageId Message index in the chat.
|
||||
*/
|
||||
export function processExtensionHelpers(messageId) {
|
||||
const context = getContext();
|
||||
const message = context.chat[messageId];
|
||||
|
||||
if (!message?.mes || typeof message.mes !== 'string') {
|
||||
return;
|
||||
}
|
||||
|
||||
// Don't waste time if there are no mustaches
|
||||
if (!substituteParams(message.mes).includes('{{')) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const template = extensionsHandlebars.compile(substituteParams(message.mes), { noEscape: true });
|
||||
message.mes = template({});
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
|
||||
// Disables parallel updates
|
||||
class ModuleWorkerWrapper {
|
||||
constructor(callback) {
|
||||
|
@@ -4,6 +4,7 @@ import { callPopup, getRequestHeaders, saveSettingsDebounced, substituteParams }
|
||||
import { getMessageTimeStamp } from '../../RossAscends-mods.js';
|
||||
import { SECRET_KEYS, secret_state } from '../../secrets.js';
|
||||
import { getMultimodalCaption } from '../shared.js';
|
||||
import { textgen_types, textgenerationwebui_settings } from '../../textgen-settings.js';
|
||||
export { MODULE_NAME };
|
||||
|
||||
const MODULE_NAME = 'caption';
|
||||
@@ -134,7 +135,7 @@ async function doCaptionRequest(base64Img, fileData) {
|
||||
case 'horde':
|
||||
return await captionHorde(base64Img);
|
||||
case 'multimodal':
|
||||
return await captionMultimodal(extension_settings.caption.multimodal_api === 'google' ? base64Img : fileData);
|
||||
return await captionMultimodal(fileData);
|
||||
default:
|
||||
throw new Error('Unknown caption source.');
|
||||
}
|
||||
@@ -216,7 +217,16 @@ async function captionHorde(base64Img) {
|
||||
* @returns {Promise<{caption: string}>} Generated caption
|
||||
*/
|
||||
async function captionMultimodal(base64Img) {
|
||||
const prompt = extension_settings.caption.prompt || PROMPT_DEFAULT;
|
||||
let prompt = extension_settings.caption.prompt || PROMPT_DEFAULT;
|
||||
|
||||
if (extension_settings.caption.prompt_ask) {
|
||||
const customPrompt = await callPopup('<h3>Enter a comment or question:</h3>', 'input', prompt, { rows: 2 });
|
||||
if (!customPrompt) {
|
||||
throw new Error('User aborted the caption sending.');
|
||||
}
|
||||
prompt = String(customPrompt).trim();
|
||||
}
|
||||
|
||||
const caption = await getMultimodalCaption(base64Img, prompt);
|
||||
return { caption };
|
||||
}
|
||||
@@ -271,9 +281,12 @@ jQuery(function () {
|
||||
$(sendButton).on('click', () => {
|
||||
const hasCaptionModule =
|
||||
(modules.includes('caption') && extension_settings.caption.source === 'extras') ||
|
||||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openai' && secret_state[SECRET_KEYS.OPENAI]) ||
|
||||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openai' && (secret_state[SECRET_KEYS.OPENAI] || extension_settings.caption.allow_reverse_proxy)) ||
|
||||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openrouter' && secret_state[SECRET_KEYS.OPENROUTER]) ||
|
||||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'google' && secret_state[SECRET_KEYS.MAKERSUITE]) ||
|
||||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'ollama' && textgenerationwebui_settings.server_urls[textgen_types.OLLAMA]) ||
|
||||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'llamacpp' && textgenerationwebui_settings.server_urls[textgen_types.LLAMACPP]) ||
|
||||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'custom') ||
|
||||
extension_settings.caption.source === 'local' ||
|
||||
extension_settings.caption.source === 'horde';
|
||||
|
||||
@@ -300,7 +313,7 @@ jQuery(function () {
|
||||
$('#caption_prompt_block').toggle(isMultimodal);
|
||||
$('#caption_multimodal_api').val(extension_settings.caption.multimodal_api);
|
||||
$('#caption_multimodal_model').val(extension_settings.caption.multimodal_model);
|
||||
$('#caption_multimodal_model option').each(function () {
|
||||
$('#caption_multimodal_block [data-type]').each(function () {
|
||||
const type = $(this).data('type');
|
||||
$(this).toggle(type === extension_settings.caption.multimodal_api);
|
||||
});
|
||||
@@ -329,7 +342,7 @@ jQuery(function () {
|
||||
<label for="caption_source">Source</label>
|
||||
<select id="caption_source" class="text_pole">
|
||||
<option value="local">Local</option>
|
||||
<option value="multimodal">Multimodal (OpenAI / OpenRouter / Google)</option>
|
||||
<option value="multimodal">Multimodal (OpenAI / llama / Google)</option>
|
||||
<option value="extras">Extras</option>
|
||||
<option value="horde">Horde</option>
|
||||
</select>
|
||||
@@ -337,9 +350,12 @@ jQuery(function () {
|
||||
<div class="flex1 flex-container flexFlowColumn flexNoGap">
|
||||
<label for="caption_multimodal_api">API</label>
|
||||
<select id="caption_multimodal_api" class="flex1 text_pole">
|
||||
<option value="llamacpp">llama.cpp</option>
|
||||
<option value="ollama">Ollama</option>
|
||||
<option value="openai">OpenAI</option>
|
||||
<option value="openrouter">OpenRouter</option>
|
||||
<option value="google">Google</option>
|
||||
<option value="google">Google MakerSuite</option>
|
||||
<option value="custom">Custom (OpenAI-compatible)</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="flex1 flex-container flexFlowColumn flexNoGap">
|
||||
@@ -349,12 +365,28 @@ jQuery(function () {
|
||||
<option data-type="google" value="gemini-pro-vision">gemini-pro-vision</option>
|
||||
<option data-type="openrouter" value="openai/gpt-4-vision-preview">openai/gpt-4-vision-preview</option>
|
||||
<option data-type="openrouter" value="haotian-liu/llava-13b">haotian-liu/llava-13b</option>
|
||||
<option data-type="ollama" value="ollama_current">[Currently selected]</option>
|
||||
<option data-type="ollama" value="bakllava:latest">bakllava:latest</option>
|
||||
<option data-type="ollama" value="llava:latest">llava:latest</option>
|
||||
<option data-type="llamacpp" value="llamacpp_current">[Currently loaded]</option>
|
||||
<option data-type="custom" value="custom_current">[Currently selected]</option>
|
||||
</select>
|
||||
</div>
|
||||
<label data-type="openai" class="checkbox_label flexBasis100p" for="caption_allow_reverse_proxy" title="Allow using reverse proxy if defined and valid.">
|
||||
<input id="caption_allow_reverse_proxy" type="checkbox" class="checkbox">
|
||||
Allow reverse proxy
|
||||
</label>
|
||||
<div class="flexBasis100p m-b-1">
|
||||
<small><b>Hint:</b> Set your API keys and endpoints in the 'API Connections' tab first.</small>
|
||||
</div>
|
||||
</div>
|
||||
<div id="caption_prompt_block">
|
||||
<label for="caption_prompt">Caption Prompt</label>
|
||||
<textarea id="caption_prompt" class="text_pole" rows="1" placeholder="< Use default >">${PROMPT_DEFAULT}</textarea>
|
||||
<label class="checkbox_label margin-bot-10px" for="caption_prompt_ask" title="Ask for a custom prompt every time an image is captioned.">
|
||||
<input id="caption_prompt_ask" type="checkbox" class="checkbox">
|
||||
Ask every time
|
||||
</label>
|
||||
</div>
|
||||
<label for="caption_template">Message Template <small>(use <code>{{caption}}</code> macro)</small></label>
|
||||
<textarea id="caption_template" class="text_pole" rows="2" placeholder="< Use default >">${TEMPLATE_DEFAULT}</textarea>
|
||||
@@ -377,6 +409,8 @@ jQuery(function () {
|
||||
switchMultimodalBlocks();
|
||||
|
||||
$('#caption_refine_mode').prop('checked', !!(extension_settings.caption.refine_mode));
|
||||
$('#caption_allow_reverse_proxy').prop('checked', !!(extension_settings.caption.allow_reverse_proxy));
|
||||
$('#caption_prompt_ask').prop('checked', !!(extension_settings.caption.prompt_ask));
|
||||
$('#caption_source').val(extension_settings.caption.source);
|
||||
$('#caption_prompt').val(extension_settings.caption.prompt);
|
||||
$('#caption_template').val(extension_settings.caption.template);
|
||||
@@ -394,4 +428,12 @@ jQuery(function () {
|
||||
extension_settings.caption.template = String($('#caption_template').val());
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
$('#caption_allow_reverse_proxy').on('input', () => {
|
||||
extension_settings.caption.allow_reverse_proxy = $('#caption_allow_reverse_proxy').prop('checked');
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
$('#caption_prompt_ask').on('input', () => {
|
||||
extension_settings.caption.prompt_ask = $('#caption_prompt_ask').prop('checked');
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
});
|
||||
|
@@ -992,8 +992,7 @@ async function getExpressionsList() {
|
||||
}
|
||||
|
||||
const result = await resolveExpressionsList();
|
||||
result.push(...extension_settings.expressions.custom);
|
||||
return result;
|
||||
return [...result, ...extension_settings.expressions.custom];
|
||||
}
|
||||
|
||||
async function setExpression(character, expression, force) {
|
||||
|
@@ -5,6 +5,7 @@ import { is_group_generating, selected_group } from '../../group-chats.js';
|
||||
import { registerSlashCommand } from '../../slash-commands.js';
|
||||
import { loadMovingUIState } from '../../power-user.js';
|
||||
import { dragElement } from '../../RossAscends-mods.js';
|
||||
import { getTextTokens, tokenizers } from '../../tokenizers.js';
|
||||
export { MODULE_NAME };
|
||||
|
||||
const MODULE_NAME = '1_memory';
|
||||
@@ -42,26 +43,6 @@ const defaultPrompt = '[Pause your roleplay. Summarize the most important facts
|
||||
const defaultTemplate = '[Summary: {{summary}}]';
|
||||
|
||||
const defaultSettings = {
|
||||
minLongMemory: 16,
|
||||
maxLongMemory: 1024,
|
||||
longMemoryLength: 128,
|
||||
shortMemoryLength: 512,
|
||||
minShortMemory: 128,
|
||||
maxShortMemory: 1024,
|
||||
shortMemoryStep: 16,
|
||||
longMemoryStep: 8,
|
||||
repetitionPenaltyStep: 0.05,
|
||||
repetitionPenalty: 1.2,
|
||||
maxRepetitionPenalty: 2.0,
|
||||
minRepetitionPenalty: 1.0,
|
||||
temperature: 1.0,
|
||||
minTemperature: 0.1,
|
||||
maxTemperature: 2.0,
|
||||
temperatureStep: 0.05,
|
||||
lengthPenalty: 1,
|
||||
minLengthPenalty: -4,
|
||||
maxLengthPenalty: 4,
|
||||
lengthPenaltyStep: 0.1,
|
||||
memoryFrozen: false,
|
||||
SkipWIAN: false,
|
||||
source: summary_sources.extras,
|
||||
@@ -95,11 +76,6 @@ function loadSettings() {
|
||||
}
|
||||
|
||||
$('#summary_source').val(extension_settings.memory.source).trigger('change');
|
||||
$('#memory_long_length').val(extension_settings.memory.longMemoryLength).trigger('input');
|
||||
$('#memory_short_length').val(extension_settings.memory.shortMemoryLength).trigger('input');
|
||||
$('#memory_repetition_penalty').val(extension_settings.memory.repetitionPenalty).trigger('input');
|
||||
$('#memory_temperature').val(extension_settings.memory.temperature).trigger('input');
|
||||
$('#memory_length_penalty').val(extension_settings.memory.lengthPenalty).trigger('input');
|
||||
$('#memory_frozen').prop('checked', extension_settings.memory.memoryFrozen).trigger('input');
|
||||
$('#memory_skipWIAN').prop('checked', extension_settings.memory.SkipWIAN).trigger('input');
|
||||
$('#memory_prompt').val(extension_settings.memory.prompt).trigger('input');
|
||||
@@ -126,51 +102,6 @@ function switchSourceControls(value) {
|
||||
});
|
||||
}
|
||||
|
||||
function onMemoryShortInput() {
|
||||
const value = $(this).val();
|
||||
extension_settings.memory.shortMemoryLength = Number(value);
|
||||
$('#memory_short_length_tokens').text(value);
|
||||
saveSettingsDebounced();
|
||||
|
||||
// Don't let long buffer be bigger than short
|
||||
if (extension_settings.memory.longMemoryLength > extension_settings.memory.shortMemoryLength) {
|
||||
$('#memory_long_length').val(extension_settings.memory.shortMemoryLength).trigger('input');
|
||||
}
|
||||
}
|
||||
|
||||
function onMemoryLongInput() {
|
||||
const value = $(this).val();
|
||||
extension_settings.memory.longMemoryLength = Number(value);
|
||||
$('#memory_long_length_tokens').text(value);
|
||||
saveSettingsDebounced();
|
||||
|
||||
// Don't let long buffer be bigger than short
|
||||
if (extension_settings.memory.longMemoryLength > extension_settings.memory.shortMemoryLength) {
|
||||
$('#memory_short_length').val(extension_settings.memory.longMemoryLength).trigger('input');
|
||||
}
|
||||
}
|
||||
|
||||
function onMemoryRepetitionPenaltyInput() {
|
||||
const value = $(this).val();
|
||||
extension_settings.memory.repetitionPenalty = Number(value);
|
||||
$('#memory_repetition_penalty_value').text(extension_settings.memory.repetitionPenalty.toFixed(2));
|
||||
saveSettingsDebounced();
|
||||
}
|
||||
|
||||
function onMemoryTemperatureInput() {
|
||||
const value = $(this).val();
|
||||
extension_settings.memory.temperature = Number(value);
|
||||
$('#memory_temperature_value').text(extension_settings.memory.temperature.toFixed(2));
|
||||
saveSettingsDebounced();
|
||||
}
|
||||
|
||||
function onMemoryLengthPenaltyInput() {
|
||||
const value = $(this).val();
|
||||
extension_settings.memory.lengthPenalty = Number(value);
|
||||
$('#memory_length_penalty_value').text(extension_settings.memory.lengthPenalty.toFixed(2));
|
||||
saveSettingsDebounced();
|
||||
}
|
||||
|
||||
function onMemoryFrozenInput() {
|
||||
const value = Boolean($(this).prop('checked'));
|
||||
extension_settings.memory.memoryFrozen = value;
|
||||
@@ -444,33 +375,36 @@ async function summarizeChatExtras(context) {
|
||||
const longMemory = getLatestMemoryFromChat(chat);
|
||||
const reversedChat = chat.slice().reverse();
|
||||
reversedChat.shift();
|
||||
let memoryBuffer = [];
|
||||
const memoryBuffer = [];
|
||||
const CONTEXT_SIZE = 1024 - 64;
|
||||
|
||||
for (let mes of reversedChat) {
|
||||
for (const message of reversedChat) {
|
||||
// we reached the point of latest memory
|
||||
if (longMemory && mes.extra && mes.extra.memory == longMemory) {
|
||||
if (longMemory && message.extra && message.extra.memory == longMemory) {
|
||||
break;
|
||||
}
|
||||
|
||||
// don't care about system
|
||||
if (mes.is_system) {
|
||||
if (message.is_system) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// determine the sender's name
|
||||
const name = mes.is_user ? (context.name1 ?? 'You') : (mes.force_avatar ? mes.name : context.name2);
|
||||
const entry = `${name}:\n${mes['mes']}`;
|
||||
const entry = `${message.name}:\n${message.mes}`;
|
||||
memoryBuffer.push(entry);
|
||||
|
||||
// check if token limit was reached
|
||||
if (context.getTokenCount(getMemoryString()) >= extension_settings.memory.shortMemoryLength) {
|
||||
const tokens = getTextTokens(tokenizers.GPT2, getMemoryString()).length;
|
||||
if (tokens >= CONTEXT_SIZE) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
const resultingString = getMemoryString();
|
||||
const resultingTokens = getTextTokens(tokenizers.GPT2, resultingString).length;
|
||||
|
||||
if (context.getTokenCount(resultingString) < extension_settings.memory.shortMemoryLength) {
|
||||
if (!resultingString || resultingTokens < CONTEXT_SIZE) {
|
||||
console.debug('Not enough context to summarize');
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -488,13 +422,7 @@ async function summarizeChatExtras(context) {
|
||||
},
|
||||
body: JSON.stringify({
|
||||
text: resultingString,
|
||||
params: {
|
||||
min_length: extension_settings.memory.longMemoryLength * 0, // testing how it behaves 0 min length
|
||||
max_length: extension_settings.memory.longMemoryLength,
|
||||
repetition_penalty: extension_settings.memory.repetitionPenalty,
|
||||
temperature: extension_settings.memory.temperature,
|
||||
length_penalty: extension_settings.memory.lengthPenalty,
|
||||
},
|
||||
params: {},
|
||||
}),
|
||||
});
|
||||
|
||||
@@ -623,11 +551,6 @@ function setupListeners() {
|
||||
//setup shared listeners for popout and regular ext menu
|
||||
$('#memory_restore').off('click').on('click', onMemoryRestoreClick);
|
||||
$('#memory_contents').off('click').on('input', onMemoryContentInput);
|
||||
$('#memory_long_length').off('click').on('input', onMemoryLongInput);
|
||||
$('#memory_short_length').off('click').on('input', onMemoryShortInput);
|
||||
$('#memory_repetition_penalty').off('click').on('input', onMemoryRepetitionPenaltyInput);
|
||||
$('#memory_temperature').off('click').on('input', onMemoryTemperatureInput);
|
||||
$('#memory_length_penalty').off('click').on('input', onMemoryLengthPenaltyInput);
|
||||
$('#memory_frozen').off('click').on('input', onMemoryFrozenInput);
|
||||
$('#memory_skipWIAN').off('click').on('input', onMemorySkipWIANInput);
|
||||
$('#summary_source').off('click').on('change', onSummarySourceChange);
|
||||
@@ -720,18 +643,6 @@ jQuery(function () {
|
||||
<input id="memory_prompt_words_force" type="range" value="${defaultSettings.promptForceWords}" min="${defaultSettings.promptMinForceWords}" max="${defaultSettings.promptMaxForceWords}" step="${defaultSettings.promptForceWordsStep}" />
|
||||
<small>If both sliders are non-zero, then both will trigger summary updates a their respective intervals.</small>
|
||||
</div>
|
||||
<div data-source="extras">
|
||||
<label for="memory_short_length">Chat to Summarize buffer length (<span id="memory_short_length_tokens"></span> tokens)</label>
|
||||
<input id="memory_short_length" type="range" value="${defaultSettings.shortMemoryLength}" min="${defaultSettings.minShortMemory}" max="${defaultSettings.maxShortMemory}" step="${defaultSettings.shortMemoryStep}" />
|
||||
<label for="memory_long_length">Summary output length (<span id="memory_long_length_tokens"></span> tokens)</label>
|
||||
<input id="memory_long_length" type="range" value="${defaultSettings.longMemoryLength}" min="${defaultSettings.minLongMemory}" max="${defaultSettings.maxLongMemory}" step="${defaultSettings.longMemoryStep}" />
|
||||
<label for="memory_temperature">Temperature (<span id="memory_temperature_value"></span>)</label>
|
||||
<input id="memory_temperature" type="range" value="${defaultSettings.temperature}" min="${defaultSettings.minTemperature}" max="${defaultSettings.maxTemperature}" step="${defaultSettings.temperatureStep}" />
|
||||
<label for="memory_repetition_penalty">Repetition penalty (<span id="memory_repetition_penalty_value"></span>)</label>
|
||||
<input id="memory_repetition_penalty" type="range" value="${defaultSettings.repetitionPenalty}" min="${defaultSettings.minRepetitionPenalty}" max="${defaultSettings.maxRepetitionPenalty}" step="${defaultSettings.repetitionPenaltyStep}" />
|
||||
<label for="memory_length_penalty">Length preference <small>[higher = longer summaries]</small> (<span id="memory_length_penalty_value"></span>)</label>
|
||||
<input id="memory_length_penalty" type="range" value="${defaultSettings.lengthPenalty}" min="${defaultSettings.minLengthPenalty}" max="${defaultSettings.maxLengthPenalty}" step="${defaultSettings.lengthPenaltyStep}" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
@@ -798,6 +798,10 @@ async function qrDeleteCallback(args, label) {
|
||||
}
|
||||
|
||||
const idx = preset.quickReplySlots.findIndex(x => x.label == label);
|
||||
if (idx === -1) {
|
||||
toastr.warning('Confirm you are using proper case sensitivity!', `QR with label '${label}' not found`);
|
||||
return '';
|
||||
};
|
||||
preset.quickReplySlots.splice(idx, 1);
|
||||
preset.numberOfSlots--;
|
||||
await fetch('/savequickreply', {
|
||||
|
@@ -1,7 +1,9 @@
|
||||
import { getRequestHeaders } from '../../script.js';
|
||||
import { extension_settings } from '../extensions.js';
|
||||
import { oai_settings } from '../openai.js';
|
||||
import { SECRET_KEYS, secret_state } from '../secrets.js';
|
||||
import { createThumbnail } from '../utils.js';
|
||||
import { textgen_types, textgenerationwebui_settings } from '../textgen-settings.js';
|
||||
import { createThumbnail, isValidUrl } from '../utils.js';
|
||||
|
||||
/**
|
||||
* Generates a caption for an image using a multimodal model.
|
||||
@@ -10,6 +12,99 @@ import { createThumbnail } from '../utils.js';
|
||||
* @returns {Promise<string>} Generated caption
|
||||
*/
|
||||
export async function getMultimodalCaption(base64Img, prompt) {
|
||||
throwIfInvalidModel();
|
||||
|
||||
const noPrefix = ['google', 'ollama', 'llamacpp'].includes(extension_settings.caption.multimodal_api);
|
||||
|
||||
if (noPrefix && base64Img.startsWith('data:image/')) {
|
||||
base64Img = base64Img.split(',')[1];
|
||||
}
|
||||
|
||||
// OpenRouter has a payload limit of ~2MB. Google is 4MB, but we love democracy.
|
||||
const isGoogle = extension_settings.caption.multimodal_api === 'google';
|
||||
const isOllama = extension_settings.caption.multimodal_api === 'ollama';
|
||||
const isLlamaCpp = extension_settings.caption.multimodal_api === 'llamacpp';
|
||||
const isCustom = extension_settings.caption.multimodal_api === 'custom';
|
||||
const base64Bytes = base64Img.length * 0.75;
|
||||
const compressionLimit = 2 * 1024 * 1024;
|
||||
if (['google', 'openrouter'].includes(extension_settings.caption.multimodal_api) && base64Bytes > compressionLimit) {
|
||||
const maxSide = 1024;
|
||||
base64Img = await createThumbnail(base64Img, maxSide, maxSide, 'image/jpeg');
|
||||
|
||||
if (isGoogle) {
|
||||
base64Img = base64Img.split(',')[1];
|
||||
}
|
||||
}
|
||||
|
||||
const useReverseProxy =
|
||||
extension_settings.caption.multimodal_api === 'openai'
|
||||
&& extension_settings.caption.allow_reverse_proxy
|
||||
&& oai_settings.reverse_proxy
|
||||
&& isValidUrl(oai_settings.reverse_proxy);
|
||||
|
||||
const proxyUrl = useReverseProxy ? oai_settings.reverse_proxy : '';
|
||||
const proxyPassword = useReverseProxy ? oai_settings.proxy_password : '';
|
||||
|
||||
const requestBody = {
|
||||
image: base64Img,
|
||||
prompt: prompt,
|
||||
};
|
||||
|
||||
if (!isGoogle) {
|
||||
requestBody.api = extension_settings.caption.multimodal_api || 'openai';
|
||||
requestBody.model = extension_settings.caption.multimodal_model || 'gpt-4-vision-preview';
|
||||
requestBody.reverse_proxy = proxyUrl;
|
||||
requestBody.proxy_password = proxyPassword;
|
||||
}
|
||||
|
||||
if (isOllama) {
|
||||
if (extension_settings.caption.multimodal_model === 'ollama_current') {
|
||||
requestBody.model = textgenerationwebui_settings.ollama_model;
|
||||
}
|
||||
|
||||
requestBody.server_url = textgenerationwebui_settings.server_urls[textgen_types.OLLAMA];
|
||||
}
|
||||
|
||||
if (isLlamaCpp) {
|
||||
requestBody.server_url = textgenerationwebui_settings.server_urls[textgen_types.LLAMACPP];
|
||||
}
|
||||
|
||||
if (isCustom) {
|
||||
requestBody.server_url = oai_settings.custom_url;
|
||||
requestBody.model = oai_settings.custom_model || 'gpt-4-vision-preview';
|
||||
requestBody.custom_include_headers = oai_settings.custom_include_headers;
|
||||
requestBody.custom_include_body = oai_settings.custom_include_body;
|
||||
requestBody.custom_exclude_body = oai_settings.custom_exclude_body;
|
||||
}
|
||||
|
||||
function getEndpointUrl() {
|
||||
switch (extension_settings.caption.multimodal_api) {
|
||||
case 'google':
|
||||
return '/api/google/caption-image';
|
||||
case 'llamacpp':
|
||||
return '/api/backends/text-completions/llamacpp/caption-image';
|
||||
case 'ollama':
|
||||
return '/api/backends/text-completions/ollama/caption-image';
|
||||
default:
|
||||
return '/api/openai/caption-image';
|
||||
}
|
||||
}
|
||||
|
||||
const apiResult = await fetch(getEndpointUrl(), {
|
||||
method: 'POST',
|
||||
headers: getRequestHeaders(),
|
||||
body: JSON.stringify(requestBody),
|
||||
});
|
||||
|
||||
if (!apiResult.ok) {
|
||||
throw new Error('Failed to caption image via Multimodal API.');
|
||||
}
|
||||
|
||||
const { caption } = await apiResult.json();
|
||||
return String(caption).trim();
|
||||
}
|
||||
|
||||
function throwIfInvalidModel() {
|
||||
if (extension_settings.caption.multimodal_api === 'openai' && !secret_state[SECRET_KEYS.OPENAI]) {
|
||||
throw new Error('OpenAI API key is not set.');
|
||||
}
|
||||
@@ -22,38 +117,19 @@ export async function getMultimodalCaption(base64Img, prompt) {
|
||||
throw new Error('MakerSuite API key is not set.');
|
||||
}
|
||||
|
||||
// OpenRouter has a payload limit of ~2MB. Google is 4MB, but we love democracy.
|
||||
const isGoogle = extension_settings.caption.multimodal_api === 'google';
|
||||
const base64Bytes = base64Img.length * 0.75;
|
||||
const compressionLimit = 2 * 1024 * 1024;
|
||||
if (['google', 'openrouter'].includes(extension_settings.caption.multimodal_api) && base64Bytes > compressionLimit) {
|
||||
const maxSide = 1024;
|
||||
base64Img = await createThumbnail(base64Img, maxSide, maxSide, 'image/jpeg');
|
||||
|
||||
if (isGoogle) {
|
||||
base64Img = base64Img.split(',')[1];
|
||||
}
|
||||
if (extension_settings.caption.multimodal_api === 'ollama' && !textgenerationwebui_settings.server_urls[textgen_types.OLLAMA]) {
|
||||
throw new Error('Ollama server URL is not set.');
|
||||
}
|
||||
|
||||
const apiResult = await fetch(`/api/${isGoogle ? 'google' : 'openai'}/caption-image`, {
|
||||
method: 'POST',
|
||||
headers: getRequestHeaders(),
|
||||
body: JSON.stringify({
|
||||
image: base64Img,
|
||||
prompt: prompt,
|
||||
...(isGoogle
|
||||
? {}
|
||||
: {
|
||||
api: extension_settings.caption.multimodal_api || 'openai',
|
||||
model: extension_settings.caption.multimodal_model || 'gpt-4-vision-preview',
|
||||
}),
|
||||
}),
|
||||
});
|
||||
|
||||
if (!apiResult.ok) {
|
||||
throw new Error('Failed to caption image via OpenAI.');
|
||||
if (extension_settings.caption.multimodal_api === 'ollama' && extension_settings.caption.multimodal_model === 'ollama_current' && !textgenerationwebui_settings.ollama_model) {
|
||||
throw new Error('Ollama model is not set.');
|
||||
}
|
||||
|
||||
const { caption } = await apiResult.json();
|
||||
return caption;
|
||||
if (extension_settings.caption.multimodal_api === 'llamacpp' && !textgenerationwebui_settings.server_urls[textgen_types.LLAMACPP]) {
|
||||
throw new Error('LlamaCPP server URL is not set.');
|
||||
}
|
||||
|
||||
if (extension_settings.caption.multimodal_api === 'custom' && !oai_settings.custom_url) {
|
||||
throw new Error('Custom API URL is not set.');
|
||||
}
|
||||
}
|
||||
|
@@ -25,6 +25,12 @@
|
||||
<a href="javascript:;" class="notes-link"><span class="note-link-span" title="Will generate a new random seed in SillyTavern that is then used in the ComfyUI workflow.">?</span></a>
|
||||
</li>
|
||||
</ul>
|
||||
<div>Custom</div>
|
||||
<div class="sd_comfy_workflow_editor_placeholder_actions">
|
||||
<span id="sd_comfy_workflow_editor_placeholder_add" title="Add custom placeholder">+</span>
|
||||
</div>
|
||||
<ul class="sd_comfy_workflow_editor_placeholder_list" id="sd_comfy_workflow_editor_placeholder_list_custom">
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
@@ -16,6 +16,7 @@ import {
|
||||
user_avatar,
|
||||
getCharacterAvatar,
|
||||
formatCharacterAvatar,
|
||||
substituteParams,
|
||||
} from '../../../script.js';
|
||||
import { getApiUrl, getContext, extension_settings, doExtrasFetch, modules, renderExtensionTemplate } from '../../extensions.js';
|
||||
import { selected_group } from '../../group-chats.js';
|
||||
@@ -24,6 +25,7 @@ import { getMessageTimeStamp, humanizedDateTime } from '../../RossAscends-mods.j
|
||||
import { SECRET_KEYS, secret_state } from '../../secrets.js';
|
||||
import { getNovelUnlimitedImageGeneration, getNovelAnlas, loadNovelSubscriptionData } from '../../nai-settings.js';
|
||||
import { getMultimodalCaption } from '../shared.js';
|
||||
import { registerSlashCommand } from '../../slash-commands.js';
|
||||
export { MODULE_NAME };
|
||||
|
||||
// Wraps a string into monospace font-face span
|
||||
@@ -44,6 +46,7 @@ const sources = {
|
||||
vlad: 'vlad',
|
||||
openai: 'openai',
|
||||
comfy: 'comfy',
|
||||
togetherai: 'togetherai',
|
||||
};
|
||||
|
||||
const generationMode = {
|
||||
@@ -830,6 +833,16 @@ function onComfyWorkflowChange() {
|
||||
extension_settings.sd.comfy_workflow = $('#sd_comfy_workflow').find(':selected').val();
|
||||
saveSettingsDebounced();
|
||||
}
|
||||
async function changeComfyWorkflow(_, name) {
|
||||
name = name.replace(/(\.json)?$/i, '.json');
|
||||
if ($(`#sd_comfy_workflow > [value="${name}"]`).length > 0) {
|
||||
extension_settings.sd.comfy_workflow = name;
|
||||
$('#sd_comfy_workflow').val(extension_settings.sd.comfy_workflow);
|
||||
saveSettingsDebounced();
|
||||
} else {
|
||||
toastr.error(`ComfyUI Workflow "${name}" does not exist.`);
|
||||
}
|
||||
}
|
||||
|
||||
async function validateAutoUrl() {
|
||||
try {
|
||||
@@ -905,7 +918,7 @@ async function onModelChange() {
|
||||
extension_settings.sd.model = $('#sd_model').find(':selected').val();
|
||||
saveSettingsDebounced();
|
||||
|
||||
const cloudSources = [sources.horde, sources.novel, sources.openai];
|
||||
const cloudSources = [sources.horde, sources.novel, sources.openai, sources.togetherai];
|
||||
|
||||
if (cloudSources.includes(extension_settings.sd.source)) {
|
||||
return;
|
||||
@@ -1038,11 +1051,14 @@ async function loadSamplers() {
|
||||
samplers = await loadVladSamplers();
|
||||
break;
|
||||
case sources.openai:
|
||||
samplers = await loadOpenAiSamplers();
|
||||
samplers = ['N/A'];
|
||||
break;
|
||||
case sources.comfy:
|
||||
samplers = await loadComfySamplers();
|
||||
break;
|
||||
case sources.togetherai:
|
||||
samplers = ['N/A'];
|
||||
break;
|
||||
}
|
||||
|
||||
for (const sampler of samplers) {
|
||||
@@ -1052,6 +1068,11 @@ async function loadSamplers() {
|
||||
option.selected = sampler === extension_settings.sd.sampler;
|
||||
$('#sd_sampler').append(option);
|
||||
}
|
||||
|
||||
if (!extension_settings.sd.sampler && samplers.length > 0) {
|
||||
extension_settings.sd.sampler = samplers[0];
|
||||
$('#sd_sampler').val(extension_settings.sd.sampler).trigger('change');
|
||||
}
|
||||
}
|
||||
|
||||
async function loadHordeSamplers() {
|
||||
@@ -1108,10 +1129,6 @@ async function loadAutoSamplers() {
|
||||
}
|
||||
}
|
||||
|
||||
async function loadOpenAiSamplers() {
|
||||
return ['N/A'];
|
||||
}
|
||||
|
||||
async function loadVladSamplers() {
|
||||
if (!extension_settings.sd.vlad_url) {
|
||||
return [];
|
||||
@@ -1200,6 +1217,9 @@ async function loadModels() {
|
||||
case sources.comfy:
|
||||
models = await loadComfyModels();
|
||||
break;
|
||||
case sources.togetherai:
|
||||
models = await loadTogetherAIModels();
|
||||
break;
|
||||
}
|
||||
|
||||
for (const model of models) {
|
||||
@@ -1209,6 +1229,30 @@ async function loadModels() {
|
||||
option.selected = model.value === extension_settings.sd.model;
|
||||
$('#sd_model').append(option);
|
||||
}
|
||||
|
||||
if (!extension_settings.sd.model && models.length > 0) {
|
||||
extension_settings.sd.model = models[0].value;
|
||||
$('#sd_model').val(extension_settings.sd.model).trigger('change');
|
||||
}
|
||||
}
|
||||
|
||||
async function loadTogetherAIModels() {
|
||||
if (!secret_state[SECRET_KEYS.TOGETHERAI]) {
|
||||
console.debug('TogetherAI API key is not set.');
|
||||
return [];
|
||||
}
|
||||
|
||||
const result = await fetch('/api/sd/together/models', {
|
||||
method: 'POST',
|
||||
headers: getRequestHeaders(),
|
||||
});
|
||||
|
||||
if (result.ok) {
|
||||
const data = await result.json();
|
||||
return data;
|
||||
}
|
||||
|
||||
return [];
|
||||
}
|
||||
|
||||
async function loadHordeModels() {
|
||||
@@ -1422,6 +1466,9 @@ async function loadSchedulers() {
|
||||
case sources.openai:
|
||||
schedulers = ['N/A'];
|
||||
break;
|
||||
case sources.togetherai:
|
||||
schedulers = ['N/A'];
|
||||
break;
|
||||
case sources.comfy:
|
||||
schedulers = await loadComfySchedulers();
|
||||
break;
|
||||
@@ -1481,6 +1528,9 @@ async function loadVaes() {
|
||||
case sources.openai:
|
||||
vaes = ['N/A'];
|
||||
break;
|
||||
case sources.togetherai:
|
||||
vaes = ['N/A'];
|
||||
break;
|
||||
case sources.comfy:
|
||||
vaes = await loadComfyVaes();
|
||||
break;
|
||||
@@ -1861,6 +1911,9 @@ async function sendGenerationRequest(generationType, prompt, characterName = nul
|
||||
case sources.comfy:
|
||||
result = await generateComfyImage(prefixedPrompt);
|
||||
break;
|
||||
case sources.togetherai:
|
||||
result = await generateTogetherAIImage(prefixedPrompt);
|
||||
break;
|
||||
}
|
||||
|
||||
if (!result.data) {
|
||||
@@ -1883,6 +1936,29 @@ async function sendGenerationRequest(generationType, prompt, characterName = nul
|
||||
callback ? callback(prompt, base64Image, generationType) : sendMessage(prompt, base64Image, generationType);
|
||||
}
|
||||
|
||||
async function generateTogetherAIImage(prompt) {
|
||||
const result = await fetch('/api/sd/together/generate', {
|
||||
method: 'POST',
|
||||
headers: getRequestHeaders(),
|
||||
body: JSON.stringify({
|
||||
prompt: prompt,
|
||||
negative_prompt: extension_settings.sd.negative_prompt,
|
||||
model: extension_settings.sd.model,
|
||||
steps: extension_settings.sd.steps,
|
||||
width: extension_settings.sd.width,
|
||||
height: extension_settings.sd.height,
|
||||
}),
|
||||
});
|
||||
|
||||
if (result.ok) {
|
||||
const data = await result.json();
|
||||
return { format: 'jpg', data: data?.output?.choices?.[0]?.image_base64 };
|
||||
} else {
|
||||
const text = await result.text();
|
||||
throw new Error(text);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an "extras" image using a provided prompt and other settings.
|
||||
*
|
||||
@@ -2180,6 +2256,9 @@ async function generateComfyImage(prompt) {
|
||||
placeholders.forEach(ph => {
|
||||
workflow = workflow.replace(`"%${ph}%"`, JSON.stringify(extension_settings.sd[ph]));
|
||||
});
|
||||
(extension_settings.sd.comfy_placeholders ?? []).forEach(ph => {
|
||||
workflow = workflow.replace(`"%${ph.find}%"`, JSON.stringify(substituteParams(ph.replace)));
|
||||
});
|
||||
console.log(`{
|
||||
"prompt": ${workflow}
|
||||
}`);
|
||||
@@ -2216,6 +2295,50 @@ async function onComfyOpenWorkflowEditorClick() {
|
||||
};
|
||||
$('#sd_comfy_workflow_editor_name').text(extension_settings.sd.comfy_workflow);
|
||||
$('#sd_comfy_workflow_editor_workflow').val(workflow);
|
||||
const addPlaceholderDom = (placeholder) => {
|
||||
const el = $(`
|
||||
<li class="sd_comfy_workflow_editor_not_found" data-placeholder="${placeholder.find}">
|
||||
<span class="sd_comfy_workflow_editor_custom_remove" title="Remove custom placeholder">⊘</span>
|
||||
<span class="sd_comfy_workflow_editor_custom_final">"%${placeholder.find}%"</span><br>
|
||||
<input placeholder="find" title="find" type="text" class="text_pole sd_comfy_workflow_editor_custom_find" value=""><br>
|
||||
<input placeholder="replace" title="replace" type="text" class="text_pole sd_comfy_workflow_editor_custom_replace">
|
||||
</li>
|
||||
`);
|
||||
$('#sd_comfy_workflow_editor_placeholder_list_custom').append(el);
|
||||
el.find('.sd_comfy_workflow_editor_custom_find').val(placeholder.find);
|
||||
el.find('.sd_comfy_workflow_editor_custom_find').on('input', function() {
|
||||
placeholder.find = this.value;
|
||||
el.find('.sd_comfy_workflow_editor_custom_final').text(`"%${this.value}%"`);
|
||||
el.attr('data-placeholder', `${this.value}`);
|
||||
checkPlaceholders();
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
el.find('.sd_comfy_workflow_editor_custom_replace').val(placeholder.replace);
|
||||
el.find('.sd_comfy_workflow_editor_custom_replace').on('input', function() {
|
||||
placeholder.replace = this.value;
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
el.find('.sd_comfy_workflow_editor_custom_remove').on('click', () => {
|
||||
el.remove();
|
||||
extension_settings.sd.comfy_placeholders.splice(extension_settings.sd.comfy_placeholders.indexOf(placeholder));
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
};
|
||||
$('#sd_comfy_workflow_editor_placeholder_add').on('click', () => {
|
||||
if (!extension_settings.sd.comfy_placeholders) {
|
||||
extension_settings.sd.comfy_placeholders = [];
|
||||
}
|
||||
const placeholder = {
|
||||
find: '',
|
||||
replace: '',
|
||||
};
|
||||
extension_settings.sd.comfy_placeholders.push(placeholder);
|
||||
addPlaceholderDom(placeholder);
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
(extension_settings.sd.comfy_placeholders ?? []).forEach(placeholder=>{
|
||||
addPlaceholderDom(placeholder);
|
||||
});
|
||||
checkPlaceholders();
|
||||
$('#sd_comfy_workflow_editor_workflow').on('input', checkPlaceholders);
|
||||
if (await popupResult) {
|
||||
@@ -2376,6 +2499,8 @@ function isValidState() {
|
||||
return secret_state[SECRET_KEYS.OPENAI];
|
||||
case sources.comfy:
|
||||
return true;
|
||||
case sources.togetherai:
|
||||
return secret_state[SECRET_KEYS.TOGETHERAI];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2481,7 +2606,8 @@ $('#sd_dropdown [id]').on('click', function () {
|
||||
});
|
||||
|
||||
jQuery(async () => {
|
||||
getContext().registerSlashCommand('imagine', generatePicture, ['sd', 'img', 'image'], helpString, true, true);
|
||||
registerSlashCommand('imagine', generatePicture, ['sd', 'img', 'image'], helpString, true, true);
|
||||
registerSlashCommand('imagine-comfy-workflow', changeComfyWorkflow, ['icw'], '(workflowName) - change the workflow to be used for image generation with ComfyUI, e.g. <tt>/imagine-comfy-workflow MyWorkflow</tt>')
|
||||
|
||||
$('#extensions_settings').append(renderExtensionTemplate('stable-diffusion', 'settings', defaultSettings));
|
||||
$('#sd_source').on('change', onSourceChange);
|
||||
|
@@ -35,6 +35,7 @@
|
||||
<option value="novel">NovelAI Diffusion</option>
|
||||
<option value="openai">OpenAI (DALL-E)</option>
|
||||
<option value="comfy">ComfyUI</option>
|
||||
<option value="togetherai">TogetherAI</option>
|
||||
</select>
|
||||
<div data-sd-source="auto">
|
||||
<label for="sd_auto_url">SD Web UI URL</label>
|
||||
|
@@ -82,3 +82,17 @@
|
||||
.sd_comfy_workflow_editor_placeholder_list>li>.notes-link {
|
||||
cursor: help;
|
||||
}
|
||||
|
||||
.sd_comfy_workflow_editor_placeholder_list input {
|
||||
font-size: inherit;
|
||||
margin: 0;
|
||||
}
|
||||
.sd_comfy_workflow_editor_custom_remove, #sd_comfy_workflow_editor_placeholder_add {
|
||||
cursor: pointer;
|
||||
font-weight: bold;
|
||||
width: 1em;
|
||||
opacity: 0.5;
|
||||
&:hover {
|
||||
opacity: 1;
|
||||
}
|
||||
}
|
||||
|
@@ -12,6 +12,7 @@ import {
|
||||
} from '../../../script.js';
|
||||
import { extension_settings, getContext } from '../../extensions.js';
|
||||
import { secret_state, writeSecret } from '../../secrets.js';
|
||||
import { splitRecursive } from '../../utils.js';
|
||||
|
||||
export const autoModeOptions = {
|
||||
NONE: 'none',
|
||||
@@ -315,6 +316,28 @@ async function translateProviderBing(text, lang) {
|
||||
throw new Error(response.statusText);
|
||||
}
|
||||
|
||||
/**
|
||||
* Splits text into chunks and translates each chunk separately
|
||||
* @param {string} text Text to translate
|
||||
* @param {string} lang Target language code
|
||||
* @param {(text: string, lang: string) => Promise<string>} translateFn Function to translate a single chunk (must return a Promise)
|
||||
* @param {number} chunkSize Maximum chunk size
|
||||
* @returns {Promise<string>} Translated text
|
||||
*/
|
||||
async function chunkedTranslate(text, lang, translateFn, chunkSize = 5000) {
|
||||
if (text.length <= chunkSize) {
|
||||
return await translateFn(text, lang);
|
||||
}
|
||||
|
||||
const chunks = splitRecursive(text, chunkSize);
|
||||
|
||||
let result = '';
|
||||
for (const chunk of chunks) {
|
||||
result += await translateFn(chunk, lang);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Translates text using the selected translation provider
|
||||
* @param {string} text Text to translate
|
||||
@@ -331,15 +354,15 @@ async function translate(text, lang) {
|
||||
case 'libre':
|
||||
return await translateProviderLibre(text, lang);
|
||||
case 'google':
|
||||
return await translateProviderGoogle(text, lang);
|
||||
return await chunkedTranslate(text, lang, translateProviderGoogle, 5000);
|
||||
case 'deepl':
|
||||
return await translateProviderDeepl(text, lang);
|
||||
case 'deeplx':
|
||||
return await translateProviderDeepLX(text, lang);
|
||||
return await chunkedTranslate(text, lang, translateProviderDeepLX, 1500);
|
||||
case 'oneringtranslator':
|
||||
return await translateProviderOneRing(text, lang);
|
||||
case 'bing':
|
||||
return await translateProviderBing(text, lang);
|
||||
return await chunkedTranslate(text, lang, translateProviderBing, 1000);
|
||||
default:
|
||||
console.error('Unknown translation provider', extension_settings.translate.provider);
|
||||
return text;
|
||||
|
@@ -1108,7 +1108,7 @@ function printGroupCandidates() {
|
||||
showNavigator: true,
|
||||
showSizeChanger: true,
|
||||
pageSize: Number(localStorage.getItem(storageKey)) || 5,
|
||||
sizeChangerOptions: [5, 10, 25, 50, 100, 200],
|
||||
sizeChangerOptions: [5, 10, 25, 50, 100, 200, 500, 1000],
|
||||
afterSizeSelectorChange: function (e) {
|
||||
localStorage.setItem(storageKey, e.target.value);
|
||||
},
|
||||
@@ -1135,7 +1135,7 @@ function printGroupMembers() {
|
||||
showNavigator: true,
|
||||
showSizeChanger: true,
|
||||
pageSize: Number(localStorage.getItem(storageKey)) || 5,
|
||||
sizeChangerOptions: [5, 10, 25, 50, 100, 200],
|
||||
sizeChangerOptions: [5, 10, 25, 50, 100, 200, 500, 1000],
|
||||
afterSizeSelectorChange: function (e) {
|
||||
localStorage.setItem(storageKey, e.target.value);
|
||||
},
|
||||
|
@@ -2,7 +2,6 @@ import {
|
||||
saveSettingsDebounced,
|
||||
callPopup,
|
||||
setGenerationProgress,
|
||||
CLIENT_VERSION,
|
||||
getRequestHeaders,
|
||||
max_context,
|
||||
amount_gen,
|
||||
@@ -34,19 +33,96 @@ let horde_settings = {
|
||||
const MAX_RETRIES = 480;
|
||||
const CHECK_INTERVAL = 2500;
|
||||
const MIN_LENGTH = 16;
|
||||
const getRequestArgs = () => ({
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Client-Agent': CLIENT_VERSION,
|
||||
},
|
||||
});
|
||||
|
||||
async function getWorkers(workerType) {
|
||||
const response = await fetch('https://horde.koboldai.net/api/v2/workers?type=text', getRequestArgs());
|
||||
/**
|
||||
* Gets the available workers from Horde.
|
||||
* @param {boolean} force Do a force refresh of the workers
|
||||
* @returns {Promise<Array>} Array of workers
|
||||
*/
|
||||
async function getWorkers(force) {
|
||||
const response = await fetch('/api/horde/text-workers', {
|
||||
method: 'POST',
|
||||
headers: getRequestHeaders(),
|
||||
body: JSON.stringify({ force }),
|
||||
});
|
||||
const data = await response.json();
|
||||
return data;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the available models from Horde.
|
||||
* @param {boolean} force Do a force refresh of the models
|
||||
* @returns {Promise<Array>} Array of models
|
||||
*/
|
||||
async function getModels(force) {
|
||||
const response = await fetch('/api/horde/text-models', {
|
||||
method: 'POST',
|
||||
headers: getRequestHeaders(),
|
||||
body: JSON.stringify({ force }),
|
||||
});
|
||||
const data = await response.json();
|
||||
return data;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the status of a Horde task.
|
||||
* @param {string} taskId Task ID
|
||||
* @returns {Promise<Object>} Task status
|
||||
*/
|
||||
async function getTaskStatus(taskId) {
|
||||
const response = await fetch('/api/horde/task-status', {
|
||||
method: 'POST',
|
||||
headers: getRequestHeaders(),
|
||||
body: JSON.stringify({ taskId }),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to get task status: ${response.statusText}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
return data;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cancels a Horde task.
|
||||
* @param {string} taskId Task ID
|
||||
*/
|
||||
async function cancelTask(taskId) {
|
||||
const response = await fetch('/api/horde/cancel-task', {
|
||||
method: 'POST',
|
||||
headers: getRequestHeaders(),
|
||||
body: JSON.stringify({ taskId }),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to cancel task: ${response.statusText}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if Horde is online.
|
||||
* @returns {Promise<boolean>} True if Horde is online, false otherwise
|
||||
*/
|
||||
async function checkHordeStatus() {
|
||||
try {
|
||||
const response = await fetch('/api/horde/status', {
|
||||
method: 'POST',
|
||||
headers: getRequestHeaders(),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
return data.ok;
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function validateHordeModel() {
|
||||
let selectedModels = models.filter(m => horde_settings.models.includes(m.name));
|
||||
|
||||
@@ -60,7 +136,7 @@ function validateHordeModel() {
|
||||
|
||||
async function adjustHordeGenerationParams(max_context_length, max_length) {
|
||||
console.log(max_context_length, max_length);
|
||||
const workers = await getWorkers();
|
||||
const workers = await getWorkers(false);
|
||||
let maxContextLength = max_context_length;
|
||||
let maxLength = max_length;
|
||||
let availableWorkers = [];
|
||||
@@ -126,10 +202,7 @@ async function generateHorde(prompt, params, signal, reportProgress) {
|
||||
|
||||
const response = await fetch('/api/horde/generate-text', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
...getRequestHeaders(),
|
||||
'Client-Agent': CLIENT_VERSION,
|
||||
},
|
||||
headers: getRequestHeaders(),
|
||||
body: JSON.stringify(payload),
|
||||
});
|
||||
|
||||
@@ -146,24 +219,17 @@ async function generateHorde(prompt, params, signal, reportProgress) {
|
||||
throw new Error(`Horde generation failed: ${reason}`);
|
||||
}
|
||||
|
||||
const task_id = responseJson.id;
|
||||
const taskId = responseJson.id;
|
||||
let queue_position_first = null;
|
||||
console.log(`Horde task id = ${task_id}`);
|
||||
console.log(`Horde task id = ${taskId}`);
|
||||
|
||||
for (let retryNumber = 0; retryNumber < MAX_RETRIES; retryNumber++) {
|
||||
if (signal.aborted) {
|
||||
fetch(`https://horde.koboldai.net/api/v2/generate/text/status/${task_id}`, {
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
'Client-Agent': CLIENT_VERSION,
|
||||
},
|
||||
});
|
||||
cancelTask(taskId);
|
||||
throw new Error('Request aborted');
|
||||
}
|
||||
|
||||
const statusCheckResponse = await fetch(`https://horde.koboldai.net/api/v2/generate/text/status/${task_id}`, getRequestArgs());
|
||||
|
||||
const statusCheckJson = await statusCheckResponse.json();
|
||||
const statusCheckJson = await getTaskStatus(taskId);
|
||||
console.log(statusCheckJson);
|
||||
|
||||
if (statusCheckJson.faulted === true) {
|
||||
@@ -202,18 +268,13 @@ async function generateHorde(prompt, params, signal, reportProgress) {
|
||||
throw new Error('Horde timeout');
|
||||
}
|
||||
|
||||
async function checkHordeStatus() {
|
||||
const response = await fetch('https://horde.koboldai.net/api/v2/status/heartbeat', getRequestArgs());
|
||||
return response.ok;
|
||||
}
|
||||
|
||||
async function getHordeModels() {
|
||||
/**
|
||||
* Displays the available models in the Horde model selection dropdown.
|
||||
* @param {boolean} force Force refresh of the models
|
||||
*/
|
||||
async function getHordeModels(force) {
|
||||
$('#horde_model').empty();
|
||||
const response = await fetch('https://horde.koboldai.net/api/v2/status/models?type=text', getRequestArgs());
|
||||
models = await response.json();
|
||||
models.sort((a, b) => {
|
||||
return b.performance - a.performance;
|
||||
});
|
||||
models = (await getModels(force)).sort((a, b) => b.performance - a.performance);
|
||||
for (const model of models) {
|
||||
const option = document.createElement('option');
|
||||
option.value = model.name;
|
||||
@@ -299,7 +360,7 @@ jQuery(function () {
|
||||
await writeSecret(SECRET_KEYS.HORDE, key);
|
||||
});
|
||||
|
||||
$('#horde_refresh').on('click', getHordeModels);
|
||||
$('#horde_refresh').on('click', () => getHordeModels(true));
|
||||
$('#horde_kudos').on('click', showKudos);
|
||||
|
||||
// Not needed on mobile
|
||||
|
@@ -29,6 +29,7 @@ const controls = [
|
||||
{ id: 'instruct_first_output_sequence', property: 'first_output_sequence', isCheckbox: false },
|
||||
{ id: 'instruct_last_output_sequence', property: 'last_output_sequence', isCheckbox: false },
|
||||
{ id: 'instruct_activation_regex', property: 'activation_regex', isCheckbox: false },
|
||||
{ id: 'instruct_bind_to_context', property: 'bind_to_context', isCheckbox: true },
|
||||
];
|
||||
|
||||
/**
|
||||
@@ -136,7 +137,7 @@ export function autoSelectInstructPreset(modelId) {
|
||||
let foundMatch = false;
|
||||
for (const instruct_preset of instruct_presets) {
|
||||
// If instruct preset matches the context template
|
||||
if (instruct_preset.name === power_user.context.preset) {
|
||||
if (power_user.instruct.bind_to_context && instruct_preset.name === power_user.context.preset) {
|
||||
foundMatch = true;
|
||||
selectInstructPreset(instruct_preset.name);
|
||||
break;
|
||||
@@ -163,7 +164,7 @@ export function autoSelectInstructPreset(modelId) {
|
||||
}
|
||||
}
|
||||
|
||||
if (power_user.default_instruct && power_user.instruct.preset !== power_user.default_instruct) {
|
||||
if (power_user.instruct.bind_to_context && power_user.default_instruct && power_user.instruct.preset !== power_user.default_instruct) {
|
||||
if (instruct_presets.some(p => p.name === power_user.default_instruct)) {
|
||||
console.log(`Instruct mode: default preset "${power_user.default_instruct}" selected`);
|
||||
$('#instruct_presets').val(power_user.default_instruct).trigger('change');
|
||||
@@ -409,6 +410,10 @@ jQuery(() => {
|
||||
});
|
||||
|
||||
$('#instruct_enabled').on('change', function () {
|
||||
if (!power_user.instruct.bind_to_context) {
|
||||
return;
|
||||
}
|
||||
|
||||
// When instruct mode gets enabled, select context template matching selected instruct preset
|
||||
if (power_user.instruct.enabled) {
|
||||
selectMatchingContextTemplate(power_user.instruct.preset);
|
||||
@@ -440,8 +445,10 @@ jQuery(() => {
|
||||
}
|
||||
});
|
||||
|
||||
// Select matching context template
|
||||
selectMatchingContextTemplate(name);
|
||||
if (power_user.instruct.bind_to_context) {
|
||||
// Select matching context template
|
||||
selectMatchingContextTemplate(name);
|
||||
}
|
||||
|
||||
highlightDefaultPreset();
|
||||
});
|
||||
|
126
public/scripts/logit-bias.js
Normal file
126
public/scripts/logit-bias.js
Normal file
@@ -0,0 +1,126 @@
|
||||
import { saveSettingsDebounced } from '../script.js';
|
||||
import { getTextTokens } from './tokenizers.js';
|
||||
import { uuidv4 } from './utils.js';
|
||||
|
||||
export const BIAS_CACHE = new Map();
|
||||
|
||||
/**
|
||||
* Displays the logit bias list in the specified container.
|
||||
* @param {object} logitBias Logit bias object
|
||||
* @param {string} containerSelector Container element selector
|
||||
* @returns
|
||||
*/
|
||||
export function displayLogitBias(logitBias, containerSelector) {
|
||||
if (!Array.isArray(logitBias)) {
|
||||
console.log('Logit bias set not found');
|
||||
return;
|
||||
}
|
||||
|
||||
$(containerSelector).find('.logit_bias_list').empty();
|
||||
|
||||
for (const entry of logitBias) {
|
||||
if (entry) {
|
||||
createLogitBiasListItem(entry, logitBias, containerSelector);
|
||||
}
|
||||
}
|
||||
|
||||
BIAS_CACHE.delete(containerSelector);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new logit bias entry
|
||||
* @param {object[]} logitBias Array of logit bias objects
|
||||
* @param {string} containerSelector Container element ID
|
||||
*/
|
||||
export function createNewLogitBiasEntry(logitBias, containerSelector) {
|
||||
const entry = { id: uuidv4(), text: '', value: 0 };
|
||||
logitBias.push(entry);
|
||||
BIAS_CACHE.delete(containerSelector);
|
||||
createLogitBiasListItem(entry, logitBias, containerSelector);
|
||||
saveSettingsDebounced();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a logit bias list item.
|
||||
* @param {object} entry Logit bias entry
|
||||
* @param {object[]} logitBias Array of logit bias objects
|
||||
* @param {string} containerSelector Container element ID
|
||||
*/
|
||||
function createLogitBiasListItem(entry, logitBias, containerSelector) {
|
||||
const id = entry.id;
|
||||
const template = $('#logit_bias_template .logit_bias_form').clone();
|
||||
template.data('id', id);
|
||||
template.find('.logit_bias_text').val(entry.text).on('input', function () {
|
||||
entry.text = $(this).val();
|
||||
BIAS_CACHE.delete(containerSelector);
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
template.find('.logit_bias_value').val(entry.value).on('input', function () {
|
||||
entry.value = Number($(this).val());
|
||||
BIAS_CACHE.delete(containerSelector);
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
template.find('.logit_bias_remove').on('click', function () {
|
||||
$(this).closest('.logit_bias_form').remove();
|
||||
const index = logitBias.indexOf(entry);
|
||||
if (index > -1) {
|
||||
logitBias.splice(index, 1);
|
||||
}
|
||||
BIAS_CACHE.delete(containerSelector);
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
$(containerSelector).find('.logit_bias_list').prepend(template);
|
||||
}
|
||||
|
||||
/**
|
||||
* Populate logit bias list from preset.
|
||||
* @param {object[]} biasPreset Bias preset
|
||||
* @param {number} tokenizerType Tokenizer type (see tokenizers.js)
|
||||
* @param {(bias: number, sequence: number[]) => object} getBiasObject Transformer function to create bias object
|
||||
* @returns {object[]} Array of logit bias objects
|
||||
*/
|
||||
export function getLogitBiasListResult(biasPreset, tokenizerType, getBiasObject) {
|
||||
const result = [];
|
||||
|
||||
for (const entry of biasPreset) {
|
||||
if (entry.text?.length > 0) {
|
||||
const text = entry.text.trim();
|
||||
|
||||
// Skip empty lines
|
||||
if (text.length === 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Verbatim text
|
||||
if (text.startsWith('{') && text.endsWith('}')) {
|
||||
const tokens = getTextTokens(tokenizerType, text.slice(1, -1));
|
||||
result.push(getBiasObject(entry.value, tokens));
|
||||
}
|
||||
|
||||
|
||||
// Raw token ids, JSON serialized
|
||||
else if (text.startsWith('[') && text.endsWith(']')) {
|
||||
try {
|
||||
const tokens = JSON.parse(text);
|
||||
|
||||
if (Array.isArray(tokens) && tokens.every(t => Number.isInteger(t))) {
|
||||
result.push(getBiasObject(entry.value, tokens));
|
||||
} else {
|
||||
throw new Error('Not an array of integers');
|
||||
}
|
||||
} catch (err) {
|
||||
console.log(`Failed to parse logit bias token list: ${text}`, err);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Text with a leading space
|
||||
else {
|
||||
const biasText = ` ${text}`;
|
||||
const tokens = getTextTokens(tokenizerType, biasText);
|
||||
result.push(getBiasObject(entry.value, tokens));
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
@@ -1,64 +0,0 @@
|
||||
import { setGenerationParamsFromPreset } from '../script.js';
|
||||
import { isMobile } from './RossAscends-mods.js';
|
||||
import { textgenerationwebui_settings as textgen_settings } from './textgen-settings.js';
|
||||
|
||||
let models = [];
|
||||
|
||||
export async function loadMancerModels(data) {
|
||||
if (!Array.isArray(data)) {
|
||||
console.error('Invalid Mancer models data', data);
|
||||
return;
|
||||
}
|
||||
|
||||
models = data;
|
||||
|
||||
$('#mancer_model').empty();
|
||||
for (const model of data) {
|
||||
const option = document.createElement('option');
|
||||
option.value = model.id;
|
||||
option.text = model.name;
|
||||
option.selected = model.id === textgen_settings.mancer_model;
|
||||
$('#mancer_model').append(option);
|
||||
}
|
||||
}
|
||||
|
||||
function onMancerModelSelect() {
|
||||
const modelId = String($('#mancer_model').val());
|
||||
textgen_settings.mancer_model = modelId;
|
||||
$('#api_button_textgenerationwebui').trigger('click');
|
||||
|
||||
const limits = models.find(x => x.id === modelId)?.limits;
|
||||
setGenerationParamsFromPreset({ max_length: limits.context, genamt: limits.completion });
|
||||
}
|
||||
|
||||
function getMancerModelTemplate(option) {
|
||||
const model = models.find(x => x.id === option?.element?.value);
|
||||
|
||||
if (!option.id || !model) {
|
||||
return option.text;
|
||||
}
|
||||
|
||||
const creditsPerPrompt = (model.limits?.context - model.limits?.completion) * model.pricing?.prompt;
|
||||
const creditsPerCompletion = model.limits?.completion * model.pricing?.completion;
|
||||
const creditsTotal = Math.round(creditsPerPrompt + creditsPerCompletion).toFixed(0);
|
||||
|
||||
return $((`
|
||||
<div class="flex-container flexFlowColumn">
|
||||
<div><strong>${DOMPurify.sanitize(model.name)}</strong> | <span>${model.limits?.context} ctx</span> / <span>${model.limits?.completion} res</span> | <small>Credits per request (max): ${creditsTotal}</small></div>
|
||||
</div>
|
||||
`));
|
||||
}
|
||||
|
||||
jQuery(function () {
|
||||
$('#mancer_model').on('change', onMancerModelSelect);
|
||||
|
||||
if (!isMobile()) {
|
||||
$('#mancer_model').select2({
|
||||
placeholder: 'Select a model',
|
||||
searchInputPlaceholder: 'Search models...',
|
||||
searchInputCssClass: 'text_pole',
|
||||
width: '100%',
|
||||
templateResult: getMancerModelTemplate,
|
||||
});
|
||||
}
|
||||
});
|
@@ -8,15 +8,15 @@ import {
|
||||
substituteParams,
|
||||
} from '../script.js';
|
||||
import { getCfgPrompt } from './cfg-scale.js';
|
||||
import { MAX_CONTEXT_DEFAULT, MAX_RESPONSE_DEFAULT } from './power-user.js';
|
||||
import { MAX_CONTEXT_DEFAULT, MAX_RESPONSE_DEFAULT, power_user } from './power-user.js';
|
||||
import { getTextTokens, tokenizers } from './tokenizers.js';
|
||||
import EventSourceStream from './sse-stream.js';
|
||||
import {
|
||||
getSortableDelay,
|
||||
getStringHash,
|
||||
onlyUnique,
|
||||
uuidv4,
|
||||
} from './utils.js';
|
||||
import { BIAS_CACHE, createNewLogitBiasEntry, displayLogitBias, getLogitBiasListResult } from './logit-bias.js';
|
||||
|
||||
const default_preamble = '[ Style: chat, complex, sensory, visceral ]';
|
||||
const default_order = [1, 5, 0, 2, 3, 4];
|
||||
@@ -59,7 +59,7 @@ const nai_tiers = {
|
||||
|
||||
let novel_data = null;
|
||||
let badWordsCache = {};
|
||||
let biasCache = undefined;
|
||||
const BIAS_KEY = '#novel_api-settings';
|
||||
|
||||
export function setNovelData(data) {
|
||||
novel_data = data;
|
||||
@@ -145,7 +145,7 @@ export function loadNovelSettings(settings) {
|
||||
//load the rest of the Novel settings without any checks
|
||||
nai_settings.model_novel = settings.model_novel;
|
||||
$('#model_novel_select').val(nai_settings.model_novel);
|
||||
$(`#model_novel_select option[value=${nai_settings.model_novel}]`).attr('selected', true);
|
||||
$(`#model_novel_select option[value=${nai_settings.model_novel}]`).prop('selected', true);
|
||||
|
||||
if (settings.nai_preamble !== undefined) {
|
||||
nai_settings.preamble = settings.nai_preamble;
|
||||
@@ -217,7 +217,7 @@ function loadNovelSettingsUi(ui_settings) {
|
||||
|
||||
$('#streaming_novel').prop('checked', ui_settings.streaming_novel);
|
||||
sortItemsByOrder(ui_settings.order);
|
||||
displayLogitBias(ui_settings.logit_bias);
|
||||
displayLogitBias(ui_settings.logit_bias, BIAS_KEY);
|
||||
}
|
||||
|
||||
const sliders = [
|
||||
@@ -433,8 +433,12 @@ export function getNovelGenerationData(finalPrompt, settings, maxLength, isImper
|
||||
|
||||
let logitBias = [];
|
||||
if (tokenizerType !== tokenizers.NONE && Array.isArray(nai_settings.logit_bias) && nai_settings.logit_bias.length) {
|
||||
logitBias = biasCache || calculateLogitBias();
|
||||
biasCache = logitBias;
|
||||
logitBias = BIAS_CACHE.get(BIAS_KEY) || calculateLogitBias();
|
||||
BIAS_CACHE.set(BIAS_KEY, logitBias);
|
||||
}
|
||||
|
||||
if (power_user.console_log_prompts) {
|
||||
console.log(finalPrompt);
|
||||
}
|
||||
|
||||
return {
|
||||
@@ -525,65 +529,14 @@ function saveSamplingOrder() {
|
||||
saveSettingsDebounced();
|
||||
}
|
||||
|
||||
function displayLogitBias(logit_bias) {
|
||||
if (!Array.isArray(logit_bias)) {
|
||||
console.log('Logit bias set not found');
|
||||
return;
|
||||
}
|
||||
|
||||
$('.novelai_logit_bias_list').empty();
|
||||
|
||||
for (const entry of logit_bias) {
|
||||
if (entry) {
|
||||
createLogitBiasListItem(entry);
|
||||
}
|
||||
}
|
||||
|
||||
biasCache = undefined;
|
||||
}
|
||||
|
||||
function createNewLogitBiasEntry() {
|
||||
const entry = { id: uuidv4(), text: '', value: 0 };
|
||||
nai_settings.logit_bias.push(entry);
|
||||
biasCache = undefined;
|
||||
createLogitBiasListItem(entry);
|
||||
saveSettingsDebounced();
|
||||
}
|
||||
|
||||
function createLogitBiasListItem(entry) {
|
||||
const id = entry.id;
|
||||
const template = $('#novelai_logit_bias_template .novelai_logit_bias_form').clone();
|
||||
template.data('id', id);
|
||||
template.find('.novelai_logit_bias_text').val(entry.text).on('input', function () {
|
||||
entry.text = $(this).val();
|
||||
biasCache = undefined;
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
template.find('.novelai_logit_bias_value').val(entry.value).on('input', function () {
|
||||
entry.value = Number($(this).val());
|
||||
biasCache = undefined;
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
template.find('.novelai_logit_bias_remove').on('click', function () {
|
||||
$(this).closest('.novelai_logit_bias_form').remove();
|
||||
const index = nai_settings.logit_bias.indexOf(entry);
|
||||
if (index > -1) {
|
||||
nai_settings.logit_bias.splice(index, 1);
|
||||
}
|
||||
biasCache = undefined;
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
$('.novelai_logit_bias_list').prepend(template);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates logit bias for Novel AI
|
||||
* @returns {object[]} Array of logit bias objects
|
||||
*/
|
||||
function calculateLogitBias() {
|
||||
const bias_preset = nai_settings.logit_bias;
|
||||
const biasPreset = nai_settings.logit_bias;
|
||||
|
||||
if (!Array.isArray(bias_preset) || bias_preset.length === 0) {
|
||||
if (!Array.isArray(biasPreset) || biasPreset.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
@@ -605,47 +558,7 @@ function calculateLogitBias() {
|
||||
};
|
||||
}
|
||||
|
||||
const result = [];
|
||||
|
||||
for (const entry of bias_preset) {
|
||||
if (entry.text?.length > 0) {
|
||||
const text = entry.text.trim();
|
||||
|
||||
// Skip empty lines
|
||||
if (text.length === 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Verbatim text
|
||||
if (text.startsWith('{') && text.endsWith('}')) {
|
||||
const tokens = getTextTokens(tokenizerType, text.slice(1, -1));
|
||||
result.push(getBiasObject(entry.value, tokens));
|
||||
}
|
||||
|
||||
// Raw token ids, JSON serialized
|
||||
else if (text.startsWith('[') && text.endsWith(']')) {
|
||||
try {
|
||||
const tokens = JSON.parse(text);
|
||||
|
||||
if (Array.isArray(tokens) && tokens.every(t => Number.isInteger(t))) {
|
||||
result.push(getBiasObject(entry.value, tokens));
|
||||
} else {
|
||||
throw new Error('Not an array of integers');
|
||||
}
|
||||
} catch (err) {
|
||||
console.log(`Failed to parse logit bias token list: ${text}`, err);
|
||||
}
|
||||
}
|
||||
|
||||
// Text with a leading space
|
||||
else {
|
||||
const biasText = ` ${text}`;
|
||||
const tokens = getTextTokens(tokenizerType, biasText);
|
||||
result.push(getBiasObject(entry.value, tokens));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const result = getLogitBiasListResult(biasPreset, tokenizerType, getBiasObject);
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -778,5 +691,5 @@ jQuery(function () {
|
||||
saveSamplingOrder();
|
||||
});
|
||||
|
||||
$('#novelai_logit_bias_new_entry').on('click', createNewLogitBiasEntry);
|
||||
$('#novelai_logit_bias_new_entry').on('click', () => createNewLogitBiasEntry(nai_settings.logit_bias, BIAS_KEY));
|
||||
});
|
||||
|
@@ -21,7 +21,6 @@ import {
|
||||
MAX_INJECTION_DEPTH,
|
||||
name1,
|
||||
name2,
|
||||
replaceBiasMarkup,
|
||||
replaceItemizedPromptText,
|
||||
resultCheckStatus,
|
||||
saveSettingsDebounced,
|
||||
@@ -90,6 +89,7 @@ const default_wi_format = '[Details of the fictional world the RP is set in:\n{0
|
||||
const default_new_chat_prompt = '[Start a new Chat]';
|
||||
const default_new_group_chat_prompt = '[Start a new group chat. Group members: {{group}}]';
|
||||
const default_new_example_chat_prompt = '[Start a new Chat]';
|
||||
const default_claude_human_sysprompt_message = 'Let\'s get started. Please generate your response based on the information and instructions provided above.';
|
||||
const default_continue_nudge_prompt = '[Continue the following message. Do not include ANY parts of the original message. Use capitalization and punctuation as if your reply is a part of the original message: {{lastChatMessage}}]';
|
||||
const default_bias = 'Default (none)';
|
||||
const default_personality_format = '[{{char}}\'s personality: {{personality}}]';
|
||||
@@ -165,6 +165,7 @@ export const chat_completion_sources = {
|
||||
AI21: 'ai21',
|
||||
MAKERSUITE: 'makersuite',
|
||||
MISTRALAI: 'mistralai',
|
||||
CUSTOM: 'custom',
|
||||
};
|
||||
|
||||
const prefixMap = selected_group ? {
|
||||
@@ -210,6 +211,11 @@ const default_settings = {
|
||||
google_model: 'gemini-pro',
|
||||
ai21_model: 'j2-ultra',
|
||||
mistralai_model: 'mistral-medium',
|
||||
custom_model: '',
|
||||
custom_url: '',
|
||||
custom_include_body: '',
|
||||
custom_exclude_body: '',
|
||||
custom_include_headers: '',
|
||||
windowai_model: '',
|
||||
openrouter_model: openrouter_website_model,
|
||||
openrouter_use_fallback: false,
|
||||
@@ -224,13 +230,16 @@ const default_settings = {
|
||||
show_external_models: false,
|
||||
proxy_password: '',
|
||||
assistant_prefill: '',
|
||||
human_sysprompt_message: default_claude_human_sysprompt_message,
|
||||
use_ai21_tokenizer: false,
|
||||
use_google_tokenizer: false,
|
||||
exclude_assistant: false,
|
||||
claude_use_sysprompt: false,
|
||||
use_alt_scale: false,
|
||||
squash_system_messages: false,
|
||||
image_inlining: false,
|
||||
bypass_status_check: false,
|
||||
continue_prefill: false,
|
||||
seed: -1,
|
||||
};
|
||||
|
||||
@@ -266,6 +275,11 @@ const oai_settings = {
|
||||
google_model: 'gemini-pro',
|
||||
ai21_model: 'j2-ultra',
|
||||
mistralai_model: 'mistral-medium',
|
||||
custom_model: '',
|
||||
custom_url: '',
|
||||
custom_include_body: '',
|
||||
custom_exclude_body: '',
|
||||
custom_include_headers: '',
|
||||
windowai_model: '',
|
||||
openrouter_model: openrouter_website_model,
|
||||
openrouter_use_fallback: false,
|
||||
@@ -280,13 +294,16 @@ const oai_settings = {
|
||||
show_external_models: false,
|
||||
proxy_password: '',
|
||||
assistant_prefill: '',
|
||||
human_sysprompt_message: default_claude_human_sysprompt_message,
|
||||
use_ai21_tokenizer: false,
|
||||
use_google_tokenizer: false,
|
||||
exclude_assistant: false,
|
||||
claude_use_sysprompt: false,
|
||||
use_alt_scale: false,
|
||||
squash_system_messages: false,
|
||||
image_inlining: false,
|
||||
bypass_status_check: false,
|
||||
continue_prefill: false,
|
||||
seed: -1,
|
||||
};
|
||||
|
||||
@@ -425,8 +442,6 @@ function setOpenAIMessages(chat) {
|
||||
content = `${chat[j].name}: ${content}`;
|
||||
}
|
||||
}
|
||||
content = replaceBiasMarkup(content);
|
||||
|
||||
// remove caret return (waste of tokens)
|
||||
content = content.replace(/\r/gm, '');
|
||||
|
||||
@@ -647,12 +662,20 @@ async function populateChatHistory(messages, prompts, chatCompletion, type = nul
|
||||
let continueMessage = null;
|
||||
const instruct = isOpenRouterWithInstruct();
|
||||
if (type === 'continue' && cyclePrompt && !instruct) {
|
||||
const continuePrompt = new Prompt({
|
||||
identifier: 'continueNudge',
|
||||
role: 'system',
|
||||
content: oai_settings.continue_nudge_prompt.replace('{{lastChatMessage}}', cyclePrompt),
|
||||
system_prompt: true,
|
||||
});
|
||||
const promptObject = oai_settings.continue_prefill ?
|
||||
{
|
||||
identifier: 'continueNudge',
|
||||
role: 'assistant',
|
||||
content: cyclePrompt,
|
||||
system_prompt: true,
|
||||
} :
|
||||
{
|
||||
identifier: 'continueNudge',
|
||||
role: 'system',
|
||||
content: oai_settings.continue_nudge_prompt.replace('{{lastChatMessage}}', cyclePrompt),
|
||||
system_prompt: true,
|
||||
};
|
||||
const continuePrompt = new Prompt(promptObject);
|
||||
const preparedPrompt = promptManager.preparePrompt(continuePrompt);
|
||||
continueMessage = Message.fromPrompt(preparedPrompt);
|
||||
chatCompletion.reserveBudget(continueMessage);
|
||||
@@ -1266,6 +1289,8 @@ function getChatCompletionModel() {
|
||||
return oai_settings.ai21_model;
|
||||
case chat_completion_sources.MISTRALAI:
|
||||
return oai_settings.mistralai_model;
|
||||
case chat_completion_sources.CUSTOM:
|
||||
return oai_settings.custom_model;
|
||||
default:
|
||||
throw new Error(`Unknown chat completion source: ${oai_settings.chat_completion_source}`);
|
||||
}
|
||||
@@ -1326,6 +1351,23 @@ function saveModelList(data) {
|
||||
$('#model_openai_select').val(model).trigger('change');
|
||||
}
|
||||
}
|
||||
|
||||
if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) {
|
||||
$('#model_custom_select').empty();
|
||||
$('#model_custom_select').append('<option value="">None</option>');
|
||||
model_list.forEach((model) => {
|
||||
$('#model_custom_select').append(
|
||||
$('<option>', {
|
||||
value: model.id,
|
||||
text: model.id,
|
||||
selected: model.id == oai_settings.custom_model,
|
||||
}));
|
||||
});
|
||||
|
||||
if (!oai_settings.custom_model && model_list.length > 0) {
|
||||
$('#model_custom_select').val(model_list[0].id).trigger('change');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function appendOpenRouterOptions(model_list, groupModels = false, sort = false) {
|
||||
@@ -1454,6 +1496,7 @@ async function sendOpenAIRequest(type, messages, signal) {
|
||||
const isGoogle = oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE;
|
||||
const isOAI = oai_settings.chat_completion_source == chat_completion_sources.OPENAI;
|
||||
const isMistral = oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI;
|
||||
const isCustom = oai_settings.chat_completion_source == chat_completion_sources.CUSTOM;
|
||||
const isTextCompletion = (isOAI && textCompletionModels.includes(oai_settings.openai_model)) || (isOpenRouter && oai_settings.openrouter_force_instruct && power_user.instruct.enabled);
|
||||
const isQuiet = type === 'quiet';
|
||||
const isImpersonate = type === 'impersonate';
|
||||
@@ -1480,7 +1523,7 @@ async function sendOpenAIRequest(type, messages, signal) {
|
||||
return sendWindowAIRequest(messages, signal, stream);
|
||||
}
|
||||
|
||||
const logitBiasSources = [chat_completion_sources.OPENAI, chat_completion_sources.OPENROUTER, chat_completion_sources.SCALE];
|
||||
const logitBiasSources = [chat_completion_sources.OPENAI, chat_completion_sources.OPENROUTER, chat_completion_sources.SCALE, chat_completion_sources.CUSTOM];
|
||||
if (oai_settings.bias_preset_selected
|
||||
&& logitBiasSources.includes(oai_settings.chat_completion_source)
|
||||
&& Array.isArray(oai_settings.bias_presets[oai_settings.bias_preset_selected])
|
||||
@@ -1528,7 +1571,9 @@ async function sendOpenAIRequest(type, messages, signal) {
|
||||
if (isClaude) {
|
||||
generate_data['top_k'] = Number(oai_settings.top_k_openai);
|
||||
generate_data['exclude_assistant'] = oai_settings.exclude_assistant;
|
||||
generate_data['claude_use_sysprompt'] = oai_settings.claude_use_sysprompt;
|
||||
generate_data['stop'] = getCustomStoppingStrings(); // Claude shouldn't have limits on stop strings.
|
||||
generate_data['human_sysprompt_message'] = substituteParams(oai_settings.human_sysprompt_message);
|
||||
// Don't add a prefill on quiet gens (summarization)
|
||||
if (!isQuiet && !oai_settings.exclude_assistant) {
|
||||
generate_data['assistant_prefill'] = substituteParams(oai_settings.assistant_prefill);
|
||||
@@ -1565,7 +1610,14 @@ async function sendOpenAIRequest(type, messages, signal) {
|
||||
generate_data['safe_mode'] = false; // already defaults to false, but just incase they change that in the future.
|
||||
}
|
||||
|
||||
if ((isOAI || isOpenRouter || isMistral) && oai_settings.seed >= 0) {
|
||||
if (isCustom) {
|
||||
generate_data['custom_url'] = oai_settings.custom_url;
|
||||
generate_data['custom_include_body'] = oai_settings.custom_include_body;
|
||||
generate_data['custom_exclude_body'] = oai_settings.custom_exclude_body;
|
||||
generate_data['custom_include_headers'] = oai_settings.custom_include_headers;
|
||||
}
|
||||
|
||||
if ((isOAI || isOpenRouter || isMistral || isCustom) && oai_settings.seed >= 0) {
|
||||
generate_data['seed'] = oai_settings.seed;
|
||||
}
|
||||
|
||||
@@ -1964,7 +2016,7 @@ class ChatCompletion {
|
||||
|
||||
for (let message of this.messages.collection) {
|
||||
if (!excludeList.includes(message.identifier) && message.role === 'system' && !message.name) {
|
||||
if (lastMessage && lastMessage.role === 'system') {
|
||||
if (lastMessage && message.content && lastMessage.role === 'system') {
|
||||
lastMessage.content += '\n' + message.content;
|
||||
lastMessage.tokens = tokenHandler.count({ role: lastMessage.role, content: lastMessage.content });
|
||||
}
|
||||
@@ -2311,12 +2363,18 @@ function loadOpenAISettings(data, settings) {
|
||||
oai_settings.openrouter_force_instruct = settings.openrouter_force_instruct ?? default_settings.openrouter_force_instruct;
|
||||
oai_settings.ai21_model = settings.ai21_model ?? default_settings.ai21_model;
|
||||
oai_settings.mistralai_model = settings.mistralai_model ?? default_settings.mistralai_model;
|
||||
oai_settings.custom_model = settings.custom_model ?? default_settings.custom_model;
|
||||
oai_settings.custom_url = settings.custom_url ?? default_settings.custom_url;
|
||||
oai_settings.custom_include_body = settings.custom_include_body ?? default_settings.custom_include_body;
|
||||
oai_settings.custom_exclude_body = settings.custom_exclude_body ?? default_settings.custom_exclude_body;
|
||||
oai_settings.custom_include_headers = settings.custom_include_headers ?? default_settings.custom_include_headers;
|
||||
oai_settings.google_model = settings.google_model ?? default_settings.google_model;
|
||||
oai_settings.chat_completion_source = settings.chat_completion_source ?? default_settings.chat_completion_source;
|
||||
oai_settings.api_url_scale = settings.api_url_scale ?? default_settings.api_url_scale;
|
||||
oai_settings.show_external_models = settings.show_external_models ?? default_settings.show_external_models;
|
||||
oai_settings.proxy_password = settings.proxy_password ?? default_settings.proxy_password;
|
||||
oai_settings.assistant_prefill = settings.assistant_prefill ?? default_settings.assistant_prefill;
|
||||
oai_settings.human_sysprompt_message = settings.human_sysprompt_message ?? default_settings.human_sysprompt_message;
|
||||
oai_settings.image_inlining = settings.image_inlining ?? default_settings.image_inlining;
|
||||
oai_settings.bypass_status_check = settings.bypass_status_check ?? default_settings.bypass_status_check;
|
||||
|
||||
@@ -2328,6 +2386,7 @@ function loadOpenAISettings(data, settings) {
|
||||
oai_settings.new_example_chat_prompt = settings.new_example_chat_prompt ?? default_settings.new_example_chat_prompt;
|
||||
oai_settings.continue_nudge_prompt = settings.continue_nudge_prompt ?? default_settings.continue_nudge_prompt;
|
||||
oai_settings.squash_system_messages = settings.squash_system_messages ?? default_settings.squash_system_messages;
|
||||
oai_settings.continue_prefill = settings.continue_prefill ?? default_settings.continue_prefill;
|
||||
|
||||
if (settings.wrap_in_quotes !== undefined) oai_settings.wrap_in_quotes = !!settings.wrap_in_quotes;
|
||||
if (settings.names_in_completion !== undefined) oai_settings.names_in_completion = !!settings.names_in_completion;
|
||||
@@ -2335,11 +2394,13 @@ function loadOpenAISettings(data, settings) {
|
||||
if (settings.use_ai21_tokenizer !== undefined) { oai_settings.use_ai21_tokenizer = !!settings.use_ai21_tokenizer; oai_settings.use_ai21_tokenizer ? ai21_max = 8191 : ai21_max = 9200; }
|
||||
if (settings.use_google_tokenizer !== undefined) oai_settings.use_google_tokenizer = !!settings.use_google_tokenizer;
|
||||
if (settings.exclude_assistant !== undefined) oai_settings.exclude_assistant = !!settings.exclude_assistant;
|
||||
if (settings.claude_use_sysprompt !== undefined) oai_settings.claude_use_sysprompt = !!settings.claude_use_sysprompt;
|
||||
if (settings.use_alt_scale !== undefined) { oai_settings.use_alt_scale = !!settings.use_alt_scale; updateScaleForm(); }
|
||||
$('#stream_toggle').prop('checked', oai_settings.stream_openai);
|
||||
$('#api_url_scale').val(oai_settings.api_url_scale);
|
||||
$('#openai_proxy_password').val(oai_settings.proxy_password);
|
||||
$('#claude_assistant_prefill').val(oai_settings.assistant_prefill);
|
||||
$('#claude_human_sysprompt_textarea').val(oai_settings.human_sysprompt_message);
|
||||
$('#openai_image_inlining').prop('checked', oai_settings.image_inlining);
|
||||
$('#openai_bypass_status_check').prop('checked', oai_settings.bypass_status_check);
|
||||
|
||||
@@ -2355,6 +2416,8 @@ function loadOpenAISettings(data, settings) {
|
||||
$(`#model_ai21_select option[value="${oai_settings.ai21_model}"`).attr('selected', true);
|
||||
$('#model_mistralai_select').val(oai_settings.mistralai_model);
|
||||
$(`#model_mistralai_select option[value="${oai_settings.mistralai_model}"`).attr('selected', true);
|
||||
$('#custom_model_id').val(oai_settings.custom_model);
|
||||
$('#custom_api_url_text').val(oai_settings.custom_url);
|
||||
$('#openai_max_context').val(oai_settings.openai_max_context);
|
||||
$('#openai_max_context_counter').val(`${oai_settings.openai_max_context}`);
|
||||
$('#model_openrouter_select').val(oai_settings.openrouter_model);
|
||||
@@ -2370,11 +2433,13 @@ function loadOpenAISettings(data, settings) {
|
||||
$('#use_ai21_tokenizer').prop('checked', oai_settings.use_ai21_tokenizer);
|
||||
$('#use_google_tokenizer').prop('checked', oai_settings.use_google_tokenizer);
|
||||
$('#exclude_assistant').prop('checked', oai_settings.exclude_assistant);
|
||||
$('#claude_use_sysprompt').prop('checked', oai_settings.claude_use_sysprompt);
|
||||
$('#scale-alt').prop('checked', oai_settings.use_alt_scale);
|
||||
$('#openrouter_use_fallback').prop('checked', oai_settings.openrouter_use_fallback);
|
||||
$('#openrouter_force_instruct').prop('checked', oai_settings.openrouter_force_instruct);
|
||||
$('#openrouter_group_models').prop('checked', oai_settings.openrouter_group_models);
|
||||
$('#squash_system_messages').prop('checked', oai_settings.squash_system_messages);
|
||||
$('#continue_prefill').prop('checked', oai_settings.continue_prefill);
|
||||
if (settings.impersonation_prompt !== undefined) oai_settings.impersonation_prompt = settings.impersonation_prompt;
|
||||
|
||||
$('#impersonation_prompt_textarea').val(oai_settings.impersonation_prompt);
|
||||
@@ -2466,7 +2531,13 @@ async function getStatusOpen() {
|
||||
validateReverseProxy();
|
||||
}
|
||||
|
||||
const canBypass = oai_settings.chat_completion_source === chat_completion_sources.OPENAI && oai_settings.bypass_status_check;
|
||||
if (oai_settings.chat_completion_source === chat_completion_sources.CUSTOM) {
|
||||
$('#model_custom_select').empty();
|
||||
data.custom_url = oai_settings.custom_url;
|
||||
data.custom_include_headers = oai_settings.custom_include_headers;
|
||||
}
|
||||
|
||||
const canBypass = (oai_settings.chat_completion_source === chat_completion_sources.OPENAI && oai_settings.bypass_status_check) || oai_settings.chat_completion_source === chat_completion_sources.CUSTOM;
|
||||
if (canBypass) {
|
||||
setOnlineStatus('Status check bypassed');
|
||||
}
|
||||
@@ -2533,6 +2604,11 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
|
||||
openrouter_sort_models: settings.openrouter_sort_models,
|
||||
ai21_model: settings.ai21_model,
|
||||
mistralai_model: settings.mistralai_model,
|
||||
custom_model: settings.custom_model,
|
||||
custom_url: settings.custom_url,
|
||||
custom_include_body: settings.custom_include_body,
|
||||
custom_exclude_body: settings.custom_exclude_body,
|
||||
custom_include_headers: settings.custom_include_headers,
|
||||
google_model: settings.google_model,
|
||||
temperature: settings.temp_openai,
|
||||
frequency_penalty: settings.freq_pen_openai,
|
||||
@@ -2566,12 +2642,16 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
|
||||
api_url_scale: settings.api_url_scale,
|
||||
show_external_models: settings.show_external_models,
|
||||
assistant_prefill: settings.assistant_prefill,
|
||||
human_sysprompt_message: settings.human_sysprompt_message,
|
||||
use_ai21_tokenizer: settings.use_ai21_tokenizer,
|
||||
use_google_tokenizer: settings.use_google_tokenizer,
|
||||
exclude_assistant: settings.exclude_assistant,
|
||||
claude_use_sysprompt: settings.claude_use_sysprompt,
|
||||
use_alt_scale: settings.use_alt_scale,
|
||||
squash_system_messages: settings.squash_system_messages,
|
||||
image_inlining: settings.image_inlining,
|
||||
bypass_status_check: settings.bypass_status_check,
|
||||
continue_prefill: settings.continue_prefill,
|
||||
seed: settings.seed,
|
||||
};
|
||||
|
||||
@@ -2905,6 +2985,11 @@ function onSettingsPresetChange() {
|
||||
openrouter_sort_models: ['#openrouter_sort_models', 'openrouter_sort_models', false],
|
||||
ai21_model: ['#model_ai21_select', 'ai21_model', false],
|
||||
mistralai_model: ['#model_mistralai_select', 'mistralai_model', false],
|
||||
custom_model: ['#custom_model_id', 'custom_model', false],
|
||||
custom_url: ['#custom_api_url_text', 'custom_url', false],
|
||||
custom_include_body: ['#custom_include_body', 'custom_include_body', false],
|
||||
custom_exclude_body: ['#custom_exclude_body', 'custom_exclude_body', false],
|
||||
custom_include_headers: ['#custom_include_headers', 'custom_include_headers', false],
|
||||
google_model: ['#model_google_select', 'google_model', false],
|
||||
openai_max_context: ['#openai_max_context', 'openai_max_context', false],
|
||||
openai_max_tokens: ['#openai_max_tokens', 'openai_max_tokens', false],
|
||||
@@ -2929,12 +3014,15 @@ function onSettingsPresetChange() {
|
||||
show_external_models: ['#openai_show_external_models', 'show_external_models', true],
|
||||
proxy_password: ['#openai_proxy_password', 'proxy_password', false],
|
||||
assistant_prefill: ['#claude_assistant_prefill', 'assistant_prefill', false],
|
||||
human_sysprompt_message: ['#claude_human_sysprompt_textarea', 'human_sysprompt_message', false],
|
||||
use_ai21_tokenizer: ['#use_ai21_tokenizer', 'use_ai21_tokenizer', true],
|
||||
use_google_tokenizer: ['#use_google_tokenizer', 'use_google_tokenizer', true],
|
||||
exclude_assistant: ['#exclude_assistant', 'exclude_assistant', true],
|
||||
claude_use_sysprompt: ['#claude_use_sysprompt', 'claude_use_sysprompt', true],
|
||||
use_alt_scale: ['#use_alt_scale', 'use_alt_scale', true],
|
||||
squash_system_messages: ['#squash_system_messages', 'squash_system_messages', true],
|
||||
image_inlining: ['#openai_image_inlining', 'image_inlining', true],
|
||||
continue_prefill: ['#continue_prefill', 'continue_prefill', true],
|
||||
seed: ['#seed_openai', 'seed', false],
|
||||
};
|
||||
|
||||
@@ -3094,6 +3182,12 @@ async function onModelChange() {
|
||||
oai_settings.mistralai_model = value;
|
||||
}
|
||||
|
||||
if (value && $(this).is('#model_custom_select')) {
|
||||
console.log('Custom model changed to', value);
|
||||
oai_settings.custom_model = value;
|
||||
$('#custom_model_id').val(value).trigger('input');
|
||||
}
|
||||
|
||||
if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) {
|
||||
if (oai_settings.max_context_unlocked) {
|
||||
$('#openai_max_context').attr('max', unlocked_max);
|
||||
@@ -3241,6 +3335,12 @@ async function onModelChange() {
|
||||
$('#top_k_openai').attr('max', 200).val(oai_settings.top_k_openai).trigger('input');
|
||||
}
|
||||
|
||||
if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) {
|
||||
$('#openai_max_context').attr('max', unlocked_max);
|
||||
oai_settings.openai_max_context = Math.min(Number($('#openai_max_context').attr('max')), oai_settings.openai_max_context);
|
||||
$('#openai_max_context').val(oai_settings.openai_max_context).trigger('input');
|
||||
}
|
||||
|
||||
$('#openai_max_context_counter').attr('max', Number($('#openai_max_context').attr('max')));
|
||||
|
||||
saveSettingsDebounced();
|
||||
@@ -3255,7 +3355,7 @@ async function onNewPresetClick() {
|
||||
const popupText = `
|
||||
<h3>Preset name:</h3>
|
||||
<h4>Hint: Use a character/group name to bind preset to a specific chat.</h4>`;
|
||||
const name = await callPopup(popupText, 'input');
|
||||
const name = await callPopup(popupText, 'input', oai_settings.preset_settings_openai);
|
||||
|
||||
if (!name) {
|
||||
return;
|
||||
@@ -3383,6 +3483,19 @@ async function onConnectButtonClick(e) {
|
||||
}
|
||||
}
|
||||
|
||||
if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) {
|
||||
const api_key_custom = String($('#api_key_custom').val()).trim();
|
||||
|
||||
if (api_key_custom.length) {
|
||||
await writeSecret(SECRET_KEYS.CUSTOM, api_key_custom);
|
||||
}
|
||||
|
||||
if (!oai_settings.custom_url) {
|
||||
console.log('No API URL saved for Custom');
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
startStatusLoading();
|
||||
saveSettingsDebounced();
|
||||
await getStatusOpen();
|
||||
@@ -3418,6 +3531,9 @@ function toggleChatCompletionForms() {
|
||||
else if (oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI) {
|
||||
$('#model_mistralai_select').trigger('change');
|
||||
}
|
||||
else if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) {
|
||||
$('#model_custom_select').trigger('change');
|
||||
}
|
||||
$('[data-source]').each(function () {
|
||||
const validSources = $(this).data('source').split(',');
|
||||
$(this).toggle(validSources.includes(oai_settings.chat_completion_source));
|
||||
@@ -3425,6 +3541,7 @@ function toggleChatCompletionForms() {
|
||||
|
||||
if (chat_completion_sources.CLAUDE == oai_settings.chat_completion_source) {
|
||||
$('#claude_assistant_prefill_block').toggle(!oai_settings.exclude_assistant);
|
||||
$('#claude_human_sysprompt_message_block').toggle(oai_settings.claude_use_sysprompt);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3468,6 +3585,42 @@ function updateScaleForm() {
|
||||
}
|
||||
}
|
||||
|
||||
function onCustomizeParametersClick() {
|
||||
const template = $(`
|
||||
<div class="flex-container flexFlowColumn height100p">
|
||||
<h3>Additional Parameters</h3>
|
||||
<div class="flex1 flex-container flexFlowColumn">
|
||||
<h4>Include Body Parameters</h4>
|
||||
<textarea id="custom_include_body" class="flex1" placeholder="Parameters to be included in the Chat Completion request body (YAML object) Example: - top_k: 20 - repetition_penalty: 1.1"></textarea>
|
||||
</div>
|
||||
<div class="flex1 flex-container flexFlowColumn">
|
||||
<h4>Exclude Body Parameters</h4>
|
||||
<textarea id="custom_exclude_body" class="flex1" placeholder="Parameters to be excluded from the Chat Completion request body (YAML array) Example: - frequency_penalty - presence_penalty"></textarea>
|
||||
</div>
|
||||
<div class="flex1 flex-container flexFlowColumn">
|
||||
<h4>Include Request Headers</h4>
|
||||
<textarea id="custom_include_headers" class="flex1" placeholder="Additional headers for Chat Completion requests (YAML object) Example: - CustomHeader: custom-value - AnotherHeader: custom-value"></textarea>
|
||||
</div>
|
||||
</div>`);
|
||||
|
||||
template.find('#custom_include_body').val(oai_settings.custom_include_body).on('input', function () {
|
||||
oai_settings.custom_include_body = String($(this).val());
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
|
||||
template.find('#custom_exclude_body').val(oai_settings.custom_exclude_body).on('input', function () {
|
||||
oai_settings.custom_exclude_body = String($(this).val());
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
|
||||
template.find('#custom_include_headers').val(oai_settings.custom_include_headers).on('input', function () {
|
||||
oai_settings.custom_include_headers = String($(this).val());
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
|
||||
callPopup(template, 'text', '', { wide: true, large: true });
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the model supports image inlining
|
||||
* @returns {boolean} True if the model supports image inlining
|
||||
@@ -3479,7 +3632,7 @@ export function isImageInliningSupported() {
|
||||
|
||||
const gpt4v = 'gpt-4-vision';
|
||||
const geminiProV = 'gemini-pro-vision';
|
||||
const llava13b = 'llava-13b';
|
||||
const llava = 'llava';
|
||||
|
||||
if (!oai_settings.image_inlining) {
|
||||
return false;
|
||||
@@ -3491,7 +3644,9 @@ export function isImageInliningSupported() {
|
||||
case chat_completion_sources.MAKERSUITE:
|
||||
return oai_settings.google_model.includes(geminiProV);
|
||||
case chat_completion_sources.OPENROUTER:
|
||||
return oai_settings.openrouter_model.includes(gpt4v) || oai_settings.openrouter_model.includes(llava13b);
|
||||
return !oai_settings.openrouter_force_instruct && (oai_settings.openrouter_model.includes(gpt4v) || oai_settings.openrouter_model.includes(llava));
|
||||
case chat_completion_sources.CUSTOM:
|
||||
return oai_settings.custom_model.includes(gpt4v) || oai_settings.custom_model.includes(llava) || oai_settings.custom_model.includes(geminiProV);
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
@@ -3585,6 +3740,12 @@ $(document).ready(async function () {
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
|
||||
$('#claude_use_sysprompt').on('change', function () {
|
||||
oai_settings.claude_use_sysprompt = !!$('#claude_use_sysprompt').prop('checked');
|
||||
$('#claude_human_sysprompt_message_block').toggle(oai_settings.claude_use_sysprompt);
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
|
||||
$('#names_in_completion').on('change', function () {
|
||||
oai_settings.names_in_completion = !!$('#names_in_completion').prop('checked');
|
||||
saveSettingsDebounced();
|
||||
@@ -3658,6 +3819,12 @@ $(document).ready(async function () {
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
|
||||
$('#claude_human_sysprompt_message_restore').on('click', function () {
|
||||
oai_settings.human_sysprompt_message = default_claude_human_sysprompt_message;
|
||||
$('#claude_human_sysprompt_textarea').val(oai_settings.human_sysprompt_message);
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
|
||||
$('#newgroupchat_prompt_restore').on('click', function () {
|
||||
oai_settings.new_group_chat_prompt = default_new_group_chat_prompt;
|
||||
$('#newgroupchat_prompt_textarea').val(oai_settings.new_group_chat_prompt);
|
||||
@@ -3745,6 +3912,11 @@ $(document).ready(async function () {
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
|
||||
$('#claude_human_sysprompt_textarea').on('input', function () {
|
||||
oai_settings.human_sysprompt_message = String($('#claude_human_sysprompt_textarea').val());
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
|
||||
$('#openrouter_use_fallback').on('input', function () {
|
||||
oai_settings.openrouter_use_fallback = !!$(this).prop('checked');
|
||||
saveSettingsDebounced();
|
||||
@@ -3775,11 +3947,26 @@ $(document).ready(async function () {
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
|
||||
$('#continue_prefill').on('input', function () {
|
||||
oai_settings.continue_prefill = !!$(this).prop('checked');
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
|
||||
$('#seed_openai').on('input', function () {
|
||||
oai_settings.seed = Number($(this).val());
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
|
||||
$('#custom_api_url_text').on('input', function () {
|
||||
oai_settings.custom_url = String($(this).val());
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
|
||||
$('#custom_model_id').on('input', function () {
|
||||
oai_settings.custom_model = String($(this).val());
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
|
||||
$(document).on('input', '#openai_settings .autoSetHeight', function () {
|
||||
resetScrollHeight($(this));
|
||||
});
|
||||
@@ -3796,6 +3983,7 @@ $(document).ready(async function () {
|
||||
$('#openrouter_sort_models').on('change', onOpenrouterModelSortChange);
|
||||
$('#model_ai21_select').on('change', onModelChange);
|
||||
$('#model_mistralai_select').on('change', onModelChange);
|
||||
$('#model_custom_select').on('change', onModelChange);
|
||||
$('#settings_preset_openai').on('change', onSettingsPresetChange);
|
||||
$('#new_oai_preset').on('click', onNewPresetClick);
|
||||
$('#delete_oai_preset').on('click', onDeletePresetClick);
|
||||
@@ -3810,4 +3998,5 @@ $(document).ready(async function () {
|
||||
$('#openai_logit_bias_delete_preset').on('click', onLogitBiasPresetDeleteClick);
|
||||
$('#import_oai_preset').on('click', onImportPresetClick);
|
||||
$('#openai_proxy_password_show').on('click', onProxyPasswordShowClick);
|
||||
$('#customize_additional_parameters').on('click', onCustomizeParametersClick);
|
||||
});
|
||||
|
@@ -193,6 +193,22 @@ export function autoSelectPersona(name) {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the name of a persona if it exists.
|
||||
* @param {string} avatarId User avatar id
|
||||
* @param {string} newName New name for the persona
|
||||
*/
|
||||
export async function updatePersonaNameIfExists(avatarId, newName) {
|
||||
if (avatarId in power_user.personas) {
|
||||
power_user.personas[avatarId] = newName;
|
||||
await getUserAvatars();
|
||||
saveSettingsDebounced();
|
||||
console.log(`Updated persona name for ${avatarId} to ${newName}`);
|
||||
} else {
|
||||
console.log(`Persona name ${avatarId} was not updated because it does not exist`);
|
||||
}
|
||||
}
|
||||
|
||||
async function bindUserNameToPersona() {
|
||||
const avatarId = $(this).closest('.avatar-container').find('.avatar').attr('imgfile');
|
||||
|
||||
|
@@ -36,6 +36,7 @@ import {
|
||||
import { registerSlashCommand } from './slash-commands.js';
|
||||
import { tags } from './tags.js';
|
||||
import { tokenizers } from './tokenizers.js';
|
||||
import { BIAS_CACHE } from './logit-bias.js';
|
||||
|
||||
import { countOccurrences, debounce, delay, isOdd, resetScrollHeight, shuffle, sortMoments, stringToRange, timestampToMoment } from './utils.js';
|
||||
|
||||
@@ -204,6 +205,7 @@ let power_user = {
|
||||
names: false,
|
||||
names_force_groups: true,
|
||||
activation_regex: '',
|
||||
bind_to_context: false,
|
||||
},
|
||||
|
||||
default_context: 'Default',
|
||||
@@ -1717,17 +1719,18 @@ function loadContextSettings() {
|
||||
}
|
||||
});
|
||||
|
||||
// Select matching instruct preset
|
||||
for (const instruct_preset of instruct_presets) {
|
||||
// If instruct preset matches the context template
|
||||
if (instruct_preset.name === name) {
|
||||
selectInstructPreset(instruct_preset.name);
|
||||
break;
|
||||
if (power_user.instruct.bind_to_context) {
|
||||
// Select matching instruct preset
|
||||
for (const instruct_preset of instruct_presets) {
|
||||
// If instruct preset matches the context template
|
||||
if (instruct_preset.name === name) {
|
||||
selectInstructPreset(instruct_preset.name);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
highlightDefaultContext();
|
||||
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
|
||||
@@ -2932,6 +2935,7 @@ $(document).ready(() => {
|
||||
$('#tokenizer').on('change', function () {
|
||||
const value = $(this).find(':selected').val();
|
||||
power_user.tokenizer = Number(value);
|
||||
BIAS_CACHE.clear();
|
||||
saveSettingsDebounced();
|
||||
|
||||
// Trigger character editor re-tokenize
|
||||
|
@@ -303,8 +303,14 @@ class PresetManager {
|
||||
'model_novel',
|
||||
'streaming_kobold',
|
||||
'enabled',
|
||||
'bind_to_context',
|
||||
'seed',
|
||||
'legacy_api',
|
||||
'mancer_model',
|
||||
'togetherai_model',
|
||||
'ollama_model',
|
||||
'server_urls',
|
||||
'type',
|
||||
];
|
||||
const settings = Object.assign({}, getSettingsByApiId(this.apiId));
|
||||
|
||||
@@ -323,7 +329,7 @@ class PresetManager {
|
||||
}
|
||||
|
||||
async deleteCurrentPreset() {
|
||||
const { preset_names } = this.getPresetList();
|
||||
const { preset_names, presets } = this.getPresetList();
|
||||
const value = this.getSelectedPreset();
|
||||
const nameToDelete = this.getSelectedPresetName();
|
||||
|
||||
@@ -335,7 +341,9 @@ class PresetManager {
|
||||
$(this.select).find(`option[value="${value}"]`).remove();
|
||||
|
||||
if (this.isKeyedApi()) {
|
||||
preset_names.splice(preset_names.indexOf(value), 1);
|
||||
const index = preset_names.indexOf(nameToDelete);
|
||||
preset_names.splice(index, 1);
|
||||
presets.splice(index, 1);
|
||||
} else {
|
||||
delete preset_names[nameToDelete];
|
||||
}
|
||||
|
@@ -15,6 +15,8 @@ export const SECRET_KEYS = {
|
||||
MAKERSUITE: 'api_key_makersuite',
|
||||
SERPAPI: 'api_key_serpapi',
|
||||
MISTRALAI: 'api_key_mistralai',
|
||||
TOGETHERAI: 'api_key_togetherai',
|
||||
CUSTOM: 'api_key_custom',
|
||||
};
|
||||
|
||||
const INPUT_MAP = {
|
||||
@@ -31,6 +33,8 @@ const INPUT_MAP = {
|
||||
[SECRET_KEYS.APHRODITE]: '#api_key_aphrodite',
|
||||
[SECRET_KEYS.TABBY]: '#api_key_tabby',
|
||||
[SECRET_KEYS.MISTRALAI]: '#api_key_mistralai',
|
||||
[SECRET_KEYS.CUSTOM]: '#api_key_custom',
|
||||
[SECRET_KEYS.TOGETHERAI]: '#api_key_togetherai',
|
||||
};
|
||||
|
||||
async function clearSecret() {
|
||||
|
@@ -20,7 +20,7 @@ import {
|
||||
main_api,
|
||||
name1,
|
||||
reloadCurrentChat,
|
||||
replaceBiasMarkup,
|
||||
removeMacros,
|
||||
saveChatConditional,
|
||||
sendMessageAsUser,
|
||||
sendSystemMessage,
|
||||
@@ -842,6 +842,38 @@ async function unhideMessageCallback(_, arg) {
|
||||
return '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Copium for running group actions when the member is offscreen.
|
||||
* @param {number} chid - character ID
|
||||
* @param {string} action - one of 'enable', 'disable', 'up', 'down', 'view', 'remove'
|
||||
* @returns {void}
|
||||
*/
|
||||
function performGroupMemberAction(chid, action) {
|
||||
const memberSelector = `.group_member[chid="${chid}"]`;
|
||||
// Do not optimize. Paginator gets recreated on every action
|
||||
const paginationSelector = '#rm_group_members_pagination';
|
||||
const pageSizeSelector = '#rm_group_members_pagination select';
|
||||
let wasOffscreen = false;
|
||||
let paginationValue = null;
|
||||
let pageValue = null;
|
||||
|
||||
if ($(memberSelector).length === 0) {
|
||||
wasOffscreen = true;
|
||||
paginationValue = Number($(pageSizeSelector).val());
|
||||
pageValue = $(paginationSelector).pagination('getCurrentPageNum');
|
||||
$(pageSizeSelector).val($(pageSizeSelector).find('option').last().val()).trigger('change');
|
||||
}
|
||||
|
||||
$(memberSelector).find(`[data-action="${action}"]`).trigger('click');
|
||||
|
||||
if (wasOffscreen) {
|
||||
$(pageSizeSelector).val(paginationValue).trigger('change');
|
||||
if ($(paginationSelector).length) {
|
||||
$(paginationSelector).pagination('go', pageValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function disableGroupMemberCallback(_, arg) {
|
||||
if (!selected_group) {
|
||||
toastr.warning('Cannot run /disable command outside of a group chat.');
|
||||
@@ -855,7 +887,7 @@ async function disableGroupMemberCallback(_, arg) {
|
||||
return '';
|
||||
}
|
||||
|
||||
$(`.group_member[chid="${chid}"] [data-action="disable"]`).trigger('click');
|
||||
performGroupMemberAction(chid, 'disable');
|
||||
return '';
|
||||
}
|
||||
|
||||
@@ -872,7 +904,7 @@ async function enableGroupMemberCallback(_, arg) {
|
||||
return '';
|
||||
}
|
||||
|
||||
$(`.group_member[chid="${chid}"] [data-action="enable"]`).trigger('click');
|
||||
performGroupMemberAction(chid, 'enable');
|
||||
return '';
|
||||
}
|
||||
|
||||
@@ -889,7 +921,7 @@ async function moveGroupMemberUpCallback(_, arg) {
|
||||
return '';
|
||||
}
|
||||
|
||||
$(`.group_member[chid="${chid}"] [data-action="up"]`).trigger('click');
|
||||
performGroupMemberAction(chid, 'up');
|
||||
return '';
|
||||
}
|
||||
|
||||
@@ -906,7 +938,7 @@ async function moveGroupMemberDownCallback(_, arg) {
|
||||
return '';
|
||||
}
|
||||
|
||||
$(`.group_member[chid="${chid}"] [data-action="down"]`).trigger('click');
|
||||
performGroupMemberAction(chid, 'down');
|
||||
return '';
|
||||
}
|
||||
|
||||
@@ -928,7 +960,7 @@ async function peekCallback(_, arg) {
|
||||
return '';
|
||||
}
|
||||
|
||||
$(`.group_member[chid="${chid}"] [data-action="view"]`).trigger('click');
|
||||
performGroupMemberAction(chid, 'view');
|
||||
return '';
|
||||
}
|
||||
|
||||
@@ -950,7 +982,7 @@ async function removeGroupMemberCallback(_, arg) {
|
||||
return '';
|
||||
}
|
||||
|
||||
$(`.group_member[chid="${chid}"] [data-action="remove"]`).trigger('click');
|
||||
performGroupMemberAction(chid, 'remove');
|
||||
return '';
|
||||
}
|
||||
|
||||
@@ -1228,7 +1260,7 @@ export async function sendMessageAs(args, text) {
|
||||
|
||||
// Messages that do nothing but set bias will be hidden from the context
|
||||
const bias = extractMessageBias(mesText);
|
||||
const isSystem = replaceBiasMarkup(mesText).trim().length === 0;
|
||||
const isSystem = bias && !removeMacros(mesText).length;
|
||||
|
||||
const character = characters.find(x => x.name === name);
|
||||
let force_avatar, original_avatar;
|
||||
@@ -1281,7 +1313,7 @@ export async function sendNarratorMessage(args, text) {
|
||||
const name = chat_metadata[NARRATOR_NAME_KEY] || NARRATOR_NAME_DEFAULT;
|
||||
// Messages that do nothing but set bias will be hidden from the context
|
||||
const bias = extractMessageBias(text);
|
||||
const isSystem = replaceBiasMarkup(text).trim().length === 0;
|
||||
const isSystem = bias && !removeMacros(text).length;
|
||||
|
||||
const message = {
|
||||
name: name,
|
||||
|
@@ -15,7 +15,7 @@
|
||||
<h3>Confused or lost?</h3>
|
||||
<ul>
|
||||
<li>
|
||||
<span class="note-link-span">?</span> - click these icons!
|
||||
<span class="note-link-span"><a class="fa-solid fa-circle-question" target="_blank" href="https://docs.sillytavern.app/"></a></span> - click these icons!
|
||||
</li>
|
||||
<li>
|
||||
Enter <code>/?</code> in the chat bar
|
||||
|
202
public/scripts/textgen-models.js
Normal file
202
public/scripts/textgen-models.js
Normal file
@@ -0,0 +1,202 @@
|
||||
import { callPopup, getRequestHeaders, setGenerationParamsFromPreset } from '../script.js';
|
||||
import { isMobile } from './RossAscends-mods.js';
|
||||
import { textgenerationwebui_settings as textgen_settings, textgen_types } from './textgen-settings.js';
|
||||
|
||||
let mancerModels = [];
|
||||
let togetherModels = [];
|
||||
|
||||
export async function loadOllamaModels(data) {
|
||||
if (!Array.isArray(data)) {
|
||||
console.error('Invalid Ollama models data', data);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!data.find(x => x.id === textgen_settings.ollama_model)) {
|
||||
textgen_settings.ollama_model = data[0]?.id || '';
|
||||
}
|
||||
|
||||
$('#ollama_model').empty();
|
||||
for (const model of data) {
|
||||
const option = document.createElement('option');
|
||||
option.value = model.id;
|
||||
option.text = model.name;
|
||||
option.selected = model.id === textgen_settings.ollama_model;
|
||||
$('#ollama_model').append(option);
|
||||
}
|
||||
}
|
||||
|
||||
export async function loadTogetherAIModels(data) {
|
||||
if (!Array.isArray(data)) {
|
||||
console.error('Invalid Together AI models data', data);
|
||||
return;
|
||||
}
|
||||
|
||||
togetherModels = data;
|
||||
|
||||
if (!data.find(x => x.name === textgen_settings.togetherai_model)) {
|
||||
textgen_settings.togetherai_model = data[0]?.name || '';
|
||||
}
|
||||
|
||||
$('#model_togetherai_select').empty();
|
||||
for (const model of data) {
|
||||
// Hey buddy, I think you've got the wrong door.
|
||||
if (model.display_type === 'image') {
|
||||
continue;
|
||||
}
|
||||
|
||||
const option = document.createElement('option');
|
||||
option.value = model.name;
|
||||
option.text = model.display_name;
|
||||
option.selected = model.name === textgen_settings.togetherai_model;
|
||||
$('#model_togetherai_select').append(option);
|
||||
}
|
||||
}
|
||||
|
||||
export async function loadMancerModels(data) {
|
||||
if (!Array.isArray(data)) {
|
||||
console.error('Invalid Mancer models data', data);
|
||||
return;
|
||||
}
|
||||
|
||||
mancerModels = data;
|
||||
|
||||
if (!data.find(x => x.id === textgen_settings.mancer_model)) {
|
||||
textgen_settings.mancer_model = data[0]?.id || '';
|
||||
}
|
||||
|
||||
$('#mancer_model').empty();
|
||||
for (const model of data) {
|
||||
const option = document.createElement('option');
|
||||
option.value = model.id;
|
||||
option.text = model.name;
|
||||
option.selected = model.id === textgen_settings.mancer_model;
|
||||
$('#mancer_model').append(option);
|
||||
}
|
||||
}
|
||||
|
||||
function onMancerModelSelect() {
|
||||
const modelId = String($('#mancer_model').val());
|
||||
textgen_settings.mancer_model = modelId;
|
||||
$('#api_button_textgenerationwebui').trigger('click');
|
||||
|
||||
const limits = mancerModels.find(x => x.id === modelId)?.limits;
|
||||
setGenerationParamsFromPreset({ max_length: limits.context, genamt: limits.completion });
|
||||
}
|
||||
|
||||
function onTogetherModelSelect() {
|
||||
const modelName = String($('#model_togetherai_select').val());
|
||||
textgen_settings.togetherai_model = modelName;
|
||||
$('#api_button_textgenerationwebui').trigger('click');
|
||||
const model = togetherModels.find(x => x.name === modelName);
|
||||
setGenerationParamsFromPreset({ max_length: model.context_length });
|
||||
}
|
||||
|
||||
function onOllamaModelSelect() {
|
||||
const modelId = String($('#ollama_model').val());
|
||||
textgen_settings.ollama_model = modelId;
|
||||
$('#api_button_textgenerationwebui').trigger('click');
|
||||
}
|
||||
|
||||
function getMancerModelTemplate(option) {
|
||||
const model = mancerModels.find(x => x.id === option?.element?.value);
|
||||
|
||||
if (!option.id || !model) {
|
||||
return option.text;
|
||||
}
|
||||
|
||||
const creditsPerPrompt = (model.limits?.context - model.limits?.completion) * model.pricing?.prompt;
|
||||
const creditsPerCompletion = model.limits?.completion * model.pricing?.completion;
|
||||
const creditsTotal = Math.round(creditsPerPrompt + creditsPerCompletion).toFixed(0);
|
||||
|
||||
return $((`
|
||||
<div class="flex-container flexFlowColumn">
|
||||
<div><strong>${DOMPurify.sanitize(model.name)}</strong> | <span>${model.limits?.context} ctx</span> / <span>${model.limits?.completion} res</span> | <small>Credits per request (max): ${creditsTotal}</small></div>
|
||||
</div>
|
||||
`));
|
||||
}
|
||||
|
||||
function getTogetherModelTemplate(option) {
|
||||
const model = togetherModels.find(x => x.name === option?.element?.value);
|
||||
|
||||
if (!option.id || !model) {
|
||||
return option.text;
|
||||
}
|
||||
|
||||
return $((`
|
||||
<div class="flex-container flexFlowColumn">
|
||||
<div><strong>${DOMPurify.sanitize(model.name)}</strong> | <span>${model.context_length || '???'} tokens</span></div>
|
||||
<div><small>${DOMPurify.sanitize(model.description)}</small></div>
|
||||
</div>
|
||||
`));
|
||||
}
|
||||
|
||||
async function downloadOllamaModel() {
|
||||
try {
|
||||
const serverUrl = textgen_settings.server_urls[textgen_types.OLLAMA];
|
||||
|
||||
if (!serverUrl) {
|
||||
toastr.info('Please connect to an Ollama server first.');
|
||||
return;
|
||||
}
|
||||
|
||||
const html = `Enter a model tag, for example <code>llama2:latest</code>.<br>
|
||||
See <a target="_blank" href="https://ollama.ai/library">Library</a> for available models.`;
|
||||
const name = await callPopup(html, 'input', '', { okButton: 'Download' });
|
||||
|
||||
if (!name) {
|
||||
return;
|
||||
}
|
||||
|
||||
toastr.info('Download may take a while, please wait...', 'Working on it');
|
||||
|
||||
const response = await fetch('/api/backends/text-completions/ollama/download', {
|
||||
method: 'POST',
|
||||
headers: getRequestHeaders(),
|
||||
body: JSON.stringify({
|
||||
name: name,
|
||||
api_server: serverUrl,
|
||||
}),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(response.statusText);
|
||||
}
|
||||
|
||||
// Force refresh the model list
|
||||
toastr.success('Download complete. Please select the model from the dropdown.');
|
||||
$('#api_button_textgenerationwebui').trigger('click');
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
toastr.error('Failed to download Ollama model. Please try again.');
|
||||
}
|
||||
}
|
||||
|
||||
jQuery(function () {
|
||||
$('#mancer_model').on('change', onMancerModelSelect);
|
||||
$('#model_togetherai_select').on('change', onTogetherModelSelect);
|
||||
$('#ollama_model').on('change', onOllamaModelSelect);
|
||||
$('#ollama_download_model').on('click', downloadOllamaModel);
|
||||
|
||||
if (!isMobile()) {
|
||||
$('#mancer_model').select2({
|
||||
placeholder: 'Select a model',
|
||||
searchInputPlaceholder: 'Search models...',
|
||||
searchInputCssClass: 'text_pole',
|
||||
width: '100%',
|
||||
templateResult: getMancerModelTemplate,
|
||||
});
|
||||
$('#model_togetherai_select').select2({
|
||||
placeholder: 'Select a model',
|
||||
searchInputPlaceholder: 'Search models...',
|
||||
searchInputCssClass: 'text_pole',
|
||||
width: '100%',
|
||||
templateResult: getTogetherModelTemplate,
|
||||
});
|
||||
$('#ollama_model').select2({
|
||||
placeholder: 'Select a model',
|
||||
searchInputPlaceholder: 'Search models...',
|
||||
searchInputCssClass: 'text_pole',
|
||||
width: '100%',
|
||||
});
|
||||
}
|
||||
});
|
@@ -1,5 +1,4 @@
|
||||
import {
|
||||
api_server_textgenerationwebui,
|
||||
getRequestHeaders,
|
||||
getStoppingStrings,
|
||||
max_context,
|
||||
@@ -9,6 +8,7 @@ import {
|
||||
setOnlineStatus,
|
||||
substituteParams,
|
||||
} from '../script.js';
|
||||
import { BIAS_CACHE, createNewLogitBiasEntry, displayLogitBias, getLogitBiasListResult } from './logit-bias.js';
|
||||
|
||||
import {
|
||||
power_user,
|
||||
@@ -31,15 +31,29 @@ export const textgen_types = {
|
||||
APHRODITE: 'aphrodite',
|
||||
TABBY: 'tabby',
|
||||
KOBOLDCPP: 'koboldcpp',
|
||||
TOGETHERAI: 'togetherai',
|
||||
LLAMACPP: 'llamacpp',
|
||||
OLLAMA: 'ollama',
|
||||
};
|
||||
|
||||
const { MANCER, APHRODITE } = textgen_types;
|
||||
const { MANCER, APHRODITE, TOGETHERAI, OOBA, OLLAMA, LLAMACPP } = textgen_types;
|
||||
const BIAS_KEY = '#textgenerationwebui_api-settings';
|
||||
|
||||
// Maybe let it be configurable in the future?
|
||||
// (7 days later) The future has come.
|
||||
const MANCER_SERVER_KEY = 'mancer_server';
|
||||
const MANCER_SERVER_DEFAULT = 'https://neuro.mancer.tech';
|
||||
export let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEFAULT;
|
||||
let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEFAULT;
|
||||
let TOGETHERAI_SERVER = 'https://api.together.xyz';
|
||||
|
||||
const SERVER_INPUTS = {
|
||||
[textgen_types.OOBA]: '#textgenerationwebui_api_url_text',
|
||||
[textgen_types.APHRODITE]: '#aphrodite_api_url_text',
|
||||
[textgen_types.TABBY]: '#tabby_api_url_text',
|
||||
[textgen_types.KOBOLDCPP]: '#koboldcpp_api_url_text',
|
||||
[textgen_types.LLAMACPP]: '#llamacpp_api_url_text',
|
||||
[textgen_types.OLLAMA]: '#ollama_api_url_text',
|
||||
};
|
||||
|
||||
const KOBOLDCPP_ORDER = [6, 0, 1, 3, 4, 2, 5];
|
||||
const settings = {
|
||||
@@ -89,9 +103,13 @@ const settings = {
|
||||
//prompt_log_probs_aphrodite: 0,
|
||||
type: textgen_types.OOBA,
|
||||
mancer_model: 'mytholite',
|
||||
togetherai_model: 'Gryphe/MythoMax-L2-13b',
|
||||
ollama_model: '',
|
||||
legacy_api: false,
|
||||
sampler_order: KOBOLDCPP_ORDER,
|
||||
logit_bias: [],
|
||||
n: 1,
|
||||
server_urls: {},
|
||||
};
|
||||
|
||||
export let textgenerationwebui_banned_in_macros = [];
|
||||
@@ -144,8 +162,40 @@ const setting_names = [
|
||||
//'prompt_log_probs_aphrodite'
|
||||
'sampler_order',
|
||||
'n',
|
||||
'logit_bias',
|
||||
];
|
||||
|
||||
export function validateTextGenUrl() {
|
||||
const selector = SERVER_INPUTS[settings.type];
|
||||
|
||||
if (!selector) {
|
||||
return;
|
||||
}
|
||||
|
||||
const control = $(selector);
|
||||
const url = String(control.val()).trim();
|
||||
const formattedUrl = formatTextGenURL(url);
|
||||
|
||||
if (!formattedUrl) {
|
||||
toastr.error('Enter a valid API URL', 'Text Completion API');
|
||||
return;
|
||||
}
|
||||
|
||||
control.val(formattedUrl);
|
||||
}
|
||||
|
||||
export function getTextGenServer() {
|
||||
if (settings.type === MANCER) {
|
||||
return MANCER_SERVER;
|
||||
}
|
||||
|
||||
if (settings.type === TOGETHERAI) {
|
||||
return TOGETHERAI_SERVER;
|
||||
}
|
||||
|
||||
return settings.server_urls[settings.type] ?? '';
|
||||
}
|
||||
|
||||
async function selectPreset(name) {
|
||||
const preset = textgenerationwebui_presets[textgenerationwebui_preset_names.indexOf(name)];
|
||||
|
||||
@@ -159,13 +209,15 @@ async function selectPreset(name) {
|
||||
setSettingByName(name, value, true);
|
||||
}
|
||||
setGenerationParamsFromPreset(preset);
|
||||
BIAS_CACHE.delete(BIAS_KEY);
|
||||
displayLogitBias(preset.logit_bias, BIAS_KEY);
|
||||
saveSettingsDebounced();
|
||||
}
|
||||
|
||||
function formatTextGenURL(value) {
|
||||
try {
|
||||
// Mancer doesn't need any formatting (it's hardcoded)
|
||||
if (settings.type === MANCER) {
|
||||
// Mancer/Together doesn't need any formatting (it's hardcoded)
|
||||
if (settings.type === MANCER || settings.type === TOGETHERAI) {
|
||||
return value;
|
||||
}
|
||||
|
||||
@@ -240,11 +292,62 @@ function getCustomTokenBans() {
|
||||
return result.filter(onlyUnique).map(x => String(x)).join(',');
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates logit bias object from the logit bias list.
|
||||
* @returns {object} Logit bias object
|
||||
*/
|
||||
function calculateLogitBias() {
|
||||
if (!Array.isArray(settings.logit_bias) || settings.logit_bias.length === 0) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const tokenizer = SENTENCEPIECE_TOKENIZERS.includes(power_user.tokenizer) ? power_user.tokenizer : tokenizers.LLAMA;
|
||||
const result = {};
|
||||
|
||||
/**
|
||||
* Adds bias to the logit bias object.
|
||||
* @param {number} bias
|
||||
* @param {number[]} sequence
|
||||
* @returns {object} Accumulated logit bias object
|
||||
*/
|
||||
function addBias(bias, sequence) {
|
||||
if (sequence.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (const logit of sequence) {
|
||||
const key = String(logit);
|
||||
result[key] = bias;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
getLogitBiasListResult(settings.logit_bias, tokenizer, addBias);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function loadTextGenSettings(data, loadedSettings) {
|
||||
textgenerationwebui_presets = convertPresets(data.textgenerationwebui_presets);
|
||||
textgenerationwebui_preset_names = data.textgenerationwebui_preset_names ?? [];
|
||||
Object.assign(settings, loadedSettings.textgenerationwebui_settings ?? {});
|
||||
|
||||
if (loadedSettings.api_server_textgenerationwebui) {
|
||||
for (const type of Object.keys(SERVER_INPUTS)) {
|
||||
settings.server_urls[type] = loadedSettings.api_server_textgenerationwebui;
|
||||
}
|
||||
delete loadedSettings.api_server_textgenerationwebui;
|
||||
}
|
||||
|
||||
for (const [type, selector] of Object.entries(SERVER_INPUTS)) {
|
||||
const control = $(selector);
|
||||
control.val(settings.server_urls[type] ?? '').on('input', function () {
|
||||
settings.server_urls[type] = String($(this).val());
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
}
|
||||
|
||||
if (loadedSettings.api_use_mancer_webui) {
|
||||
settings.type = MANCER;
|
||||
}
|
||||
@@ -267,6 +370,8 @@ function loadTextGenSettings(data, loadedSettings) {
|
||||
|
||||
$('#textgen_type').val(settings.type);
|
||||
showTypeSpecificControls(settings.type);
|
||||
BIAS_CACHE.delete(BIAS_KEY);
|
||||
displayLogitBias(settings.logit_bias, BIAS_KEY);
|
||||
//this is needed because showTypeSpecificControls() does not handle NOT declarations
|
||||
if (settings.type === textgen_types.APHRODITE) {
|
||||
$('[data-forAphro=False]').each(function () {
|
||||
@@ -288,19 +393,6 @@ function loadTextGenSettings(data, loadedSettings) {
|
||||
});
|
||||
}
|
||||
|
||||
export function getTextGenUrlSourceId() {
|
||||
switch (settings.type) {
|
||||
case textgen_types.OOBA:
|
||||
return '#textgenerationwebui_api_url_text';
|
||||
case textgen_types.APHRODITE:
|
||||
return '#aphrodite_api_url_text';
|
||||
case textgen_types.TABBY:
|
||||
return '#tabby_api_url_text';
|
||||
case textgen_types.KOBOLDCPP:
|
||||
return '#koboldcpp_api_url_text';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sorts the sampler items by the given order.
|
||||
* @param {any[]} orderArray Sampler order array.
|
||||
@@ -370,9 +462,13 @@ jQuery(function () {
|
||||
|
||||
showTypeSpecificControls(type);
|
||||
setOnlineStatus('no_connection');
|
||||
BIAS_CACHE.delete(BIAS_KEY);
|
||||
|
||||
$('#main_api').trigger('change');
|
||||
$('#api_button_textgenerationwebui').trigger('click');
|
||||
|
||||
if (!SERVER_INPUTS[type] || settings.server_urls[type]) {
|
||||
$('#api_button_textgenerationwebui').trigger('click');
|
||||
}
|
||||
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
@@ -412,15 +508,20 @@ jQuery(function () {
|
||||
saveSettingsDebounced();
|
||||
});
|
||||
}
|
||||
|
||||
$('#textgen_logit_bias_new_entry').on('click', () => createNewLogitBiasEntry(settings.logit_bias, BIAS_KEY));
|
||||
});
|
||||
|
||||
function showTypeSpecificControls(type) {
|
||||
$('[data-tg-type]').each(function () {
|
||||
const tgType = $(this).attr('data-tg-type');
|
||||
if (tgType == type) {
|
||||
$(this).show();
|
||||
} else {
|
||||
$(this).hide();
|
||||
const tgTypes = $(this).attr('data-tg-type').split(',');
|
||||
for (const tgType of tgTypes) {
|
||||
if (tgType === type || tgType == 'all') {
|
||||
$(this).show();
|
||||
return;
|
||||
} else {
|
||||
$(this).hide();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -437,6 +538,11 @@ function setSettingByName(setting, value, trigger) {
|
||||
return;
|
||||
}
|
||||
|
||||
if ('logit_bias' === setting) {
|
||||
settings.logit_bias = Array.isArray(value) ? value : [];
|
||||
return;
|
||||
}
|
||||
|
||||
const isCheckbox = $(`#${setting}_textgenerationwebui`).attr('type') == 'checkbox';
|
||||
const isText = $(`#${setting}_textgenerationwebui`).attr('type') == 'text' || $(`#${setting}_textgenerationwebui`).is('textarea');
|
||||
if (isCheckbox) {
|
||||
@@ -498,11 +604,11 @@ async function generateTextGenWithStreaming(generate_data, signal) {
|
||||
|
||||
let data = JSON.parse(value.data);
|
||||
|
||||
if (data?.choices[0]?.index > 0) {
|
||||
if (data?.choices?.[0]?.index > 0) {
|
||||
const swipeIndex = data.choices[0].index - 1;
|
||||
swipes[swipeIndex] = (swipes[swipeIndex] || '') + data.choices[0].text;
|
||||
} else {
|
||||
text += data?.choices[0]?.text || '';
|
||||
text += data?.choices?.[0]?.text || data?.content || '';
|
||||
}
|
||||
|
||||
yield { text, swipes };
|
||||
@@ -533,6 +639,11 @@ function tryParseStreamingError(response, decoded) {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a string of comma-separated integers to an array of integers.
|
||||
* @param {string} string Input string
|
||||
* @returns {number[]} Array of integers
|
||||
*/
|
||||
function toIntArray(string) {
|
||||
if (!string) {
|
||||
return [];
|
||||
@@ -546,16 +657,29 @@ function getModel() {
|
||||
return settings.mancer_model;
|
||||
}
|
||||
|
||||
if (settings.type === TOGETHERAI) {
|
||||
return settings.togetherai_model;
|
||||
}
|
||||
|
||||
if (settings.type === APHRODITE) {
|
||||
return online_status;
|
||||
}
|
||||
|
||||
if (settings.type === OLLAMA) {
|
||||
if (!settings.ollama_model) {
|
||||
toastr.error('No Ollama model selected.', 'Text Completion API');
|
||||
throw new Error('No Ollama model selected');
|
||||
}
|
||||
|
||||
return settings.ollama_model;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, isContinue, cfgValues, type) {
|
||||
const canMultiSwipe = !isContinue && !isImpersonate && type !== 'quiet';
|
||||
let APIflags = {
|
||||
let params = {
|
||||
'prompt': finalPrompt,
|
||||
'model': getModel(),
|
||||
'max_new_tokens': maxTokens,
|
||||
@@ -590,15 +714,11 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
|
||||
toIntArray(getCustomTokenBans()) :
|
||||
getCustomTokenBans(),
|
||||
'api_type': settings.type,
|
||||
'api_server': settings.type === MANCER ?
|
||||
MANCER_SERVER :
|
||||
api_server_textgenerationwebui,
|
||||
'legacy_api': settings.legacy_api && settings.type !== MANCER,
|
||||
'sampler_order': settings.type === textgen_types.KOBOLDCPP ?
|
||||
settings.sampler_order :
|
||||
undefined,
|
||||
'api_server': getTextGenServer(),
|
||||
'legacy_api': settings.legacy_api && (settings.type === OOBA || settings.type === APHRODITE),
|
||||
'sampler_order': settings.type === textgen_types.KOBOLDCPP ? settings.sampler_order : undefined,
|
||||
};
|
||||
let aphroditeExclusionFlags = {
|
||||
const nonAphroditeParams = {
|
||||
'repetition_penalty_range': settings.rep_pen_range,
|
||||
'encoder_repetition_penalty': settings.encoder_rep_pen,
|
||||
'no_repeat_ngram_size': settings.no_repeat_ngram_size,
|
||||
@@ -609,8 +729,15 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
|
||||
'guidance_scale': cfgValues?.guidanceScale?.value ?? settings.guidance_scale ?? 1,
|
||||
'negative_prompt': cfgValues?.negativePrompt ?? substituteParams(settings.negative_prompt) ?? '',
|
||||
'grammar_string': settings.grammar_string,
|
||||
// llama.cpp aliases. In case someone wants to use LM Studio as Text Completion API
|
||||
'repeat_penalty': settings.rep_pen,
|
||||
'tfs_z': settings.tfs,
|
||||
'repeat_last_n': settings.rep_pen_range,
|
||||
'n_predict': settings.maxTokens,
|
||||
'mirostat': settings.mirostat_mode,
|
||||
'ignore_eos': settings.ban_eos_token,
|
||||
};
|
||||
let aphroditeFlags = {
|
||||
const aphroditeParams = {
|
||||
'n': canMultiSwipe ? settings.n : 1,
|
||||
'best_of': canMultiSwipe ? settings.n : 1,
|
||||
'ignore_eos': settings.ignore_eos_token_aphrodite,
|
||||
@@ -619,12 +746,33 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
|
||||
//'logprobs': settings.log_probs_aphrodite,
|
||||
//'prompt_logprobs': settings.prompt_log_probs_aphrodite,
|
||||
};
|
||||
if (settings.type === textgen_types.APHRODITE) {
|
||||
APIflags = Object.assign(APIflags, aphroditeFlags);
|
||||
if (settings.type === APHRODITE) {
|
||||
params = Object.assign(params, aphroditeParams);
|
||||
} else {
|
||||
APIflags = Object.assign(APIflags, aphroditeExclusionFlags);
|
||||
params = Object.assign(params, nonAphroditeParams);
|
||||
}
|
||||
|
||||
return APIflags;
|
||||
if (Array.isArray(settings.logit_bias) && settings.logit_bias.length) {
|
||||
const logitBias = BIAS_CACHE.get(BIAS_KEY) || calculateLogitBias();
|
||||
BIAS_CACHE.set(BIAS_KEY, logitBias);
|
||||
params.logit_bias = logitBias;
|
||||
}
|
||||
|
||||
if (settings.type === LLAMACPP || settings.type === OLLAMA) {
|
||||
// Convert bias and token bans to array of arrays
|
||||
const logitBiasArray = (params.logit_bias && typeof params.logit_bias === 'object' && Object.keys(params.logit_bias).length > 0)
|
||||
? Object.entries(params.logit_bias).map(([key, value]) => [Number(key), value])
|
||||
: [];
|
||||
const tokenBans = toIntArray(getCustomTokenBans());
|
||||
logitBiasArray.push(...tokenBans.map(x => [Number(x), false]));
|
||||
const llamaCppParams = {
|
||||
'logit_bias': logitBiasArray,
|
||||
// Conflicts with ooba's grammar_string
|
||||
'grammar': settings.grammar_string,
|
||||
};
|
||||
params = Object.assign(params, llamaCppParams);
|
||||
}
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
|
@@ -1,12 +1,12 @@
|
||||
import { characters, main_api, api_server, api_server_textgenerationwebui, nai_settings, online_status, this_chid } from '../script.js';
|
||||
import { characters, main_api, api_server, nai_settings, online_status, this_chid } from '../script.js';
|
||||
import { power_user, registerDebugFunction } from './power-user.js';
|
||||
import { chat_completion_sources, model_list, oai_settings } from './openai.js';
|
||||
import { groups, selected_group } from './group-chats.js';
|
||||
import { getStringHash } from './utils.js';
|
||||
import { kai_flags } from './kai-settings.js';
|
||||
import { textgen_types, textgenerationwebui_settings as textgen_settings } from './textgen-settings.js';
|
||||
import { textgen_types, textgenerationwebui_settings as textgen_settings, getTextGenServer } from './textgen-settings.js';
|
||||
|
||||
const { OOBA, TABBY, KOBOLDCPP, MANCER } = textgen_types;
|
||||
const { OOBA, TABBY, KOBOLDCPP, APHRODITE, LLAMACPP } = textgen_types;
|
||||
|
||||
export const CHARACTERS_PER_TOKEN_RATIO = 3.35;
|
||||
const TOKENIZER_WARNING_KEY = 'tokenizationWarningShown';
|
||||
@@ -190,7 +190,7 @@ export function getTokenizerBestMatch(forApi) {
|
||||
// - Tokenizer haven't reported an error previously
|
||||
const hasTokenizerError = sessionStorage.getItem(TOKENIZER_WARNING_KEY);
|
||||
const isConnected = online_status !== 'no_connection';
|
||||
const isTokenizerSupported = textgen_settings.type === OOBA || textgen_settings.type === TABBY || textgen_settings.type === KOBOLDCPP;
|
||||
const isTokenizerSupported = [OOBA, TABBY, KOBOLDCPP, LLAMACPP].includes(textgen_settings.type);
|
||||
|
||||
if (!hasTokenizerError && isConnected) {
|
||||
if (forApi === 'kobold' && kai_flags.can_use_tokenization) {
|
||||
@@ -388,6 +388,10 @@ export function getTokenizerModel() {
|
||||
return mistralTokenizer;
|
||||
}
|
||||
|
||||
if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) {
|
||||
return oai_settings.custom_model;
|
||||
}
|
||||
|
||||
// Default to Turbo 3.5
|
||||
return turboTokenizer;
|
||||
}
|
||||
@@ -537,10 +541,8 @@ function getTextgenAPITokenizationParams(str) {
|
||||
return {
|
||||
text: str,
|
||||
api_type: textgen_settings.type,
|
||||
url: api_server_textgenerationwebui,
|
||||
legacy_api:
|
||||
textgen_settings.legacy_api &&
|
||||
textgen_settings.type !== MANCER,
|
||||
url: getTextGenServer(),
|
||||
legacy_api: textgen_settings.legacy_api && (textgen_settings.type === OOBA || textgen_settings.type === APHRODITE),
|
||||
};
|
||||
}
|
||||
|
||||
|
@@ -637,6 +637,9 @@ hr {
|
||||
order: 2;
|
||||
padding-right: 2px;
|
||||
place-self: center;
|
||||
cursor: pointer;
|
||||
transition: 0.3s;
|
||||
opacity: 0.7;
|
||||
}
|
||||
|
||||
#options_button {
|
||||
@@ -3448,30 +3451,30 @@ a {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.novelai_logit_bias_form {
|
||||
.logit_bias_form {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
column-gap: 10px;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.novelai_logit_bias_text,
|
||||
.novelai_logit_bias_value {
|
||||
.logit_bias_text,
|
||||
.logit_bias_value {
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
.novelai_logit_bias_list {
|
||||
.logit_bias_list {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 10px;
|
||||
}
|
||||
|
||||
.novelai_logit_bias_list:empty {
|
||||
.logit_bias_list:empty {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.novelai_logit_bias_list:empty::before {
|
||||
.logit_bias_list:empty::before {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
@@ -3480,7 +3483,7 @@ a {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
opacity: 0.8;
|
||||
min-height: 2.5rem;
|
||||
min-height: 2.5em;
|
||||
}
|
||||
|
||||
.openai_logit_bias_preset_form {
|
||||
@@ -3531,11 +3534,11 @@ a {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.reverse_proxy_warning {
|
||||
.reverse_proxy_warning:not(small) {
|
||||
color: var(--warning);
|
||||
background-color: var(--black70a);
|
||||
text-shadow: none !important;
|
||||
margin-top: 12px !important;
|
||||
margin-top: 5px !important;
|
||||
border-radius: 5px;
|
||||
padding: 3px;
|
||||
border: 1px solid var(--SmartThemeBorderColor);
|
||||
|
@@ -11,6 +11,14 @@ function getMancerHeaders() {
|
||||
}) : {};
|
||||
}
|
||||
|
||||
function getTogetherAIHeaders() {
|
||||
const apiKey = readSecret(SECRET_KEYS.TOGETHERAI);
|
||||
|
||||
return apiKey ? ({
|
||||
'Authorization': `Bearer ${apiKey}`,
|
||||
}) : {};
|
||||
}
|
||||
|
||||
function getAphroditeHeaders() {
|
||||
const apiKey = readSecret(SECRET_KEYS.APHRODITE);
|
||||
|
||||
@@ -58,6 +66,9 @@ function setAdditionalHeaders(request, args, server) {
|
||||
case TEXTGEN_TYPES.TABBY:
|
||||
headers = getTabbyHeaders();
|
||||
break;
|
||||
case TEXTGEN_TYPES.TOGETHERAI:
|
||||
headers = getTogetherAIHeaders();
|
||||
break;
|
||||
default:
|
||||
headers = server ? getOverrideHeaders((new URL(server))?.host) : {};
|
||||
break;
|
||||
|
@@ -160,6 +160,7 @@ const CHAT_COMPLETION_SOURCES = {
|
||||
AI21: 'ai21',
|
||||
MAKERSUITE: 'makersuite',
|
||||
MISTRALAI: 'mistralai',
|
||||
CUSTOM: 'custom',
|
||||
};
|
||||
|
||||
const UPLOADS_PATH = './uploads';
|
||||
@@ -171,8 +172,42 @@ const TEXTGEN_TYPES = {
|
||||
APHRODITE: 'aphrodite',
|
||||
TABBY: 'tabby',
|
||||
KOBOLDCPP: 'koboldcpp',
|
||||
TOGETHERAI: 'togetherai',
|
||||
LLAMACPP: 'llamacpp',
|
||||
OLLAMA: 'ollama',
|
||||
};
|
||||
|
||||
// https://docs.together.ai/reference/completions
|
||||
const TOGETHERAI_KEYS = [
|
||||
'model',
|
||||
'prompt',
|
||||
'max_tokens',
|
||||
'temperature',
|
||||
'top_p',
|
||||
'top_k',
|
||||
'repetition_penalty',
|
||||
'stream',
|
||||
];
|
||||
|
||||
// https://github.com/jmorganca/ollama/blob/main/docs/api.md#request-with-options
|
||||
const OLLAMA_KEYS = [
|
||||
'num_predict',
|
||||
'stop',
|
||||
'temperature',
|
||||
'repeat_penalty',
|
||||
'presence_penalty',
|
||||
'frequency_penalty',
|
||||
'top_k',
|
||||
'top_p',
|
||||
'tfs_z',
|
||||
'typical_p',
|
||||
'seed',
|
||||
'repeat_last_n',
|
||||
'mirostat',
|
||||
'mirostat_tau',
|
||||
'mirostat_eta',
|
||||
];
|
||||
|
||||
const AVATAR_WIDTH = 400;
|
||||
const AVATAR_HEIGHT = 600;
|
||||
|
||||
@@ -186,4 +221,6 @@ module.exports = {
|
||||
CHAT_COMPLETION_SOURCES,
|
||||
AVATAR_WIDTH,
|
||||
AVATAR_HEIGHT,
|
||||
TOGETHERAI_KEYS,
|
||||
OLLAMA_KEYS,
|
||||
};
|
||||
|
@@ -4,7 +4,7 @@ const { Readable } = require('stream');
|
||||
|
||||
const { jsonParser } = require('../../express-common');
|
||||
const { CHAT_COMPLETION_SOURCES, GEMINI_SAFETY, BISON_SAFETY } = require('../../constants');
|
||||
const { forwardFetchResponse, getConfigValue, tryParse, uuidv4 } = require('../../util');
|
||||
const { forwardFetchResponse, getConfigValue, tryParse, uuidv4, mergeObjectWithYaml, excludeKeysByYaml, color } = require('../../util');
|
||||
const { convertClaudePrompt, convertGooglePrompt, convertTextCompletionPrompt } = require('../prompt-converters');
|
||||
|
||||
const { readSecret, SECRET_KEYS } = require('../secrets');
|
||||
@@ -21,9 +21,10 @@ const API_CLAUDE = 'https://api.anthropic.com/v1';
|
||||
async function sendClaudeRequest(request, response) {
|
||||
const apiUrl = new URL(request.body.reverse_proxy || API_CLAUDE).toString();
|
||||
const apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(SECRET_KEYS.CLAUDE);
|
||||
const divider = '-'.repeat(process.stdout.columns);
|
||||
|
||||
if (!apiKey) {
|
||||
console.log('Claude API key is missing.');
|
||||
console.log(color.red(`Claude API key is missing.\n${divider}`));
|
||||
return response.status(400).send({ error: true });
|
||||
}
|
||||
|
||||
@@ -34,34 +35,66 @@ async function sendClaudeRequest(request, response) {
|
||||
controller.abort();
|
||||
});
|
||||
|
||||
let doSystemPrompt = request.body.model === 'claude-2' || request.body.model === 'claude-2.1';
|
||||
let requestPrompt = convertClaudePrompt(request.body.messages, true, !request.body.exclude_assistant, doSystemPrompt);
|
||||
const isSysPromptSupported = request.body.model === 'claude-2' || request.body.model === 'claude-2.1';
|
||||
const requestPrompt = convertClaudePrompt(request.body.messages, !request.body.exclude_assistant, request.body.assistant_prefill, isSysPromptSupported, request.body.claude_use_sysprompt, request.body.human_sysprompt_message);
|
||||
|
||||
if (request.body.assistant_prefill && !request.body.exclude_assistant) {
|
||||
requestPrompt += request.body.assistant_prefill;
|
||||
// Check Claude messages sequence and prefixes presence.
|
||||
const sequence = requestPrompt.split('\n').filter(x => x.startsWith('Human:') || x.startsWith('Assistant:'));
|
||||
const humanFound = sequence.some(line => line.startsWith('Human:'));
|
||||
const assistantFound = sequence.some(line => line.startsWith('Assistant:'));
|
||||
let humanErrorCount = 0;
|
||||
let assistantErrorCount = 0;
|
||||
|
||||
for (let i = 0; i < sequence.length - 1; i++) {
|
||||
if (sequence[i].startsWith(sequence[i + 1].split(':')[0])) {
|
||||
if (sequence[i].startsWith('Human:')) {
|
||||
humanErrorCount++;
|
||||
} else if (sequence[i].startsWith('Assistant:')) {
|
||||
assistantErrorCount++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log('Claude request:', requestPrompt);
|
||||
const stop_sequences = ['\n\nHuman:', '\n\nSystem:', '\n\nAssistant:'];
|
||||
if (!humanFound) {
|
||||
console.log(color.red(`${divider}\nWarning: No 'Human:' prefix found in the prompt.\n${divider}`));
|
||||
}
|
||||
if (!assistantFound) {
|
||||
console.log(color.red(`${divider}\nWarning: No 'Assistant: ' prefix found in the prompt.\n${divider}`));
|
||||
}
|
||||
if (!sequence[0].startsWith('Human:')) {
|
||||
console.log(color.red(`${divider}\nWarning: The messages sequence should start with 'Human:' prefix.\nMake sure you have 'Human:' prefix at the very beggining of the prompt, or after the system prompt.\n${divider}`));
|
||||
}
|
||||
if (humanErrorCount > 0 || assistantErrorCount > 0) {
|
||||
console.log(color.red(`${divider}\nWarning: Detected incorrect Prefix sequence(s).`));
|
||||
console.log(color.red(`Incorrect "Human:" prefix(es): ${humanErrorCount}.\nIncorrect "Assistant: " prefix(es): ${assistantErrorCount}.`));
|
||||
console.log(color.red('Check the prompt above and fix it in the SillyTavern.'));
|
||||
console.log(color.red('\nThe correct sequence should look like this:\nSystem prompt <-(for the sysprompt format only, else have 2 empty lines above the first human\'s message.)'));
|
||||
console.log(color.red(` <-----(Each message beginning with the "Assistant:/Human:" prefix must have one empty line above.)\nHuman:\n\nAssistant:\n...\n\nHuman:\n\nAssistant:\n${divider}`));
|
||||
}
|
||||
|
||||
// Add custom stop sequences
|
||||
const stopSequences = ['\n\nHuman:', '\n\nSystem:', '\n\nAssistant:'];
|
||||
if (Array.isArray(request.body.stop)) {
|
||||
stop_sequences.push(...request.body.stop);
|
||||
stopSequences.push(...request.body.stop);
|
||||
}
|
||||
|
||||
const requestBody = {
|
||||
prompt: requestPrompt,
|
||||
model: request.body.model,
|
||||
max_tokens_to_sample: request.body.max_tokens,
|
||||
stop_sequences: stopSequences,
|
||||
temperature: request.body.temperature,
|
||||
top_p: request.body.top_p,
|
||||
top_k: request.body.top_k,
|
||||
stream: request.body.stream,
|
||||
};
|
||||
|
||||
console.log('Claude request:', requestBody);
|
||||
|
||||
const generateResponse = await fetch(apiUrl + '/complete', {
|
||||
method: 'POST',
|
||||
signal: controller.signal,
|
||||
body: JSON.stringify({
|
||||
prompt: requestPrompt,
|
||||
model: request.body.model,
|
||||
max_tokens_to_sample: request.body.max_tokens,
|
||||
stop_sequences: stop_sequences,
|
||||
temperature: request.body.temperature,
|
||||
top_p: request.body.top_p,
|
||||
top_k: request.body.top_k,
|
||||
stream: request.body.stream,
|
||||
}),
|
||||
body: JSON.stringify(requestBody),
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'anthropic-version': '2023-06-01',
|
||||
@@ -75,20 +108,20 @@ async function sendClaudeRequest(request, response) {
|
||||
forwardFetchResponse(generateResponse, response);
|
||||
} else {
|
||||
if (!generateResponse.ok) {
|
||||
console.log(`Claude API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`);
|
||||
console.log(color.red(`Claude API returned error: ${generateResponse.status} ${generateResponse.statusText}\n${await generateResponse.text()}\n${divider}`));
|
||||
return response.status(generateResponse.status).send({ error: true });
|
||||
}
|
||||
|
||||
const generateResponseJson = await generateResponse.json();
|
||||
const responseText = generateResponseJson.completion;
|
||||
console.log('Claude response:', responseText);
|
||||
console.log('Claude response:', generateResponseJson);
|
||||
|
||||
// Wrap it back to OAI format
|
||||
const reply = { choices: [{ 'message': { 'content': responseText } }] };
|
||||
return response.send(reply);
|
||||
}
|
||||
} catch (error) {
|
||||
console.log('Error communicating with Claude: ', error);
|
||||
console.log(color.red(`Error communicating with Claude: ${error}\n${divider}`));
|
||||
if (!response.headersSent) {
|
||||
return response.status(500).send({ error: true });
|
||||
}
|
||||
@@ -410,12 +443,12 @@ async function sendMistralAIRequest(request, response) {
|
||||
const messages = Array.isArray(request.body.messages) ? request.body.messages : [];
|
||||
const lastMsg = messages[messages.length - 1];
|
||||
if (messages.length > 0 && lastMsg && (lastMsg.role === 'system' || lastMsg.role === 'assistant')) {
|
||||
lastMsg.role = 'user';
|
||||
if (lastMsg.role === 'assistant') {
|
||||
lastMsg.content = lastMsg.name + ': ' + lastMsg.content;
|
||||
} else if (lastMsg.role === 'system') {
|
||||
lastMsg.content = '[INST] ' + lastMsg.content + ' [/INST]';
|
||||
}
|
||||
lastMsg.role = 'user';
|
||||
}
|
||||
|
||||
//system prompts can be stacked at the start, but any futher sys prompts after the first user/assistant message will break the model
|
||||
@@ -438,26 +471,30 @@ async function sendMistralAIRequest(request, response) {
|
||||
controller.abort();
|
||||
});
|
||||
|
||||
const requestBody = {
|
||||
'model': request.body.model,
|
||||
'messages': messages,
|
||||
'temperature': request.body.temperature,
|
||||
'top_p': request.body.top_p,
|
||||
'max_tokens': request.body.max_tokens,
|
||||
'stream': request.body.stream,
|
||||
'safe_mode': request.body.safe_mode,
|
||||
'random_seed': request.body.seed === -1 ? undefined : request.body.seed,
|
||||
};
|
||||
|
||||
const config = {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Authorization': 'Bearer ' + apiKey,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
'model': request.body.model,
|
||||
'messages': messages,
|
||||
'temperature': request.body.temperature,
|
||||
'top_p': request.body.top_p,
|
||||
'max_tokens': request.body.max_tokens,
|
||||
'stream': request.body.stream,
|
||||
'safe_mode': request.body.safe_mode,
|
||||
'random_seed': request.body.seed === -1 ? undefined : request.body.seed,
|
||||
}),
|
||||
body: JSON.stringify(requestBody),
|
||||
signal: controller.signal,
|
||||
timeout: 0,
|
||||
};
|
||||
|
||||
console.log('MisralAI request:', requestBody);
|
||||
|
||||
const generateResponse = await fetch('https://api.mistral.ai/v1/chat/completions', config);
|
||||
if (request.body.stream) {
|
||||
forwardFetchResponse(generateResponse, response);
|
||||
@@ -469,6 +506,7 @@ async function sendMistralAIRequest(request, response) {
|
||||
return response.status(generateResponse.status === 401 ? 500 : generateResponse.status).send({ error: true });
|
||||
}
|
||||
const generateResponseJson = await generateResponse.json();
|
||||
console.log('MistralAI response:', generateResponseJson);
|
||||
return response.send(generateResponseJson);
|
||||
}
|
||||
} catch (error) {
|
||||
@@ -502,12 +540,17 @@ router.post('/status', jsonParser, async function (request, response_getstatus_o
|
||||
} else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.MISTRALAI) {
|
||||
api_url = 'https://api.mistral.ai/v1';
|
||||
api_key_openai = readSecret(SECRET_KEYS.MISTRALAI);
|
||||
} else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.CUSTOM) {
|
||||
api_url = request.body.custom_url;
|
||||
api_key_openai = readSecret(SECRET_KEYS.CUSTOM);
|
||||
headers = {};
|
||||
mergeObjectWithYaml(headers, request.body.custom_include_headers);
|
||||
} else {
|
||||
console.log('This chat completion source is not supported yet.');
|
||||
return response_getstatus_openai.status(400).send({ error: true });
|
||||
}
|
||||
|
||||
if (!api_key_openai && !request.body.reverse_proxy) {
|
||||
if (!api_key_openai && !request.body.reverse_proxy && request.body.chat_completion_source !== CHAT_COMPLETION_SOURCES.CUSTOM) {
|
||||
console.log('OpenAI API key is missing.');
|
||||
return response_getstatus_openai.status(400).send({ error: true });
|
||||
}
|
||||
@@ -657,7 +700,7 @@ router.post('/generate', jsonParser, function (request, response) {
|
||||
let headers;
|
||||
let bodyParams;
|
||||
|
||||
if (request.body.chat_completion_source !== CHAT_COMPLETION_SOURCES.OPENROUTER) {
|
||||
if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.OPENAI) {
|
||||
apiUrl = new URL(request.body.reverse_proxy || API_OPENAI).toString();
|
||||
apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(SECRET_KEYS.OPENAI);
|
||||
headers = {};
|
||||
@@ -666,7 +709,7 @@ router.post('/generate', jsonParser, function (request, response) {
|
||||
if (getConfigValue('openai.randomizeUserId', false)) {
|
||||
bodyParams['user'] = uuidv4();
|
||||
}
|
||||
} else {
|
||||
} else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.OPENROUTER) {
|
||||
apiUrl = 'https://openrouter.ai/api/v1';
|
||||
apiKey = readSecret(SECRET_KEYS.OPENROUTER);
|
||||
// OpenRouter needs to pass the referer: https://openrouter.ai/docs
|
||||
@@ -676,9 +719,19 @@ router.post('/generate', jsonParser, function (request, response) {
|
||||
if (request.body.use_fallback) {
|
||||
bodyParams['route'] = 'fallback';
|
||||
}
|
||||
} else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.CUSTOM) {
|
||||
apiUrl = request.body.custom_url;
|
||||
apiKey = readSecret(SECRET_KEYS.CUSTOM);
|
||||
headers = {};
|
||||
bodyParams = {};
|
||||
mergeObjectWithYaml(bodyParams, request.body.custom_include_body);
|
||||
mergeObjectWithYaml(headers, request.body.custom_include_headers);
|
||||
} else {
|
||||
console.log('This chat completion source is not supported yet.');
|
||||
return response.status(400).send({ error: true });
|
||||
}
|
||||
|
||||
if (!apiKey && !request.body.reverse_proxy) {
|
||||
if (!apiKey && !request.body.reverse_proxy && request.body.chat_completion_source !== CHAT_COMPLETION_SOURCES.CUSTOM) {
|
||||
console.log('OpenAI API key is missing.');
|
||||
return response.status(400).send({ error: true });
|
||||
}
|
||||
@@ -700,6 +753,27 @@ router.post('/generate', jsonParser, function (request, response) {
|
||||
controller.abort();
|
||||
});
|
||||
|
||||
const requestBody = {
|
||||
'messages': isTextCompletion === false ? request.body.messages : undefined,
|
||||
'prompt': isTextCompletion === true ? textPrompt : undefined,
|
||||
'model': request.body.model,
|
||||
'temperature': request.body.temperature,
|
||||
'max_tokens': request.body.max_tokens,
|
||||
'stream': request.body.stream,
|
||||
'presence_penalty': request.body.presence_penalty,
|
||||
'frequency_penalty': request.body.frequency_penalty,
|
||||
'top_p': request.body.top_p,
|
||||
'top_k': request.body.top_k,
|
||||
'stop': isTextCompletion === false ? request.body.stop : undefined,
|
||||
'logit_bias': request.body.logit_bias,
|
||||
'seed': request.body.seed,
|
||||
...bodyParams,
|
||||
};
|
||||
|
||||
if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.CUSTOM) {
|
||||
excludeKeysByYaml(requestBody, request.body.custom_exclude_body);
|
||||
}
|
||||
|
||||
/** @type {import('node-fetch').RequestInit} */
|
||||
const config = {
|
||||
method: 'post',
|
||||
@@ -708,27 +782,12 @@ router.post('/generate', jsonParser, function (request, response) {
|
||||
'Authorization': 'Bearer ' + apiKey,
|
||||
...headers,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
'messages': isTextCompletion === false ? request.body.messages : undefined,
|
||||
'prompt': isTextCompletion === true ? textPrompt : undefined,
|
||||
'model': request.body.model,
|
||||
'temperature': request.body.temperature,
|
||||
'max_tokens': request.body.max_tokens,
|
||||
'stream': request.body.stream,
|
||||
'presence_penalty': request.body.presence_penalty,
|
||||
'frequency_penalty': request.body.frequency_penalty,
|
||||
'top_p': request.body.top_p,
|
||||
'top_k': request.body.top_k,
|
||||
'stop': isTextCompletion === false ? request.body.stop : undefined,
|
||||
'logit_bias': request.body.logit_bias,
|
||||
'seed': request.body.seed,
|
||||
...bodyParams,
|
||||
}),
|
||||
body: JSON.stringify(requestBody),
|
||||
signal: controller.signal,
|
||||
timeout: 0,
|
||||
};
|
||||
|
||||
console.log(JSON.parse(String(config.body)));
|
||||
console.log(requestBody);
|
||||
|
||||
makeRequest(config, response, request);
|
||||
|
||||
|
@@ -1,13 +1,82 @@
|
||||
const express = require('express');
|
||||
const fetch = require('node-fetch').default;
|
||||
const _ = require('lodash');
|
||||
const Readable = require('stream').Readable;
|
||||
|
||||
const { jsonParser } = require('../../express-common');
|
||||
const { TEXTGEN_TYPES } = require('../../constants');
|
||||
const { forwardFetchResponse } = require('../../util');
|
||||
const { TEXTGEN_TYPES, TOGETHERAI_KEYS, OLLAMA_KEYS } = require('../../constants');
|
||||
const { forwardFetchResponse, trimV1 } = require('../../util');
|
||||
const { setAdditionalHeaders } = require('../../additional-headers');
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
/**
 * Special boy's steaming routine. Wrap this abomination into proper SSE stream.
 * Ollama replies with newline-delimited JSON (one object per line); each
 * complete object is converted into an OpenAI-style completion chunk and
 * forwarded to the client as an SSE "data:" event.
 * @param {import('node-fetch').Response} jsonStream JSON stream
 * @param {import('express').Request} request Express request
 * @param {import('express').Response} response Express response
 * @returns {Promise<any>} Nothing valuable
 */
async function parseOllamaStream(jsonStream, request, response) {
    // Forwards one parsed Ollama object to the client as an SSE chunk.
    const emitChunk = (json) => {
        const text = json.response || '';
        const chunk = { choices: [{ text }] };
        response.write(`data: ${JSON.stringify(chunk)}\n\n`);
    };

    try {
        let partialData = '';
        jsonStream.body.on('data', (data) => {
            partialData += data.toString();

            // Bug fix: the previous implementation JSON.parse'd the whole
            // accumulated buffer. Once a single network chunk contained two or
            // more NDJSON objects, parsing failed on every subsequent chunk and
            // the stream stalled forever. Consume the buffer line by line.
            let newlineIndex;
            while ((newlineIndex = partialData.indexOf('\n')) !== -1) {
                const line = partialData.substring(0, newlineIndex);
                partialData = partialData.substring(newlineIndex + 1);

                if (!line.trim()) {
                    continue;
                }

                try {
                    emitChunk(JSON.parse(line));
                } catch {
                    // Malformed line; skip it rather than stalling the stream.
                }
            }

            // A complete object may arrive without a trailing newline; flush it
            // eagerly (preserves the old behavior for such payloads).
            if (partialData.trim()) {
                try {
                    const json = JSON.parse(partialData);
                    partialData = '';
                    emitChunk(json);
                } catch {
                    // Incomplete object; wait for more data.
                }
            }
        });

        // Client went away: tear down the upstream stream and close our side.
        request.socket.on('close', function () {
            if (jsonStream.body instanceof Readable) jsonStream.body.destroy();
            response.end();
        });

        jsonStream.body.on('end', () => {
            console.log('Streaming request finished');
            response.write('data: [DONE]\n\n');
            response.end();
        });
    } catch (error) {
        console.log('Error forwarding streaming response:', error);
        if (!response.headersSent) {
            return response.status(500).send({ error: true });
        } else {
            return response.end();
        }
    }
}
|
||||
|
||||
/**
 * Asks a KoboldCpp server to cancel its in-flight generation.
 * Failures are logged and swallowed — aborting is strictly best-effort.
 * @param {string} url Server base URL
 * @returns {Promise<void>} Promise resolving when we are done
 */
async function abortKoboldCppRequest(url) {
    try {
        console.log('Aborting Kobold generation...');
        const abortResult = await fetch(url + '/api/extra/abort', { method: 'POST' });

        if (abortResult.ok === false) {
            console.log('Error sending abort request to Kobold:', abortResult.status, abortResult.statusText);
        }
    } catch (error) {
        console.log(error);
    }
}
|
||||
|
||||
//************** Ooba/OpenAI text completions API
|
||||
router.post('/status', jsonParser, async function (request, response) {
|
||||
if (!request.body) return response.sendStatus(400);
|
||||
@@ -18,9 +87,7 @@ router.post('/status', jsonParser, async function (request, response) {
|
||||
}
|
||||
|
||||
console.log('Trying to connect to API:', request.body);
|
||||
|
||||
// Convert to string + remove trailing slash + /v1 suffix
|
||||
const baseUrl = String(request.body.api_server).replace(/\/$/, '').replace(/\/v1$/, '');
|
||||
const baseUrl = trimV1(request.body.api_server);
|
||||
|
||||
const args = {
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
@@ -38,6 +105,7 @@ router.post('/status', jsonParser, async function (request, response) {
|
||||
case TEXTGEN_TYPES.OOBA:
|
||||
case TEXTGEN_TYPES.APHRODITE:
|
||||
case TEXTGEN_TYPES.KOBOLDCPP:
|
||||
case TEXTGEN_TYPES.LLAMACPP:
|
||||
url += '/v1/models';
|
||||
break;
|
||||
case TEXTGEN_TYPES.MANCER:
|
||||
@@ -46,6 +114,12 @@ router.post('/status', jsonParser, async function (request, response) {
|
||||
case TEXTGEN_TYPES.TABBY:
|
||||
url += '/v1/model/list';
|
||||
break;
|
||||
case TEXTGEN_TYPES.TOGETHERAI:
|
||||
url += '/api/models?&info';
|
||||
break;
|
||||
case TEXTGEN_TYPES.OLLAMA:
|
||||
url += '/api/tags';
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -56,13 +130,22 @@ router.post('/status', jsonParser, async function (request, response) {
|
||||
return response.status(400);
|
||||
}
|
||||
|
||||
const data = await modelsReply.json();
|
||||
let data = await modelsReply.json();
|
||||
|
||||
if (request.body.legacy_api) {
|
||||
console.log('Legacy API response:', data);
|
||||
return response.send({ result: data?.result });
|
||||
}
|
||||
|
||||
// Rewrap to OAI-like response
|
||||
if (request.body.api_type === TEXTGEN_TYPES.TOGETHERAI && Array.isArray(data)) {
|
||||
data = { data: data.map(x => ({ id: x.name, ...x })) };
|
||||
}
|
||||
|
||||
if (request.body.api_type === TEXTGEN_TYPES.OLLAMA && Array.isArray(data.models)) {
|
||||
data = { data: data.models.map(x => ({ id: x.name, ...x })) };
|
||||
}
|
||||
|
||||
if (!Array.isArray(data.data)) {
|
||||
console.log('Models response is not an array.');
|
||||
return response.status(400);
|
||||
@@ -117,8 +200,8 @@ router.post('/status', jsonParser, async function (request, response) {
|
||||
}
|
||||
});
|
||||
|
||||
router.post('/generate', jsonParser, async function (request, response_generate) {
|
||||
if (!request.body) return response_generate.sendStatus(400);
|
||||
router.post('/generate', jsonParser, async function (request, response) {
|
||||
if (!request.body) return response.sendStatus(400);
|
||||
|
||||
try {
|
||||
if (request.body.api_server.indexOf('localhost') !== -1) {
|
||||
@@ -130,12 +213,15 @@ router.post('/generate', jsonParser, async function (request, response_generate)
|
||||
|
||||
const controller = new AbortController();
|
||||
request.socket.removeAllListeners('close');
|
||||
request.socket.on('close', function () {
|
||||
request.socket.on('close', async function () {
|
||||
if (request.body.api_type === TEXTGEN_TYPES.KOBOLDCPP && !response.writableEnded) {
|
||||
await abortKoboldCppRequest(trimV1(baseUrl));
|
||||
}
|
||||
|
||||
controller.abort();
|
||||
});
|
||||
|
||||
// Convert to string + remove trailing slash + /v1 suffix
|
||||
let url = String(baseUrl).replace(/\/$/, '').replace(/\/v1$/, '');
|
||||
let url = trimV1(baseUrl);
|
||||
|
||||
if (request.body.legacy_api) {
|
||||
url += '/v1/generate';
|
||||
@@ -145,11 +231,18 @@ router.post('/generate', jsonParser, async function (request, response_generate)
|
||||
case TEXTGEN_TYPES.OOBA:
|
||||
case TEXTGEN_TYPES.TABBY:
|
||||
case TEXTGEN_TYPES.KOBOLDCPP:
|
||||
case TEXTGEN_TYPES.TOGETHERAI:
|
||||
url += '/v1/completions';
|
||||
break;
|
||||
case TEXTGEN_TYPES.MANCER:
|
||||
url += '/oai/v1/completions';
|
||||
break;
|
||||
case TEXTGEN_TYPES.LLAMACPP:
|
||||
url += '/completion';
|
||||
break;
|
||||
case TEXTGEN_TYPES.OLLAMA:
|
||||
url += '/api/generate';
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -163,10 +256,32 @@ router.post('/generate', jsonParser, async function (request, response_generate)
|
||||
|
||||
setAdditionalHeaders(request, args, baseUrl);
|
||||
|
||||
if (request.body.stream) {
|
||||
if (request.body.api_type === TEXTGEN_TYPES.TOGETHERAI) {
|
||||
const stop = Array.isArray(request.body.stop) ? request.body.stop[0] : '';
|
||||
request.body = _.pickBy(request.body, (_, key) => TOGETHERAI_KEYS.includes(key));
|
||||
if (typeof stop === 'string' && stop.length > 0) {
|
||||
request.body.stop = stop;
|
||||
}
|
||||
args.body = JSON.stringify(request.body);
|
||||
}
|
||||
|
||||
if (request.body.api_type === TEXTGEN_TYPES.OLLAMA) {
|
||||
args.body = JSON.stringify({
|
||||
model: request.body.model,
|
||||
prompt: request.body.prompt,
|
||||
stream: request.body.stream ?? false,
|
||||
raw: true,
|
||||
options: _.pickBy(request.body, (_, key) => OLLAMA_KEYS.includes(key)),
|
||||
});
|
||||
}
|
||||
|
||||
if (request.body.api_type === TEXTGEN_TYPES.OLLAMA && request.body.stream) {
|
||||
const stream = await fetch(url, args);
|
||||
parseOllamaStream(stream, request, response);
|
||||
} else if (request.body.stream) {
|
||||
const completionsStream = await fetch(url, args);
|
||||
// Pipe remote SSE stream to Express response
|
||||
forwardFetchResponse(completionsStream, response_generate);
|
||||
forwardFetchResponse(completionsStream, response);
|
||||
}
|
||||
else {
|
||||
const completionsReply = await fetch(url, args);
|
||||
@@ -181,28 +296,152 @@ router.post('/generate', jsonParser, async function (request, response_generate)
|
||||
data['choices'] = [{ text }];
|
||||
}
|
||||
|
||||
return response_generate.send(data);
|
||||
return response.send(data);
|
||||
} else {
|
||||
const text = await completionsReply.text();
|
||||
const errorBody = { error: true, status: completionsReply.status, response: text };
|
||||
|
||||
if (!response_generate.headersSent) {
|
||||
return response_generate.send(errorBody);
|
||||
if (!response.headersSent) {
|
||||
return response.send(errorBody);
|
||||
}
|
||||
|
||||
return response_generate.end();
|
||||
return response.end();
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
let value = { error: true, status: error?.status, response: error?.statusText };
|
||||
console.log('Endpoint error:', error);
|
||||
|
||||
if (!response_generate.headersSent) {
|
||||
return response_generate.send(value);
|
||||
if (!response.headersSent) {
|
||||
return response.send(value);
|
||||
}
|
||||
|
||||
return response_generate.end();
|
||||
return response.end();
|
||||
}
|
||||
});
|
||||
|
||||
const ollama = express.Router();
|
||||
|
||||
// Pulls (downloads) a model on a remote Ollama server.
// Expects { name, api_server } in the request body.
ollama.post('/download', jsonParser, async function (request, response) {
    try {
        if (!request.body.name || !request.body.api_server) return response.sendStatus(400);

        const name = request.body.name;
        // Strip a trailing slash so we don't produce "//api/pull".
        const url = String(request.body.api_server).replace(/\/$/, '');

        const fetchResponse = await fetch(`${url}/api/pull`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({
                name: name,
                stream: false,
            }),
            timeout: 0, // model pulls can take a very long time
        });

        if (!fetchResponse.ok) {
            console.log('Download error:', fetchResponse.status, fetchResponse.statusText);
            return response.status(fetchResponse.status).send({ error: true });
        }

        return response.send({ ok: true });
    } catch (error) {
        console.error(error);
        // Bug fix: status(500) alone only sets the code without sending a
        // reply, leaving the client hanging; sendStatus() finalizes it.
        return response.sendStatus(500);
    }
});
|
||||
|
||||
// Generates a caption for an image using an Ollama multimodal model.
// Expects { server_url, model, prompt, image (base64) } in the request body.
ollama.post('/caption-image', jsonParser, async function (request, response) {
    try {
        if (!request.body.server_url || !request.body.model) {
            return response.sendStatus(400);
        }

        console.log('Ollama caption request:', request.body);
        const baseUrl = trimV1(request.body.server_url);

        const fetchResponse = await fetch(`${baseUrl}/api/generate`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({
                model: request.body.model,
                prompt: request.body.prompt,
                images: [request.body.image],
                stream: false,
            }),
            timeout: 0, // vision models can be slow; don't time out
        });

        if (!fetchResponse.ok) {
            console.log('Ollama caption error:', fetchResponse.status, fetchResponse.statusText);
            return response.status(500).send({ error: true });
        }

        const data = await fetchResponse.json();
        console.log('Ollama caption response:', data);

        const caption = data?.response || '';

        if (!caption) {
            console.log('Ollama caption is empty.');
            return response.status(500).send({ error: true });
        }

        return response.send({ caption });
    } catch (error) {
        console.error(error);
        // Bug fix: status(500) alone only sets the code without sending a
        // reply, leaving the client hanging; sendStatus() finalizes it.
        return response.sendStatus(500);
    }
});
|
||||
|
||||
const llamacpp = express.Router();
|
||||
|
||||
// Generates a caption for an image via a llama.cpp server's /completion
// endpoint. Expects { server_url, prompt, image (base64) } in the body.
llamacpp.post('/caption-image', jsonParser, async function (request, response) {
    try {
        if (!request.body.server_url) {
            return response.sendStatus(400);
        }

        console.log('LlamaCpp caption request:', request.body);
        const baseUrl = trimV1(request.body.server_url);

        const fetchResponse = await fetch(`${baseUrl}/completion`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            timeout: 0, // vision models can be slow; don't time out
            body: JSON.stringify({
                // [img-1] refers to the attached image_data entry with id 1
                prompt: `USER:[img-1]${String(request.body.prompt).trim()}\nASSISTANT:`,
                image_data: [{ data: request.body.image, id: 1 }],
                temperature: 0.1,
                stream: false,
                stop: ['USER:', '</s>'],
            }),
        });

        if (!fetchResponse.ok) {
            console.log('LlamaCpp caption error:', fetchResponse.status, fetchResponse.statusText);
            return response.status(500).send({ error: true });
        }

        const data = await fetchResponse.json();
        console.log('LlamaCpp caption response:', data);

        const caption = data?.content || '';

        if (!caption) {
            console.log('LlamaCpp caption is empty.');
            return response.status(500).send({ error: true });
        }

        return response.send({ caption });

    } catch (error) {
        console.error(error);
        // Bug fix: status(500) alone only sets the code without sending a
        // reply, leaving the client hanging; sendStatus() finalizes it.
        return response.sendStatus(500);
    }
});
|
||||
|
||||
router.use('/ollama', ollama);
|
||||
router.use('/llamacpp', llamacpp);
|
||||
|
||||
module.exports = { router };
|
||||
|
@@ -4,6 +4,7 @@ const readline = require('readline');
|
||||
const express = require('express');
|
||||
const sanitize = require('sanitize-filename');
|
||||
const writeFileAtomicSync = require('write-file-atomic').sync;
|
||||
const yaml = require('yaml');
|
||||
const _ = require('lodash');
|
||||
|
||||
const encode = require('png-chunks-encode');
|
||||
@@ -19,6 +20,7 @@ const characterCardParser = require('../character-card-parser.js');
|
||||
const { readWorldInfoFile } = require('./worldinfo');
|
||||
const { invalidateThumbnail } = require('./thumbnails');
|
||||
const { importRisuSprites } = require('./sprites');
|
||||
const defaultAvatarPath = './public/img/ai4.png';
|
||||
|
||||
let characters = {};
|
||||
|
||||
@@ -394,6 +396,36 @@ function convertWorldInfoToCharacterBook(name, entries) {
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
 * Import a character from a YAML file.
 * The uploaded file is consumed (deleted) as part of the import.
 * @param {string} uploadPath Path to the uploaded file
 * @param {import('express').Response} response Express response object
 */
function importFromYaml(uploadPath, response) {
    // Read the upload and dispose of it immediately.
    const fileText = fs.readFileSync(uploadPath, 'utf8');
    fs.rmSync(uploadPath);

    const yamlData = yaml.parse(fileText);
    console.log('importing from yaml');
    yamlData.name = sanitize(yamlData.name);
    const fileName = getPngName(yamlData.name);

    // Assemble a V1-style card from the YAML fields and upconvert it to V2.
    const v1Card = {
        'name': yamlData.name,
        'description': yamlData.context ?? '',
        'first_mes': yamlData.greeting ?? '',
        'create_date': humanizedISO8601DateTime(),
        'chat': `${yamlData.name} - ${humanizedISO8601DateTime()}`,
        'personality': '',
        'creatorcomment': '',
        'avatar': 'none',
        'mes_example': '',
        'scenario': '',
        'talkativeness': 0.5,
        'creator': '',
        'tags': '',
    };
    const character = convertToV2(v1Card);

    // Write the card onto the default avatar image.
    charaWrite(defaultAvatarPath, JSON.stringify(character), fileName, response, { file_name: fileName });
}
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
router.post('/create', urlencodedParser, async function (request, response) {
|
||||
@@ -760,144 +792,147 @@ function getPngName(file) {
|
||||
}
|
||||
|
||||
router.post('/import', urlencodedParser, async function (request, response) {
|
||||
|
||||
if (!request.body || request.file === undefined) return response.sendStatus(400);
|
||||
if (!request.body || !request.file) return response.sendStatus(400);
|
||||
|
||||
let png_name = '';
|
||||
let filedata = request.file;
|
||||
let uploadPath = path.join(UPLOADS_PATH, filedata.filename);
|
||||
var format = request.body.file_type;
|
||||
const defaultAvatarPath = './public/img/ai4.png';
|
||||
//console.log(format);
|
||||
if (filedata) {
|
||||
if (format == 'json') {
|
||||
fs.readFile(uploadPath, 'utf8', async (err, data) => {
|
||||
fs.unlinkSync(uploadPath);
|
||||
let format = request.body.file_type;
|
||||
|
||||
if (err) {
|
||||
console.log(err);
|
||||
response.send({ error: true });
|
||||
}
|
||||
if (format == 'yaml' || format == 'yml') {
|
||||
try {
|
||||
importFromYaml(uploadPath, response);
|
||||
} catch (err) {
|
||||
console.log(err);
|
||||
response.send({ error: true });
|
||||
}
|
||||
} else if (format == 'json') {
|
||||
fs.readFile(uploadPath, 'utf8', async (err, data) => {
|
||||
fs.unlinkSync(uploadPath);
|
||||
|
||||
let jsonData = JSON.parse(data);
|
||||
|
||||
if (jsonData.spec !== undefined) {
|
||||
console.log('importing from v2 json');
|
||||
importRisuSprites(jsonData);
|
||||
unsetFavFlag(jsonData);
|
||||
jsonData = readFromV2(jsonData);
|
||||
jsonData['create_date'] = humanizedISO8601DateTime();
|
||||
png_name = getPngName(jsonData.data?.name || jsonData.name);
|
||||
let char = JSON.stringify(jsonData);
|
||||
charaWrite(defaultAvatarPath, char, png_name, response, { file_name: png_name });
|
||||
} else if (jsonData.name !== undefined) {
|
||||
console.log('importing from v1 json');
|
||||
jsonData.name = sanitize(jsonData.name);
|
||||
if (jsonData.creator_notes) {
|
||||
jsonData.creator_notes = jsonData.creator_notes.replace('Creator\'s notes go here.', '');
|
||||
}
|
||||
png_name = getPngName(jsonData.name);
|
||||
let char = {
|
||||
'name': jsonData.name,
|
||||
'description': jsonData.description ?? '',
|
||||
'creatorcomment': jsonData.creatorcomment ?? jsonData.creator_notes ?? '',
|
||||
'personality': jsonData.personality ?? '',
|
||||
'first_mes': jsonData.first_mes ?? '',
|
||||
'avatar': 'none',
|
||||
'chat': jsonData.name + ' - ' + humanizedISO8601DateTime(),
|
||||
'mes_example': jsonData.mes_example ?? '',
|
||||
'scenario': jsonData.scenario ?? '',
|
||||
'create_date': humanizedISO8601DateTime(),
|
||||
'talkativeness': jsonData.talkativeness ?? 0.5,
|
||||
'creator': jsonData.creator ?? '',
|
||||
'tags': jsonData.tags ?? '',
|
||||
};
|
||||
char = convertToV2(char);
|
||||
let charJSON = JSON.stringify(char);
|
||||
charaWrite(defaultAvatarPath, charJSON, png_name, response, { file_name: png_name });
|
||||
} else if (jsonData.char_name !== undefined) {//json Pygmalion notepad
|
||||
console.log('importing from gradio json');
|
||||
jsonData.char_name = sanitize(jsonData.char_name);
|
||||
if (jsonData.creator_notes) {
|
||||
jsonData.creator_notes = jsonData.creator_notes.replace('Creator\'s notes go here.', '');
|
||||
}
|
||||
png_name = getPngName(jsonData.char_name);
|
||||
let char = {
|
||||
'name': jsonData.char_name,
|
||||
'description': jsonData.char_persona ?? '',
|
||||
'creatorcomment': jsonData.creatorcomment ?? jsonData.creator_notes ?? '',
|
||||
'personality': '',
|
||||
'first_mes': jsonData.char_greeting ?? '',
|
||||
'avatar': 'none',
|
||||
'chat': jsonData.name + ' - ' + humanizedISO8601DateTime(),
|
||||
'mes_example': jsonData.example_dialogue ?? '',
|
||||
'scenario': jsonData.world_scenario ?? '',
|
||||
'create_date': humanizedISO8601DateTime(),
|
||||
'talkativeness': jsonData.talkativeness ?? 0.5,
|
||||
'creator': jsonData.creator ?? '',
|
||||
'tags': jsonData.tags ?? '',
|
||||
};
|
||||
char = convertToV2(char);
|
||||
let charJSON = JSON.stringify(char);
|
||||
charaWrite(defaultAvatarPath, charJSON, png_name, response, { file_name: png_name });
|
||||
} else {
|
||||
console.log('Incorrect character format .json');
|
||||
response.send({ error: true });
|
||||
}
|
||||
});
|
||||
} else {
|
||||
try {
|
||||
var img_data = await charaRead(uploadPath, format);
|
||||
if (img_data === undefined) throw new Error('Failed to read character data');
|
||||
|
||||
let jsonData = JSON.parse(img_data);
|
||||
|
||||
jsonData.name = sanitize(jsonData.data?.name || jsonData.name);
|
||||
png_name = getPngName(jsonData.name);
|
||||
|
||||
if (jsonData.spec !== undefined) {
|
||||
console.log('Found a v2 character file.');
|
||||
importRisuSprites(jsonData);
|
||||
unsetFavFlag(jsonData);
|
||||
jsonData = readFromV2(jsonData);
|
||||
jsonData['create_date'] = humanizedISO8601DateTime();
|
||||
const char = JSON.stringify(jsonData);
|
||||
await charaWrite(uploadPath, char, png_name, response, { file_name: png_name });
|
||||
fs.unlinkSync(uploadPath);
|
||||
} else if (jsonData.name !== undefined) {
|
||||
console.log('Found a v1 character file.');
|
||||
|
||||
if (jsonData.creator_notes) {
|
||||
jsonData.creator_notes = jsonData.creator_notes.replace('Creator\'s notes go here.', '');
|
||||
}
|
||||
|
||||
let char = {
|
||||
'name': jsonData.name,
|
||||
'description': jsonData.description ?? '',
|
||||
'creatorcomment': jsonData.creatorcomment ?? jsonData.creator_notes ?? '',
|
||||
'personality': jsonData.personality ?? '',
|
||||
'first_mes': jsonData.first_mes ?? '',
|
||||
'avatar': 'none',
|
||||
'chat': jsonData.name + ' - ' + humanizedISO8601DateTime(),
|
||||
'mes_example': jsonData.mes_example ?? '',
|
||||
'scenario': jsonData.scenario ?? '',
|
||||
'create_date': humanizedISO8601DateTime(),
|
||||
'talkativeness': jsonData.talkativeness ?? 0.5,
|
||||
'creator': jsonData.creator ?? '',
|
||||
'tags': jsonData.tags ?? '',
|
||||
};
|
||||
char = convertToV2(char);
|
||||
const charJSON = JSON.stringify(char);
|
||||
await charaWrite(uploadPath, charJSON, png_name, response, { file_name: png_name });
|
||||
fs.unlinkSync(uploadPath);
|
||||
} else {
|
||||
console.log('Unknown character card format');
|
||||
response.send({ error: true });
|
||||
}
|
||||
} catch (err) {
|
||||
if (err) {
|
||||
console.log(err);
|
||||
response.send({ error: true });
|
||||
}
|
||||
|
||||
let jsonData = JSON.parse(data);
|
||||
|
||||
if (jsonData.spec !== undefined) {
|
||||
console.log('importing from v2 json');
|
||||
importRisuSprites(jsonData);
|
||||
unsetFavFlag(jsonData);
|
||||
jsonData = readFromV2(jsonData);
|
||||
jsonData['create_date'] = humanizedISO8601DateTime();
|
||||
png_name = getPngName(jsonData.data?.name || jsonData.name);
|
||||
let char = JSON.stringify(jsonData);
|
||||
charaWrite(defaultAvatarPath, char, png_name, response, { file_name: png_name });
|
||||
} else if (jsonData.name !== undefined) {
|
||||
console.log('importing from v1 json');
|
||||
jsonData.name = sanitize(jsonData.name);
|
||||
if (jsonData.creator_notes) {
|
||||
jsonData.creator_notes = jsonData.creator_notes.replace('Creator\'s notes go here.', '');
|
||||
}
|
||||
png_name = getPngName(jsonData.name);
|
||||
let char = {
|
||||
'name': jsonData.name,
|
||||
'description': jsonData.description ?? '',
|
||||
'creatorcomment': jsonData.creatorcomment ?? jsonData.creator_notes ?? '',
|
||||
'personality': jsonData.personality ?? '',
|
||||
'first_mes': jsonData.first_mes ?? '',
|
||||
'avatar': 'none',
|
||||
'chat': jsonData.name + ' - ' + humanizedISO8601DateTime(),
|
||||
'mes_example': jsonData.mes_example ?? '',
|
||||
'scenario': jsonData.scenario ?? '',
|
||||
'create_date': humanizedISO8601DateTime(),
|
||||
'talkativeness': jsonData.talkativeness ?? 0.5,
|
||||
'creator': jsonData.creator ?? '',
|
||||
'tags': jsonData.tags ?? '',
|
||||
};
|
||||
char = convertToV2(char);
|
||||
let charJSON = JSON.stringify(char);
|
||||
charaWrite(defaultAvatarPath, charJSON, png_name, response, { file_name: png_name });
|
||||
} else if (jsonData.char_name !== undefined) {//json Pygmalion notepad
|
||||
console.log('importing from gradio json');
|
||||
jsonData.char_name = sanitize(jsonData.char_name);
|
||||
if (jsonData.creator_notes) {
|
||||
jsonData.creator_notes = jsonData.creator_notes.replace('Creator\'s notes go here.', '');
|
||||
}
|
||||
png_name = getPngName(jsonData.char_name);
|
||||
let char = {
|
||||
'name': jsonData.char_name,
|
||||
'description': jsonData.char_persona ?? '',
|
||||
'creatorcomment': jsonData.creatorcomment ?? jsonData.creator_notes ?? '',
|
||||
'personality': '',
|
||||
'first_mes': jsonData.char_greeting ?? '',
|
||||
'avatar': 'none',
|
||||
'chat': jsonData.name + ' - ' + humanizedISO8601DateTime(),
|
||||
'mes_example': jsonData.example_dialogue ?? '',
|
||||
'scenario': jsonData.world_scenario ?? '',
|
||||
'create_date': humanizedISO8601DateTime(),
|
||||
'talkativeness': jsonData.talkativeness ?? 0.5,
|
||||
'creator': jsonData.creator ?? '',
|
||||
'tags': jsonData.tags ?? '',
|
||||
};
|
||||
char = convertToV2(char);
|
||||
let charJSON = JSON.stringify(char);
|
||||
charaWrite(defaultAvatarPath, charJSON, png_name, response, { file_name: png_name });
|
||||
} else {
|
||||
console.log('Incorrect character format .json');
|
||||
response.send({ error: true });
|
||||
}
|
||||
});
|
||||
} else {
|
||||
try {
|
||||
var img_data = await charaRead(uploadPath, format);
|
||||
if (img_data === undefined) throw new Error('Failed to read character data');
|
||||
|
||||
let jsonData = JSON.parse(img_data);
|
||||
|
||||
jsonData.name = sanitize(jsonData.data?.name || jsonData.name);
|
||||
png_name = getPngName(jsonData.name);
|
||||
|
||||
if (jsonData.spec !== undefined) {
|
||||
console.log('Found a v2 character file.');
|
||||
importRisuSprites(jsonData);
|
||||
unsetFavFlag(jsonData);
|
||||
jsonData = readFromV2(jsonData);
|
||||
jsonData['create_date'] = humanizedISO8601DateTime();
|
||||
const char = JSON.stringify(jsonData);
|
||||
await charaWrite(uploadPath, char, png_name, response, { file_name: png_name });
|
||||
fs.unlinkSync(uploadPath);
|
||||
} else if (jsonData.name !== undefined) {
|
||||
console.log('Found a v1 character file.');
|
||||
|
||||
if (jsonData.creator_notes) {
|
||||
jsonData.creator_notes = jsonData.creator_notes.replace('Creator\'s notes go here.', '');
|
||||
}
|
||||
|
||||
let char = {
|
||||
'name': jsonData.name,
|
||||
'description': jsonData.description ?? '',
|
||||
'creatorcomment': jsonData.creatorcomment ?? jsonData.creator_notes ?? '',
|
||||
'personality': jsonData.personality ?? '',
|
||||
'first_mes': jsonData.first_mes ?? '',
|
||||
'avatar': 'none',
|
||||
'chat': jsonData.name + ' - ' + humanizedISO8601DateTime(),
|
||||
'mes_example': jsonData.mes_example ?? '',
|
||||
'scenario': jsonData.scenario ?? '',
|
||||
'create_date': humanizedISO8601DateTime(),
|
||||
'talkativeness': jsonData.talkativeness ?? 0.5,
|
||||
'creator': jsonData.creator ?? '',
|
||||
'tags': jsonData.tags ?? '',
|
||||
};
|
||||
char = convertToV2(char);
|
||||
const charJSON = JSON.stringify(char);
|
||||
await charaWrite(uploadPath, charJSON, png_name, response, { file_name: png_name });
|
||||
fs.unlinkSync(uploadPath);
|
||||
} else {
|
||||
console.log('Unknown character card format');
|
||||
response.send({ error: true });
|
||||
}
|
||||
} catch (err) {
|
||||
console.log(err);
|
||||
response.send({ error: true });
|
||||
}
|
||||
}
|
||||
});
|
||||
|
@@ -1,20 +1,30 @@
|
||||
const fetch = require('node-fetch').default;
|
||||
const express = require('express');
|
||||
const AIHorde = require('../ai_horde');
|
||||
const { getVersion, delay } = require('../util');
|
||||
const { getVersion, delay, Cache } = require('../util');
|
||||
const { readSecret, SECRET_KEYS } = require('./secrets');
|
||||
const { jsonParser } = require('../express-common');
|
||||
|
||||
const ANONYMOUS_KEY = '0000000000';
|
||||
const cache = new Cache(60 * 1000);
|
||||
const router = express.Router();
|
||||
|
||||
/**
 * Returns the AIHorde client agent.
 * @returns {Promise<string>} AIHorde client agent
 */
async function getClientAgent() {
    const fallbackAgent = 'SillyTavern:UNKNOWN:Cohee#1207';
    const versionInfo = await getVersion();
    return versionInfo?.agent || fallbackAgent;
}
|
||||
|
||||
/**
 * Returns the AIHorde client.
 * @returns {Promise<AIHorde>} AIHorde client
 */
async function getHordeClient() {
    // Cleanup: the block contained two conflicting `client_agent` entries
    // (the later one wins in an object literal) plus a getVersion() call whose
    // result only fed the losing entry. Collapse onto getClientAgent(), which
    // is the winning value and what the sibling endpoints use.
    const ai_horde = new AIHorde({
        client_agent: await getClientAgent(),
    });
    return ai_horde;
}
|
||||
@@ -36,29 +46,122 @@ function sanitizeHordeImagePrompt(prompt) {
|
||||
prompt = prompt.replace(/\b(boy)\b/gmi, 'man');
|
||||
prompt = prompt.replace(/\b(girls)\b/gmi, 'women');
|
||||
prompt = prompt.replace(/\b(boys)\b/gmi, 'men');
|
||||
|
||||
//always remove these high risk words from prompt, as they add little value to image gen while increasing the risk the prompt gets flagged
|
||||
prompt = prompt.replace(/\b(under.age|under.aged|underage|underaged|loli|pedo|pedophile|(\w+).year.old|(\w+).years.old|minor|prepubescent|minors|shota)\b/gmi, '');
|
||||
|
||||
//if nsfw is detected, do not remove it but apply additional precautions
|
||||
let isNsfw = prompt.match(/\b(cock|ahegao|hentai|uncensored|lewd|cocks|deepthroat|deepthroating|dick|dicks|cumshot|lesbian|fuck|fucked|fucking|sperm|naked|nipples|tits|boobs|breasts|boob|breast|topless|ass|butt|fingering|masturbate|masturbating|bitch|blowjob|pussy|piss|asshole|dildo|dildos|vibrator|erection|foreskin|handjob|nude|penis|porn|vibrator|virgin|vagina|vulva|threesome|orgy|bdsm|hickey|condom|testicles|anal|bareback|bukkake|creampie|stripper|strap-on|missionary|clitoris|clit|clitty|cowgirl|fleshlight|sex|buttplug|milf|oral|sucking|bondage|orgasm|scissoring|railed|slut|sluts|slutty|cumming|cunt|faggot|sissy|anal|anus|cum|semen|scat|nsfw|xxx|explicit|erotic|horny|aroused|jizz|moan|rape|raped|raping|throbbing|humping)\b/gmi);
|
||||
|
||||
if (isNsfw) {
|
||||
//replace risky subject nouns with person
|
||||
prompt = prompt.replace(/\b(youngster|infant|baby|toddler|child|teen|kid|kiddie|kiddo|teenager|student|preteen|pre.teen)\b/gmi, 'person');
|
||||
|
||||
//remove risky adjectives and related words
|
||||
prompt = prompt.replace(/\b(young|younger|youthful|youth|small|smaller|smallest|girly|boyish|lil|tiny|teenaged|lit[tl]le|school.aged|school|highschool|kindergarten|teens|children|kids)\b/gmi, '');
|
||||
}
|
||||
//replace risky subject nouns with person
|
||||
prompt = prompt.replace(/\b(youngster|infant|baby|toddler|child|teen|kid|kiddie|kiddo|teenager|student|preteen|pre.teen)\b/gmi, 'person');
|
||||
//remove risky adjectives and related words
|
||||
prompt = prompt.replace(/\b(young|younger|youthful|youth|small|smaller|smallest|girly|boyish|lil|tiny|teenaged|lit[tl]le|school.aged|school|highschool|kindergarten|teens|children|kids)\b/gmi, '');
|
||||
|
||||
return prompt;
|
||||
}
|
||||
|
||||
const router = express.Router();
|
||||
// Lists AI Horde text workers, serving a short-lived cache unless a refresh is forced.
router.post('/text-workers', jsonParser, async (request, response) => {
    try {
        // Serve from cache unless the client explicitly asked for fresh data.
        const cached = cache.get('workers');
        if (cached && !request.body.force) {
            return response.send(cached);
        }

        const headers = { 'Client-Agent': await getClientAgent() };
        const workersResult = await fetch('https://horde.koboldai.net/api/v2/workers?type=text', { headers });
        const workers = await workersResult.json();

        cache.set('workers', workers);
        return response.send(workers);
    } catch (error) {
        console.error(error);
        response.sendStatus(500);
    }
});
|
||||
|
||||
// Lists AI Horde text models, serving a short-lived cache unless a refresh is forced.
router.post('/text-models', jsonParser, async (request, response) => {
    try {
        // Serve from cache unless the client explicitly asked for fresh data.
        const cached = cache.get('models');
        if (cached && !request.body.force) {
            return response.send(cached);
        }

        const headers = { 'Client-Agent': await getClientAgent() };
        const modelsResult = await fetch('https://horde.koboldai.net/api/v2/status/models?type=text', { headers });
        const models = await modelsResult.json();

        cache.set('models', models);
        return response.send(models);
    } catch (error) {
        console.error(error);
        response.sendStatus(500);
    }
});
|
||||
|
||||
router.post('/status', jsonParser, async (_, response) => {
|
||||
try {
|
||||
const agent = await getClientAgent();
|
||||
const fetchResult = await fetch('https://horde.koboldai.net/api/v2/status/heartbeat', {
|
||||
headers: {
|
||||
'Client-Agent': agent,
|
||||
},
|
||||
});
|
||||
|
||||
return response.send({ ok: fetchResult.ok });
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
response.sendStatus(500);
|
||||
}
|
||||
});
|
||||
|
||||
router.post('/cancel-task', jsonParser, async (request, response) => {
|
||||
try {
|
||||
const taskId = request.body.taskId;
|
||||
const agent = await getClientAgent();
|
||||
const fetchResult = await fetch(`https://horde.koboldai.net/api/v2/generate/text/status/${taskId}`, {
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
'Client-Agent': agent,
|
||||
},
|
||||
});
|
||||
|
||||
const data = await fetchResult.json();
|
||||
console.log(`Cancelled Horde task ${taskId}`);
|
||||
return response.send(data);
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
response.sendStatus(500);
|
||||
}
|
||||
});
|
||||
|
||||
router.post('/task-status', jsonParser, async (request, response) => {
|
||||
try {
|
||||
const taskId = request.body.taskId;
|
||||
const agent = await getClientAgent();
|
||||
const fetchResult = await fetch(`https://horde.koboldai.net/api/v2/generate/text/status/${taskId}`, {
|
||||
headers: {
|
||||
'Client-Agent': agent,
|
||||
},
|
||||
});
|
||||
|
||||
const data = await fetchResult.json();
|
||||
console.log(`Horde task ${taskId} status:`, data);
|
||||
return response.send(data);
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
response.sendStatus(500);
|
||||
}
|
||||
});
|
||||
|
||||
router.post('/generate-text', jsonParser, async (request, response) => {
|
||||
const api_key_horde = readSecret(SECRET_KEYS.HORDE) || ANONYMOUS_KEY;
|
||||
const apiKey = readSecret(SECRET_KEYS.HORDE) || ANONYMOUS_KEY;
|
||||
const url = 'https://horde.koboldai.net/api/v2/generate/text/async';
|
||||
const agent = await getClientAgent();
|
||||
|
||||
console.log(request.body);
|
||||
try {
|
||||
@@ -67,8 +170,8 @@ router.post('/generate-text', jsonParser, async (request, response) => {
|
||||
body: JSON.stringify(request.body),
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'apikey': api_key_horde,
|
||||
'Client-Agent': String(request.header('Client-Agent')),
|
||||
'apikey': apiKey,
|
||||
'Client-Agent': agent,
|
||||
},
|
||||
});
|
||||
|
||||
|
@@ -4,22 +4,35 @@ const express = require('express');
|
||||
const FormData = require('form-data');
|
||||
const fs = require('fs');
|
||||
const { jsonParser, urlencodedParser } = require('../express-common');
|
||||
const { getConfigValue, mergeObjectWithYaml, excludeKeysByYaml } = require('../util');
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
router.post('/caption-image', jsonParser, async (request, response) => {
|
||||
try {
|
||||
let key = '';
|
||||
let headers = {};
|
||||
let bodyParams = {};
|
||||
|
||||
if (request.body.api === 'openai') {
|
||||
if (request.body.api === 'openai' && !request.body.reverse_proxy) {
|
||||
key = readSecret(SECRET_KEYS.OPENAI);
|
||||
}
|
||||
|
||||
if (request.body.api === 'openrouter') {
|
||||
if (request.body.api === 'openrouter' && !request.body.reverse_proxy) {
|
||||
key = readSecret(SECRET_KEYS.OPENROUTER);
|
||||
}
|
||||
|
||||
if (!key) {
|
||||
if (request.body.reverse_proxy && request.body.proxy_password) {
|
||||
key = request.body.proxy_password;
|
||||
}
|
||||
|
||||
if (request.body.api === 'custom') {
|
||||
key = readSecret(SECRET_KEYS.CUSTOM);
|
||||
mergeObjectWithYaml(bodyParams, request.body.custom_include_body);
|
||||
mergeObjectWithYaml(headers, request.body.custom_include_headers);
|
||||
}
|
||||
|
||||
if (!key && !request.body.reverse_proxy && request.body.api !== 'custom') {
|
||||
console.log('No key found for API', request.body.api);
|
||||
return response.sendStatus(400);
|
||||
}
|
||||
@@ -36,12 +49,24 @@ router.post('/caption-image', jsonParser, async (request, response) => {
|
||||
},
|
||||
],
|
||||
max_tokens: 500,
|
||||
...bodyParams,
|
||||
};
|
||||
|
||||
const captionSystemPrompt = getConfigValue('openai.captionSystemPrompt');
|
||||
if (captionSystemPrompt) {
|
||||
body.messages.unshift({
|
||||
role: 'system',
|
||||
content: captionSystemPrompt,
|
||||
});
|
||||
}
|
||||
|
||||
if (request.body.api === 'custom') {
|
||||
excludeKeysByYaml(body, request.body.custom_exclude_body);
|
||||
}
|
||||
|
||||
console.log('Multimodal captioning request', body);
|
||||
|
||||
let apiUrl = '';
|
||||
let headers = {};
|
||||
|
||||
if (request.body.api === 'openrouter') {
|
||||
apiUrl = 'https://openrouter.ai/api/v1/chat/completions';
|
||||
@@ -52,6 +77,14 @@ router.post('/caption-image', jsonParser, async (request, response) => {
|
||||
apiUrl = 'https://api.openai.com/v1/chat/completions';
|
||||
}
|
||||
|
||||
if (request.body.reverse_proxy) {
|
||||
apiUrl = `${request.body.reverse_proxy}/chat/completions`;
|
||||
}
|
||||
|
||||
if (request.body.api === 'custom') {
|
||||
apiUrl = `${request.body.server_url}/chat/completions`;
|
||||
}
|
||||
|
||||
const result = await fetch(apiUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
|
@@ -1,74 +1,67 @@
|
||||
/**
|
||||
* Convert a prompt from the ChatML objects to the format used by Claude.
|
||||
* @param {object[]} messages Array of messages
|
||||
* @param {boolean} addHumanPrefix Add Human prefix
|
||||
* @param {boolean} addAssistantPostfix Add Assistant postfix
|
||||
* @param {boolean} withSystemPrompt Build system prompt before "\n\nHuman: "
|
||||
* @param {boolean} addAssistantPostfix Add Assistant postfix.
|
||||
* @param {string} addAssistantPrefill Add Assistant prefill after the assistant postfix.
|
||||
* @param {boolean} withSysPromptSupport Indicates if the Claude model supports the system prompt format.
|
||||
* @param {boolean} useSystemPrompt Indicates if the system prompt format should be used.
|
||||
* @param {string} addSysHumanMsg Add Human message between system prompt and assistant.
|
||||
* @returns {string} Prompt for Claude
|
||||
* @copyright Prompt Conversion script taken from RisuAI by kwaroran (GPLv3).
|
||||
*/
|
||||
function convertClaudePrompt(messages, addHumanPrefix, addAssistantPostfix, withSystemPrompt) {
|
||||
// Claude doesn't support message names, so we'll just add them to the message content.
|
||||
for (const message of messages) {
|
||||
if (message.name && message.role !== 'system') {
|
||||
message.content = message.name + ': ' + message.content;
|
||||
delete message.name;
|
||||
function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, withSysPromptSupport, useSystemPrompt, addSysHumanMsg) {
|
||||
|
||||
//Prepare messages for claude.
|
||||
if (messages.length > 0) {
|
||||
messages[0].role = 'system';
|
||||
//Add the assistant's message to the end of messages.
|
||||
if (addAssistantPostfix) {
|
||||
messages.push({
|
||||
role: 'assistant',
|
||||
content: addAssistantPrefill || '',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let systemPrompt = '';
|
||||
if (withSystemPrompt) {
|
||||
let lastSystemIdx = -1;
|
||||
|
||||
for (let i = 0; i < messages.length - 1; i++) {
|
||||
const message = messages[i];
|
||||
if (message.role === 'system' && !message.name) {
|
||||
systemPrompt += message.content + '\n\n';
|
||||
} else {
|
||||
lastSystemIdx = i - 1;
|
||||
break;
|
||||
// Find the index of the first message with an assistant role and check for a "'user' role/Human:" before it.
|
||||
let hasUser = false;
|
||||
const firstAssistantIndex = messages.findIndex((message, i) => {
|
||||
if (i >= 0 && (message.role === 'user' || message.content.includes('\n\nHuman: '))) {
|
||||
hasUser = true;
|
||||
}
|
||||
return message.role === 'assistant' && i > 0;
|
||||
});
|
||||
// When 2.1+ and 'Use system prompt" checked, switches to the system prompt format by setting the first message's role to the 'system'.
|
||||
// Inserts the human's message before the first the assistant one, if there are no such message or prefix found.
|
||||
if (withSysPromptSupport && useSystemPrompt) {
|
||||
messages[0].role = 'system';
|
||||
if (firstAssistantIndex > 0 && addSysHumanMsg && !hasUser) {
|
||||
messages.splice(firstAssistantIndex, 0, {
|
||||
role: 'user',
|
||||
content: addSysHumanMsg,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
// Otherwise, use the default message format by setting the first message's role to 'user'(compatible with all claude models including 2.1.)
|
||||
messages[0].role = 'user';
|
||||
// Fix messages order for default message format when(messages > Context Size) by merging two messages with "\n\nHuman: " prefixes into one, before the first Assistant's message.
|
||||
if (firstAssistantIndex > 0) {
|
||||
messages[firstAssistantIndex - 1].role = firstAssistantIndex - 1 !== 0 && messages[firstAssistantIndex - 1].role === 'user' ? 'FixHumMsg' : messages[firstAssistantIndex - 1].role;
|
||||
}
|
||||
}
|
||||
if (lastSystemIdx >= 0) {
|
||||
messages.splice(0, lastSystemIdx + 1);
|
||||
}
|
||||
}
|
||||
|
||||
let requestPrompt = messages.map((v) => {
|
||||
let prefix = '';
|
||||
switch (v.role) {
|
||||
case 'assistant':
|
||||
prefix = '\n\nAssistant: ';
|
||||
break;
|
||||
case 'user':
|
||||
prefix = '\n\nHuman: ';
|
||||
break;
|
||||
case 'system':
|
||||
// According to the Claude docs, H: and A: should be used for example conversations.
|
||||
if (v.name === 'example_assistant') {
|
||||
prefix = '\n\nA: ';
|
||||
} else if (v.name === 'example_user') {
|
||||
prefix = '\n\nH: ';
|
||||
} else {
|
||||
prefix = '\n\n';
|
||||
}
|
||||
break;
|
||||
}
|
||||
return prefix + v.content;
|
||||
// Convert messages to the prompt.
|
||||
let requestPrompt = messages.map((v, i) => {
|
||||
// Set prefix according to the role.
|
||||
let prefix = {
|
||||
'assistant': '\n\nAssistant: ',
|
||||
'user': '\n\nHuman: ',
|
||||
'system': i === 0 ? '' : v.name === 'example_assistant' ? '\n\nA: ' : v.name === 'example_user' ? '\n\nH: ' : '\n\n',
|
||||
'FixHumMsg': '\n\nFirst message: ',
|
||||
}[v.role] ?? '';
|
||||
// Claude doesn't support message names, so we'll just add them to the message content.
|
||||
return `${prefix}${v.name && v.role !== 'system' ? `${v.name}: ` : ''}${v.content}`;
|
||||
}).join('');
|
||||
|
||||
if (addHumanPrefix) {
|
||||
requestPrompt = '\n\nHuman: ' + requestPrompt;
|
||||
}
|
||||
|
||||
if (addAssistantPostfix) {
|
||||
requestPrompt = requestPrompt + '\n\nAssistant: ';
|
||||
}
|
||||
|
||||
if (withSystemPrompt) {
|
||||
requestPrompt = systemPrompt + requestPrompt;
|
||||
}
|
||||
|
||||
return requestPrompt;
|
||||
}
|
||||
|
||||
|
@@ -25,7 +25,9 @@ const SECRET_KEYS = {
|
||||
DEEPLX_URL: 'deeplx_url',
|
||||
MAKERSUITE: 'api_key_makersuite',
|
||||
SERPAPI: 'api_key_serpapi',
|
||||
TOGETHERAI: 'api_key_togetherai',
|
||||
MISTRALAI: 'api_key_mistralai',
|
||||
CUSTOM: 'api_key_custom',
|
||||
};
|
||||
|
||||
/**
|
||||
|
@@ -1,11 +1,12 @@
|
||||
const express = require('express');
|
||||
const fetch = require('node-fetch').default;
|
||||
const sanitize = require('sanitize-filename');
|
||||
const { getBasicAuthHeader, delay } = require('../util.js');
|
||||
const { getBasicAuthHeader, delay, getHexString } = require('../util.js');
|
||||
const fs = require('fs');
|
||||
const { DIRECTORIES } = require('../constants.js');
|
||||
const writeFileAtomicSync = require('write-file-atomic').sync;
|
||||
const { jsonParser } = require('../express-common');
|
||||
const { readSecret, SECRET_KEYS } = require('./secrets.js');
|
||||
|
||||
/**
|
||||
* Sanitizes a string.
|
||||
@@ -545,6 +546,99 @@ comfy.post('/generate', jsonParser, async (request, response) => {
|
||||
}
|
||||
});
|
||||
|
||||
const together = express.Router();
|
||||
|
||||
together.post('/models', jsonParser, async (_, response) => {
|
||||
try {
|
||||
const key = readSecret(SECRET_KEYS.TOGETHERAI);
|
||||
|
||||
if (!key) {
|
||||
console.log('TogetherAI key not found.');
|
||||
return response.sendStatus(400);
|
||||
}
|
||||
|
||||
const modelsResponse = await fetch('https://api.together.xyz/api/models', {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Authorization': `Bearer ${key}`,
|
||||
},
|
||||
});
|
||||
|
||||
if (!modelsResponse.ok) {
|
||||
console.log('TogetherAI returned an error.');
|
||||
return response.sendStatus(500);
|
||||
}
|
||||
|
||||
const data = await modelsResponse.json();
|
||||
|
||||
if (!Array.isArray(data)) {
|
||||
console.log('TogetherAI returned invalid data.');
|
||||
return response.sendStatus(500);
|
||||
}
|
||||
|
||||
const models = data
|
||||
.filter(x => x.display_type === 'image')
|
||||
.map(x => ({ value: x.name, text: x.display_name }));
|
||||
|
||||
return response.send(models);
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
return response.sendStatus(500);
|
||||
}
|
||||
});
|
||||
|
||||
together.post('/generate', jsonParser, async (request, response) => {
|
||||
try {
|
||||
const key = readSecret(SECRET_KEYS.TOGETHERAI);
|
||||
|
||||
if (!key) {
|
||||
console.log('TogetherAI key not found.');
|
||||
return response.sendStatus(400);
|
||||
}
|
||||
|
||||
console.log('TogetherAI request:', request.body);
|
||||
|
||||
const result = await fetch('https://api.together.xyz/api/inference', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({
|
||||
request_type: 'image-model-inference',
|
||||
prompt: request.body.prompt,
|
||||
negative_prompt: request.body.negative_prompt,
|
||||
height: request.body.height,
|
||||
width: request.body.width,
|
||||
model: request.body.model,
|
||||
steps: request.body.steps,
|
||||
n: 1,
|
||||
seed: Math.floor(Math.random() * 10_000_000), // Limited to 10000 on playground, works fine with more.
|
||||
sessionKey: getHexString(40), // Don't know if that's supposed to be random or not. It works either way.
|
||||
}),
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Authorization': `Bearer ${key}`,
|
||||
},
|
||||
});
|
||||
|
||||
if (!result.ok) {
|
||||
console.log('TogetherAI returned an error.');
|
||||
return response.sendStatus(500);
|
||||
}
|
||||
|
||||
const data = await result.json();
|
||||
console.log('TogetherAI response:', data);
|
||||
|
||||
if (data.status !== 'finished') {
|
||||
console.log('TogetherAI job failed.');
|
||||
return response.sendStatus(500);
|
||||
}
|
||||
|
||||
return response.send(data);
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
return response.sendStatus(500);
|
||||
}
|
||||
});
|
||||
|
||||
router.use('/comfy', comfy);
|
||||
router.use('/together', together);
|
||||
|
||||
module.exports = { router };
|
||||
|
@@ -111,7 +111,8 @@ async function generateThumbnail(type, file) {
|
||||
try {
|
||||
const quality = getConfigValue('thumbnailsQuality', 95);
|
||||
const image = await jimp.read(pathToOriginalFile);
|
||||
buffer = await image.cover(mySize[0], mySize[1]).quality(quality).getBufferAsync('image/jpeg');
|
||||
const imgType = type == 'avatar' && getConfigValue('avatarThumbnailsPng', false) ? 'image/png' : 'image/jpeg';
|
||||
buffer = await image.cover(mySize[0], mySize[1]).quality(quality).getBufferAsync(imgType);
|
||||
}
|
||||
catch (inner) {
|
||||
console.warn(`Thumbnailer can not process the image: ${pathToOriginalFile}. Using original size`);
|
||||
|
@@ -622,6 +622,10 @@ router.post('/remote/textgenerationwebui/encode', jsonParser, async function (re
|
||||
url += '/api/extra/tokencount';
|
||||
args.body = JSON.stringify({ 'prompt': text });
|
||||
break;
|
||||
case TEXTGEN_TYPES.LLAMACPP:
|
||||
url += '/tokenize';
|
||||
args.body = JSON.stringify({ 'content': text });
|
||||
break;
|
||||
default:
|
||||
url += '/v1/internal/encode';
|
||||
args.body = JSON.stringify({ 'text': text });
|
||||
@@ -637,7 +641,7 @@ router.post('/remote/textgenerationwebui/encode', jsonParser, async function (re
|
||||
}
|
||||
|
||||
const data = await result.json();
|
||||
const count = legacyApi ? data?.results[0]?.tokens : (data?.length ?? data?.value);
|
||||
const count = legacyApi ? data?.results[0]?.tokens : (data?.length ?? data?.value ?? data?.tokens?.length);
|
||||
const ids = legacyApi ? [] : (data?.tokens ?? data?.ids ?? []);
|
||||
|
||||
return response.send({ count, ids });
|
||||
|
@@ -106,6 +106,10 @@ router.post('/deepl', jsonParser, async (request, response) => {
|
||||
return response.sendStatus(400);
|
||||
}
|
||||
|
||||
if (request.body.lang === 'zh-CN' || request.body.lang === 'zh-TW') {
|
||||
request.body.lang = 'ZH';
|
||||
}
|
||||
|
||||
const text = request.body.text;
|
||||
const lang = request.body.lang;
|
||||
const formality = getConfigValue('deepl.formality', 'default');
|
||||
@@ -221,7 +225,7 @@ router.post('/deeplx', jsonParser, async (request, response) => {
|
||||
|
||||
const text = request.body.text;
|
||||
let lang = request.body.lang;
|
||||
if (request.body.lang === 'zh-CN') {
|
||||
if (request.body.lang === 'zh-CN' || request.body.lang === 'zh-TW') {
|
||||
lang = 'ZH';
|
||||
}
|
||||
|
||||
|
143
src/util.js
143
src/util.js
@@ -105,6 +105,21 @@ function delay(ms) {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a random hex string of the given length.
|
||||
* @param {number} length String length
|
||||
* @returns {string} Random hex string
|
||||
* @example getHexString(8) // 'a1b2c3d4'
|
||||
*/
|
||||
function getHexString(length) {
|
||||
const chars = '0123456789abcdef';
|
||||
let result = '';
|
||||
for (let i = 0; i < length; i++) {
|
||||
result += chars[Math.floor(Math.random() * chars.length)];
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts a file with given extension from an ArrayBuffer containing a ZIP archive.
|
||||
* @param {ArrayBuffer} archiveBuffer Buffer containing a ZIP archive
|
||||
@@ -384,6 +399,129 @@ function forwardFetchResponse(from, to) {
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds YAML-serialized object to the object.
|
||||
* @param {object} obj Object
|
||||
* @param {string} yamlString YAML-serialized object
|
||||
* @returns
|
||||
*/
|
||||
function mergeObjectWithYaml(obj, yamlString) {
|
||||
if (!yamlString) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const parsedObject = yaml.parse(yamlString);
|
||||
|
||||
if (Array.isArray(parsedObject)) {
|
||||
for (const item of parsedObject) {
|
||||
if (typeof item === 'object' && item && !Array.isArray(item)) {
|
||||
Object.assign(obj, item);
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (parsedObject && typeof parsedObject === 'object') {
|
||||
Object.assign(obj, parsedObject);
|
||||
}
|
||||
} catch {
|
||||
// Do nothing
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes keys from the object by YAML-serialized array.
|
||||
* @param {object} obj Object
|
||||
* @param {string} yamlString YAML-serialized array
|
||||
* @returns {void} Nothing
|
||||
*/
|
||||
function excludeKeysByYaml(obj, yamlString) {
|
||||
if (!yamlString) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const parsedObject = yaml.parse(yamlString);
|
||||
|
||||
if (Array.isArray(parsedObject)) {
|
||||
parsedObject.forEach(key => {
|
||||
delete obj[key];
|
||||
});
|
||||
} else if (typeof parsedObject === 'object') {
|
||||
Object.keys(parsedObject).forEach(key => {
|
||||
delete obj[key];
|
||||
});
|
||||
} else if (typeof parsedObject === 'string') {
|
||||
delete obj[parsedObject];
|
||||
}
|
||||
} catch {
|
||||
// Do nothing
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes trailing slash and /v1 from a string.
|
||||
* @param {string} str Input string
|
||||
* @returns {string} Trimmed string
|
||||
*/
|
||||
function trimV1(str) {
|
||||
return String(str ?? '').replace(/\/$/, '').replace(/\/v1$/, '');
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple TTL memory cache.
|
||||
*/
|
||||
class Cache {
|
||||
/**
|
||||
* @param {number} ttl Time to live in milliseconds
|
||||
*/
|
||||
constructor(ttl) {
|
||||
this.cache = new Map();
|
||||
this.ttl = ttl;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a value from the cache.
|
||||
* @param {string} key Cache key
|
||||
*/
|
||||
get(key) {
|
||||
const value = this.cache.get(key);
|
||||
if (value?.expiry > Date.now()) {
|
||||
return value.value;
|
||||
}
|
||||
|
||||
// Cache miss or expired, remove the key
|
||||
this.cache.delete(key);
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets a value in the cache.
|
||||
* @param {string} key Key
|
||||
* @param {object} value Value
|
||||
*/
|
||||
set(key, value) {
|
||||
this.cache.set(key, {
|
||||
value: value,
|
||||
expiry: Date.now() + this.ttl,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes a value from the cache.
|
||||
* @param {string} key Key
|
||||
*/
|
||||
remove(key) {
|
||||
this.cache.delete(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clears the cache.
|
||||
*/
|
||||
clear() {
|
||||
this.cache.clear();
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getConfig,
|
||||
getConfigValue,
|
||||
@@ -404,4 +542,9 @@ module.exports = {
|
||||
removeOldBackups,
|
||||
getImages,
|
||||
forwardFetchResponse,
|
||||
getHexString,
|
||||
mergeObjectWithYaml,
|
||||
excludeKeysByYaml,
|
||||
trimV1,
|
||||
Cache,
|
||||
};
|
||||
|
Reference in New Issue
Block a user