Merge branch 'staging' into qr-rewrite

This commit is contained in:
LenAnderson 2023-12-22 13:56:04 +00:00
commit 6e3159db3f
42 changed files with 3098 additions and 1309 deletions

View File

@ -55,7 +55,7 @@ openai:
# Will send a random user ID to OpenAI completion API # Will send a random user ID to OpenAI completion API
randomizeUserId: false randomizeUserId: false
# If not empty, will add this as a system message to the start of every caption completion prompt # If not empty, will add this as a system message to the start of every caption completion prompt
# Example: "Perform the instructions to the best of your ability.\n\n" (for LLaVA) # Example: "Perform the instructions to the best of your ability.\n" (for LLaVA)
# Not used in image inlining mode # Not used in image inlining mode
captionSystemPrompt: "" captionSystemPrompt: ""
# -- DEEPL TRANSLATION CONFIGURATION -- # -- DEEPL TRANSLATION CONFIGURATION --

View File

@ -2,7 +2,6 @@
"firstRun": true, "firstRun": true,
"username": "User", "username": "User",
"api_server": "http://127.0.0.1:5000/api", "api_server": "http://127.0.0.1:5000/api",
"api_server_textgenerationwebui": "http://127.0.0.1:5000/api",
"preset_settings": "RecoveredRuins", "preset_settings": "RecoveredRuins",
"user_avatar": "user-default.png", "user_avatar": "user-default.png",
"amount_gen": 250, "amount_gen": 250,

View File

@ -121,7 +121,7 @@
} }
/* Add the custom checkbox */ /* Add the custom checkbox */
.select2-results__option:before { .select2-results__option::before {
content: ''; content: '';
display: inline-block; display: inline-block;
position: absolute; position: absolute;
@ -141,11 +141,19 @@
} }
/* Add the custom checkbox checkmark */ /* Add the custom checkbox checkmark */
.select2-results__option--selected.select2-results__option:before { .select2-results__option--selected.select2-results__option::before {
content: '\2713'; content: '\2713';
font-weight: bold; font-weight: bold;
color: var(--SmartThemeBodyColor); color: var(--SmartThemeBodyColor);
background-color: var(--SmartThemeBlurTintColor); background-color: var(--SmartThemeBlurTintColor);
text-align: center; text-align: center;
line-height: 14px; line-height: 14px;
} }
.select2-results__option.select2-results__message {
background-color: inherit;
}
.select2-results__option.select2-results__message::before {
display: none;
}

View File

@ -5,8 +5,7 @@
width="64.000000pt" height="64.000000pt" viewBox="0 0 53.000000 60.000000" width="64.000000pt" height="64.000000pt" viewBox="0 0 53.000000 60.000000"
preserveAspectRatio="xMidYMid meet"> preserveAspectRatio="xMidYMid meet">
<g transform="translate(0.000000,63.000000) scale(0.100000,-0.100000)" <g transform="translate(0.000000,63.000000) scale(0.100000,-0.100000)" stroke="none">
fill="#000000" stroke="none">
<path d="M40 320 l0 -240 70 0 70 0 0 95 c0 95 0 95 25 95 23 0 25 -3 25 -50 <path d="M40 320 l0 -240 70 0 70 0 0 95 c0 95 0 95 25 95 23 0 25 -3 25 -50
l0 -50 70 0 70 0 0 50 c0 47 2 50 25 50 25 0 25 0 25 -95 l0 -95 70 0 70 0 0 l0 -50 70 0 70 0 0 50 c0 47 2 50 25 50 25 0 25 0 25 -95 l0 -95 70 0 70 0 0
240 0 240 -70 0 -70 0 0 -44 0 -45 -47 -3 -48 -3 -3 -47 c-3 -43 -5 -48 -28 240 0 240 -70 0 -70 0 0 -44 0 -45 -47 -3 -48 -3 -3 -47 c-3 -43 -5 -48 -28

Before

Width:  |  Height:  |  Size: 731 B

After

Width:  |  Height:  |  Size: 716 B

56
public/img/ollama.svg Normal file
View File

@ -0,0 +1,56 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
version="1.0"
width="467.388pt"
height="618.89093pt"
viewBox="0 0 467.388 618.89093"
preserveAspectRatio="xMidYMid"
id="svg5"
sodipodi:docname="ollama.svg"
inkscape:version="1.3 (0e150ed, 2023-07-21)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<defs
id="defs5" />
<sodipodi:namedview
id="namedview5"
pagecolor="#ffffff"
bordercolor="#000000"
borderopacity="0.25"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="false"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="pt"
inkscape:zoom="0.20971564"
inkscape:cx="309.9435"
inkscape:cy="278.94915"
inkscape:window-width="1280"
inkscape:window-height="688"
inkscape:window-x="0"
inkscape:window-y="25"
inkscape:window-maximized="1"
inkscape:current-layer="svg5" />
<g
transform="matrix(0.1,0,0,-0.1,-188.01849,632.89095)"
stroke="none"
id="g5">
<path
d="m 2849,6312 c -219,-73 -378,-347 -444,-768 -34,-213 -29,-629 9,-774 l 13,-49 -105,-103 c -143,-140 -201,-210 -265,-320 -85,-145 -143,-312 -167,-477 -20,-135 -8,-404 23,-522 34,-129 78,-237 138,-337 l 50,-83 -50,-117 c -96,-227 -130,-376 -138,-618 -12,-345 48,-589 208,-854 l 21,-35 -35,-57 c -43,-72 -100,-243 -122,-368 -26,-149 -31,-393 -11,-523 10,-59 22,-121 28,-138 l 10,-29 177,2 176,3 -1,40 c 0,22 -11,76 -23,120 -42,149 -26,433 34,610 13,39 51,120 84,179 33,60 63,122 67,138 10,46 -4,109 -34,154 -15,22 -46,69 -69,103 -171,254 -206,664 -88,1017 27,80 77,185 130,274 63,105 56,178 -25,260 -138,138 -221,394 -207,634 21,357 227,680 532,833 130,66 183,77 375,78 96,0 183,4 193,9 10,5 36,45 58,90 121,242 304,391 594,484 72,23 96,26 235,26 148,0 160,-1 250,-32 281,-94 469,-249 577,-478 50,-105 54,-107 215,-99 153,8 244,-6 365,-57 143,-59 293,-181 389,-314 62,-87 130,-236 161,-351 22,-84 26,-119 26,-243 0,-124 -4,-159 -26,-242 -31,-118 -101,-257 -167,-332 -83,-95 -88,-166 -19,-277 128,-206 190,-431 191,-689 1,-277 -53,-446 -217,-684 -36,-52 -51,-114 -41,-164 4,-16 34,-78 67,-138 33,-59 71,-140 84,-178 60,-182 76,-461 34,-611 -12,-44 -23,-98 -23,-120 l -1,-40 176,-3 177,-2 11,31 c 46,134 52,474 11,683 -25,129 -78,281 -121,351 l -31,50 21,35 c 159,261 219,507 208,848 -8,252 -53,444 -155,663 l -40,86 31,49 c 59,94 119,235 150,352 29,112 31,126 31,317 1,224 -9,294 -70,472 -19,55 -34,106 -34,113 0,21 -109,198 -159,257 -26,32 -98,107 -159,167 -61,60 -109,113 -106,118 16,25 35,205 41,368 8,260 -15,478 -72,675 -88,303 -214,474 -393,534 -207,70 -405,-47 -542,-318 -75,-151 -139,-379 -156,-558 l -7,-72 -99,50 c -189,95 -399,149 -578,149 -173,0 -383,-52 -560,-138 -52,-26 -98,-48 -101,-50 -3,-1 -9,28 -13,65 -29,288 -146,595 -282,742 -121,130 -274,179 -415,133 z m 153,-374 c 119,-127 208,-471 208,-804 0,-85 -4,-112 -20,-144 -17,-34 -25,-40 -53,-40 -51,0 -267,-30 -326,-45 -30,-8 -56,-13 -58,-12 -1,2 -7,67 -14,145 -16,215 7,467 62,657 39,133 121,275 159,275 7,0 25,-14 42,-32 z m 2529,1 
c 124,-133 208,-558 179,-909 -6,-74 -13,-136 -15,-138 -2,-2 -25,3 -52,11 -39,12 -122,24 -352,50 -7,1 -22,18 -33,37 -18,32 -19,50 -15,200 8,255 53,468 132,635 34,71 93,145 115,145 7,0 25,-14 41,-31 z"
id="path1" />
<path
d="m 4115,3729 c -390,-29 -735,-284 -824,-609 -26,-93 -28,-244 -5,-334 38,-149 171,-324 306,-404 85,-50 204,-99 288,-117 99,-22 453,-32 584,-16 350,41 626,253 700,538 20,78 21,240 1,318 -36,140 -144,303 -266,401 -218,174 -474,247 -784,223 z m 329,-258 c 291,-76 497,-291 500,-521 3,-227 -192,-414 -479,-460 -80,-13 -403,-13 -485,1 -212,34 -390,160 -452,319 -29,77 -29,194 1,272 79,206 278,353 544,404 97,18 269,11 371,-15 z"
id="path2" />
<path
d="m 4038,3151 c -58,-52 -40,-123 47,-177 43,-27 45,-31 40,-64 -19,-120 -19,-127 8,-154 22,-22 35,-26 85,-26 91,0 123,41 103,130 -17,74 -15,83 33,113 56,35 76,66 76,116 0,32 -6,44 -31,65 -39,33 -81,33 -136,1 l -43,-24 -42,24 c -58,33 -100,32 -140,-4 z"
id="path3" />
<path
d="m 2932,3664 c -107,-53 -169,-209 -128,-319 44,-115 194,-177 303,-124 89,43 153,148 153,250 0,171 -171,271 -328,193 z"
id="path4" />
<path
d="m 5320,3675 c -119,-54 -165,-193 -104,-320 27,-58 88,-118 141,-141 68,-29 162,-10 227,47 86,76 97,174 35,297 -45,89 -101,125 -198,129 -44,2 -78,-2 -101,-12 z"
id="path5" />
</g>
</svg>

After

Width:  |  Height:  |  Size: 4.5 KiB

23
public/img/tabby.svg Normal file
View File

@ -0,0 +1,23 @@
<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN"
"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
<svg version="1.0" xmlns="http://www.w3.org/2000/svg"
width="176.000000pt" height="176.000000pt" viewBox="0 0 176.000000 176.000000"
preserveAspectRatio="xMidYMid meet">
<g transform="translate(0.000000,176.000000) scale(0.100000,-0.100000)" stroke="none">
<path d="M197 1670 c-16 -19 -32 -58 -43 -107 -19 -87 -16 -222 11 -422 21
-162 19 -218 -10 -306 -49 -144 -43 -332 14 -443 54 -106 160 -180 297 -207
164 -33 202 -44 270 -77 59 -28 80 -33 144 -33 66 0 84 4 154 38 53 25 110 43
170 53 122 21 177 38 241 74 158 90 225 282 180 515 -8 42 -21 90 -30 107 -20
41 -19 144 1 284 9 60 17 177 17 259 1 134 -1 156 -21 206 -31 77 -50 93 -104
85 -84 -13 -183 -89 -319 -243 l-54 -62 -75 19 c-100 26 -224 26 -321 0 l-74
-20 -54 63 c-95 109 -182 186 -244 217 -79 39 -117 39 -150 0z m1121 -897 c2
-18 -5 -52 -16 -76 -25 -55 -61 -73 -171 -83 l-84 -7 5 51 c7 74 45 114 138
146 8 3 40 4 70 3 54 -2 55 -2 58 -34z m-693 16 c24 -7 55 -27 78 -51 33 -34
37 -45 37 -88 0 -57 5 -56 -119 -40 -96 13 -136 48 -141 125 -5 64 -4 65 53
65 28 0 70 -5 92 -11z m391 -384 c21 -28 18 -33 -31 -63 -32 -19 -48 -36 -53
-57 -6 -23 -14 -30 -32 -30 -18 0 -26 7 -32 32 -6 24 -19 38 -48 53 -31 16
-40 26 -40 46 0 34 27 42 134 40 73 -2 91 -6 102 -21z"/>
</g>
</svg>

After

Width:  |  Height:  |  Size: 1.3 KiB

View File

@ -437,14 +437,15 @@
Streaming</span> Streaming</span>
</label> </label>
<div class="toggle-description justifyLeft"> <div class="toggle-description justifyLeft">
<span data-i18n="Display the response bit by bit as it is generated.">Display <span data-i18n="Display the response bit by bit as it is generated.">
the response bit by bit as it is generated.</span><br> Display the response bit by bit as it is generated.
<span data-i18n="When this is off, responses will be displayed all at once when they are complete.">When </span><br>
this is off, responses will be displayed all at once when they are <span data-i18n="When this is off, responses will be displayed all at once when they are complete.">
complete.</span> When this is off, responses will be displayed all at once when they are complete.
</span>
</div> </div>
</div> </div>
<div class="range-block" data-source="openai,claude,windowai,openrouter,ai21,scale,makersuite,mistralai"> <div class="range-block" data-source="openai,claude,windowai,openrouter,ai21,scale,makersuite,mistralai,custom">
<div class="range-block-title" data-i18n="Temperature"> <div class="range-block-title" data-i18n="Temperature">
Temperature Temperature
</div> </div>
@ -457,7 +458,7 @@
</div> </div>
</div> </div>
</div> </div>
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,ai21"> <div data-newbie-hidden class="range-block" data-source="openai,openrouter,ai21,custom">
<div class="range-block-title" data-i18n="Frequency Penalty"> <div class="range-block-title" data-i18n="Frequency Penalty">
Frequency Penalty Frequency Penalty
</div> </div>
@ -470,7 +471,7 @@
</div> </div>
</div> </div>
</div> </div>
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,ai21"> <div data-newbie-hidden class="range-block" data-source="openai,openrouter,ai21,custom">
<div class="range-block-title" data-i18n="Presence Penalty"> <div class="range-block-title" data-i18n="Presence Penalty">
Presence Penalty Presence Penalty
</div> </div>
@ -509,7 +510,7 @@
</div> </div>
</div> </div>
</div> </div>
<div data-newbie-hidden class="range-block" data-source="openai,claude,openrouter,ai21,scale,makersuite,mistralai"> <div data-newbie-hidden class="range-block" data-source="openai,claude,openrouter,ai21,scale,makersuite,mistralai,custom">
<div class="range-block-title" data-i18n="Top-p"> <div class="range-block-title" data-i18n="Top-p">
Top P Top P
</div> </div>
@ -546,10 +547,6 @@
<textarea id="jailbreak_prompt_quick_edit_textarea" class="text_pole textarea_compact autoSetHeight" rows="6" placeholder="&mdash;" data-pm-prompt="jailbreak"></textarea> <textarea id="jailbreak_prompt_quick_edit_textarea" class="text_pole textarea_compact autoSetHeight" rows="6" placeholder="&mdash;" data-pm-prompt="jailbreak"></textarea>
</div> </div>
</div> </div>
<div id="claude_assistant_prefill_block" data-source="claude" class="range-block">
<span id="claude_assistant_prefill_text" data-i18n="Assistant Prefill">Assistant Prefill</span>
<textarea id="claude_assistant_prefill" class="text_pole textarea_compact" name="assistant_prefill autoSetHeight" rows="3" maxlength="10000" data-i18n="[placeholder]Start Claude's answer with..." placeholder="Start Claude's answer with..."></textarea>
</div>
</div> </div>
</div> </div>
<div data-newbie-hidden class="inline-drawer wide100p"> <div data-newbie-hidden class="inline-drawer wide100p">
@ -581,7 +578,7 @@
</div> </div>
<div class="toggle-description justifyLeft" data-i18n="Wraps activated World Info entries before inserting into the prompt."> <div class="toggle-description justifyLeft" data-i18n="Wraps activated World Info entries before inserting into the prompt.">
Wraps activated World Info entries before inserting into the prompt. Use Wraps activated World Info entries before inserting into the prompt. Use
<tt>{0}</tt> to mark a place where the content is inserted. <code>{0}</code> to mark a place where the content is inserted.
</div> </div>
<div class="wide100p"> <div class="wide100p">
<textarea id="wi_format_textarea" class="text_pole textarea_compact autoSetHeight" rows="3" placeholder="&mdash;"></textarea> <textarea id="wi_format_textarea" class="text_pole textarea_compact autoSetHeight" rows="3" placeholder="&mdash;"></textarea>
@ -595,7 +592,7 @@
</div> </div>
</div> </div>
<div class="toggle-description justifyLeft" data-i18n="Use scenario to mark a place where the content is inserted."> <div class="toggle-description justifyLeft" data-i18n="Use scenario to mark a place where the content is inserted.">
Use <tt>{{scenario}}</tt> to mark a place where the content is inserted. Use <code>{{scenario}}</code> to mark a place where the content is inserted.
</div> </div>
<div class="wide100p"> <div class="wide100p">
<textarea id="scenario_format_textarea" class="text_pole textarea_compact autoSetHeight" rows="3" placeholder="&mdash;"></textarea> <textarea id="scenario_format_textarea" class="text_pole textarea_compact autoSetHeight" rows="3" placeholder="&mdash;"></textarea>
@ -609,7 +606,7 @@
</div> </div>
</div> </div>
<div class="toggle-description justifyLeft" data-i18n="Use personality to mark a place where the content is inserted."> <div class="toggle-description justifyLeft" data-i18n="Use personality to mark a place where the content is inserted.">
Use <tt>{{personality}}</tt> to mark a place where the content is inserted. Use <code>{{personality}}</code> to mark a place where the content is inserted.
</div> </div>
<div class="wide100p"> <div class="wide100p">
<textarea id="personality_format_textarea" class="text_pole textarea_compact autoSetHeight" rows="3" placeholder="&mdash;"></textarea> <textarea id="personality_format_textarea" class="text_pole textarea_compact autoSetHeight" rows="3" placeholder="&mdash;"></textarea>
@ -733,6 +730,9 @@
</div> </div>
<div class="wide100p"> <div class="wide100p">
<input id="openai_reverse_proxy" type="text" class="text_pole" placeholder="https://api.openai.com/v1" maxlength="500" /> <input id="openai_reverse_proxy" type="text" class="text_pole" placeholder="https://api.openai.com/v1" maxlength="500" />
<small class="reverse_proxy_warning">
Doesn't work? Try adding <code>/v1</code> at the end!
</small>
</div> </div>
</div> </div>
<div class="range-block" data-source="openai,claude"> <div class="range-block" data-source="openai,claude">
@ -749,7 +749,7 @@
<div id="openai_proxy_password_show" title="Peek a password" class="menu_button fa-solid fa-eye-slash fa-fw"></div> <div id="openai_proxy_password_show" title="Peek a password" class="menu_button fa-solid fa-eye-slash fa-fw"></div>
</div> </div>
</div> </div>
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,mistralai"> <div data-newbie-hidden class="range-block" data-source="openai,openrouter,mistralai,custom">
<div class="range-block-title justifyLeft" data-i18n="Seed"> <div class="range-block-title justifyLeft" data-i18n="Seed">
Seed Seed
</div> </div>
@ -1512,7 +1512,7 @@
</span> </span>
</div> </div>
</div> </div>
<div class="range-block" data-source="openai,openrouter,makersuite"> <div class="range-block" data-source="openai,openrouter,makersuite,custom">
<label for="openai_image_inlining" class="checkbox_label flexWrap widthFreeExpand"> <label for="openai_image_inlining" class="checkbox_label flexWrap widthFreeExpand">
<input id="openai_image_inlining" type="checkbox" /> <input id="openai_image_inlining" type="checkbox" />
<span data-i18n="Send inline images">Send inline images</span> <span data-i18n="Send inline images">Send inline images</span>
@ -1541,10 +1541,38 @@
</div> </div>
<div data-newbie-hidden class="range-block" data-source="claude"> <div data-newbie-hidden class="range-block" data-source="claude">
<label for="exclude_assistant" title="Exclude Assistant suffix" class="checkbox_label widthFreeExpand"> <label for="exclude_assistant" title="Exclude Assistant suffix" class="checkbox_label widthFreeExpand">
<input id="exclude_assistant" type="checkbox" /><span data-i18n="Exclude Assistant suffix">Exclude Assistant suffix</span> <input id="exclude_assistant" type="checkbox" />
<span data-i18n="Exclude Assistant suffix">Exclude Assistant suffix</span>
</label> </label>
<div class="toggle-description justifyLeft"> <div class="toggle-description justifyLeft">
<span data-i18n="Exclude the assistant suffix from being added to the end of prompt.">Exclude the assistant suffix from being added to the end of prompt (Requires jailbreak with 'Assistant:' in it).</span> <span data-i18n="Exclude the assistant suffix from being added to the end of prompt.">
Exclude the assistant suffix from being added to the end of prompt (Requires jailbreak with 'Assistant:' in it).
</span>
</div>
<div id="claude_assistant_prefill_block" class="wide100p">
<span id="claude_assistant_prefill_text" data-i18n="Assistant Prefill">Assistant Prefill</span>
<textarea id="claude_assistant_prefill" class="text_pole textarea_compact" name="assistant_prefill autoSetHeight" rows="3" maxlength="10000" data-i18n="[placeholder]Start Claude's answer with..." placeholder="Start Claude's answer with..."></textarea>
</div>
<label for="claude_use_sysprompt" class="checkbox_label widthFreeExpand">
<input id="claude_use_sysprompt" type="checkbox" />
<span data-i18n="Use system prompt (Claude 2.1+ only)">
Use system prompt (Claude 2.1+ only)
</span>
</label>
<div class="toggle-description justifyLeft">
<span data-i18n="Exclude the 'Human: ' prefix from being added to the beginning of the prompt.">
Exclude the 'Human: ' prefix from being added to the beginning of the prompt.
Instead, place it between the system prompt and the first message with the role 'assistant' (right before 'Chat History' by default).
</span>
</div>
<div id="claude_human_sysprompt_message_block" class="wide100p">
<div class="range-block-title openai_restorable">
<span data-i18n="Human: first message">Human: first message</span>
<div id="claude_human_sysprompt_message_restore" title="Restore Human: first message" class="right_menu_button">
<div class="fa-solid fa-clock-rotate-left"></div>
</div>
</div>
<textarea id="claude_human_sysprompt_textarea" class="text_pole textarea_compact" rows="4" maxlength="10000" data-i18n="[placeholder]Human message" placeholder="Human message, instruction, etc.&#10;Adds nothing when empty, i.e. requires a new prompt with the role 'user' or manually adding the 'Human: ' prefix."></textarea>
</div> </div>
</div> </div>
</div> </div>
@ -1739,6 +1767,7 @@
<option value="tabby">TabbyAPI</option> <option value="tabby">TabbyAPI</option>
<option value="koboldcpp">KoboldCpp</option> <option value="koboldcpp">KoboldCpp</option>
<option value="llamacpp">llama.cpp</option> <option value="llamacpp">llama.cpp</option>
<option value="ollama">Ollama</option>
<option value="togetherai">TogetherAI</option> <option value="togetherai">TogetherAI</option>
</select> </select>
</div> </div>
@ -1779,7 +1808,11 @@
</div> </div>
<div class="flex1"> <div class="flex1">
<h4>Mancer Model</h4> <h4>Mancer Model</h4>
<select id="mancer_model"></select> <select id="mancer_model">
<option>
-- Connect to the API --
</option>
</select>
</div> </div>
</div> </div>
<div data-tg-type="ooba" class="flex-container flexFlowColumn"> <div data-tg-type="ooba" class="flex-container flexFlowColumn">
@ -1788,7 +1821,7 @@
oobabooga/text-generation-webui oobabooga/text-generation-webui
</a> </a>
<span data-i18n="Make sure you run it with"> <span data-i18n="Make sure you run it with">
Make sure you run it with <tt>--api</tt> flag Make sure you run it with <code>--api</code> flag
</span> </span>
</div> </div>
<div class="flex1"> <div class="flex1">
@ -1830,6 +1863,32 @@
<input id="llamacpp_api_url_text" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" data-server-history="llamacpp"> <input id="llamacpp_api_url_text" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" data-server-history="llamacpp">
</div> </div>
</div> </div>
<div data-tg-type="ollama">
<div class="flex-container flexFlowColumn">
<a href="https://github.com/jmorganca/ollama" target="_blank">
jmorganca/ollama
</a>
</div>
<div class="flex1">
<h4 data-i18n="API url">API URL</h4>
<small data-i18n="Example: http://127.0.0.1:11434">Example: http://127.0.0.1:11434</small>
<input id="ollama_api_url_text" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" data-server-history="ollama">
</div>
<div class="flex1">
<h4>
<span data-i18n="Ollama Model">Ollama Model</h4>
</h4>
<select id="ollama_model">
<option>
-- Connect to the API --
</option>
</select>
<div id="ollama_download_model" class="menu_button menu_button_icon">
<i class="fa-solid fa-download"></i>
<span data-i18n="Download">Download</span>
</div>
</div>
</div>
<div data-tg-type="tabby"> <div data-tg-type="tabby">
<div class="flex-container flexFlowColumn"> <div class="flex-container flexFlowColumn">
<a href="https://github.com/theroyallab/tabbyAPI" target="_blank"> <a href="https://github.com/theroyallab/tabbyAPI" target="_blank">
@ -1890,6 +1949,7 @@
<option value="ai21">AI21</option> <option value="ai21">AI21</option>
<option value="makersuite">Google MakerSuite</option> <option value="makersuite">Google MakerSuite</option>
<option value="mistralai">MistralAI</option> <option value="mistralai">MistralAI</option>
<option value="custom">Custom (OpenAI-compatible)</option>
</select> </select>
<form id="openai_form" data-source="openai" action="javascript:void(null);" method="post" enctype="multipart/form-data"> <form id="openai_form" data-source="openai" action="javascript:void(null);" method="post" enctype="multipart/form-data">
<h4><span data-i18n="OpenAI API key">OpenAI API key</span></h4> <h4><span data-i18n="OpenAI API key">OpenAI API key</span></h4>
@ -2194,9 +2254,40 @@
</select> </select>
</div> </div>
</form> </form>
<form id="custom_form" data-source="custom">
<h4 data-i18n="Custom Endpoint (Base URL)">Custom Endpoint (Base URL)</h4>
<div class="flex-container">
<input id="custom_api_url_text" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" placeholder="Example: http://localhost:1234/v1">
</div>
<div>
<small>
Doesn't work? Try adding <code>/v1</code> at the end of the URL!
</small>
</div>
<h4>
<span data-i18n="Custom API Key">Custom API Key</span>
<small>(Optional)</small>
</h4>
<div class="flex-container">
<input id="api_key_custom" name="api_key_custom" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
<div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_custom"></div>
</div>
<div data-for="api_key_custom" class="neutral_warning">
For privacy reasons, your API key will be hidden after you reload the page.
</div>
<h4>Enter a Model ID</h4>
<div class="flex-container">
<input id="custom_model_id" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" placeholder="Example: gpt-3.5-turbo">
</div>
<h4 data-i18n="Available Models">Available Models</h4>
<div class="flex-container">
<select id="model_custom_select" class="text_pole"></select>
</div>
</form>
<div class="flex-container flex"> <div class="flex-container flex">
<div id="api_button_openai" class="api_button menu_button menu_button_icon" type="submit" data-i18n="Connect">Connect</div> <div id="api_button_openai" class="api_button menu_button menu_button_icon" type="submit" data-i18n="Connect">Connect</div>
<div class="api_loading menu_button" data-i18n="Cancel">Cancel</div> <div class="api_loading menu_button" data-i18n="Cancel">Cancel</div>
<div data-source="custom" id="customize_additional_parameters" class="menu_button menu_button_icon">Additional Parameters</div>
<div data-source="openrouter" id="openrouter_authorize" class="menu_button menu_button_icon" title="Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai" data-i18n="[title]Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai">Authorize</div> <div data-source="openrouter" id="openrouter_authorize" class="menu_button menu_button_icon" title="Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai" data-i18n="[title]Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai">Authorize</div>
<div id="test_api_button" class="menu_button menu_button_icon" title="Verifies your API connection by sending a short test message. Be aware that you'll be credited for it!" data-i18n="[title]Verifies your API connection by sending a short test message. Be aware that you'll be credited for it!">Test Message</div> <div id="test_api_button" class="menu_button menu_button_icon" title="Verifies your API connection by sending a short test message. Be aware that you'll be credited for it!" data-i18n="[title]Verifies your API connection by sending a short test message. Be aware that you'll be credited for it!">Test Message</div>
</div> </div>
@ -2313,11 +2404,15 @@
<span class="fa-solid fa-circle-question note-link-span"></span> <span class="fa-solid fa-circle-question note-link-span"></span>
</a> </a>
</h4> </h4>
<div> <div class="flex-container">
<label for="instruct_enabled" class="checkbox_label"> <label for="instruct_enabled" class="checkbox_label flex1">
<input id="instruct_enabled" type="checkbox" /> <input id="instruct_enabled" type="checkbox" />
<span data-i18n="Enabled">Enabled</span> <span data-i18n="Enabled">Enabled</span>
</label> </label>
<label for="instruct_bind_to_context" class="checkbox_label flex1" title="If enabled, Context templates will be automatically selected based on selected Instruct template name or by preference.">
<input id="instruct_bind_to_context" type="checkbox" />
<span data-i18n="Bind to Context">Bind to Context</span>
</label>
</div> </div>
<label for="instruct_presets"> <label for="instruct_presets">
<span data-i18n="Presets">Presets</span> <span data-i18n="Presets">Presets</span>
@ -3743,7 +3838,7 @@
</div> </div>
<div id="rm_character_import" class="right_menu" style="display: none;"> <div id="rm_character_import" class="right_menu" style="display: none;">
<form id="form_import" action="javascript:void(null);" method="post" enctype="multipart/form-data"> <form id="form_import" action="javascript:void(null);" method="post" enctype="multipart/form-data">
<input multiple type="file" id="character_import_file" accept=".json, image/png" name="avatar"> <input multiple type="file" id="character_import_file" accept=".json, image/png, .yaml, .yml" name="avatar">
<input id="character_import_file_type" name="file_type" class="text_pole" maxlength="999" size="2" value="" autocomplete="off"> <input id="character_import_file_type" name="file_type" class="text_pole" maxlength="999" size="2" value="" autocomplete="off">
</form> </form>
</div> </div>

View File

@ -8,7 +8,7 @@
"system_sequence_prefix": "[INST] <<SYS>>\n", "system_sequence_prefix": "[INST] <<SYS>>\n",
"system_sequence_suffix": "\n<</SYS>>\n", "system_sequence_suffix": "\n<</SYS>>\n",
"stop_sequence": "", "stop_sequence": "",
"separator_sequence": "\n", "separator_sequence": " ",
"wrap": false, "wrap": false,
"macro": true, "macro": true,
"names": false, "names": false,

File diff suppressed because one or more lines are too long

765
public/lib/css-parser.mjs Normal file
View File

@ -0,0 +1,765 @@
function $parcel$defineInteropFlag(a) {
Object.defineProperty(a, '__esModule', {value: true, configurable: true});
}
function $parcel$export(e, n, v, s) {
Object.defineProperty(e, n, {get: v, set: s, enumerable: true, configurable: true});
}
var $009ddb00d3ec72b8$exports = {};
$parcel$defineInteropFlag($009ddb00d3ec72b8$exports);
$parcel$export($009ddb00d3ec72b8$exports, "default", () => $009ddb00d3ec72b8$export$2e2bcd8739ae039);
class $009ddb00d3ec72b8$export$2e2bcd8739ae039 extends Error {
constructor(filename, msg, lineno, column, css){
super(filename + ":" + lineno + ":" + column + ": " + msg);
this.reason = msg;
this.filename = filename;
this.line = lineno;
this.column = column;
this.source = css;
}
}
var $0865a9fb4cc365fe$exports = {};
$parcel$defineInteropFlag($0865a9fb4cc365fe$exports);
$parcel$export($0865a9fb4cc365fe$exports, "default", () => $0865a9fb4cc365fe$export$2e2bcd8739ae039);
/**
* Store position information for a node
*/ class $0865a9fb4cc365fe$export$2e2bcd8739ae039 {
constructor(start, end, source){
this.start = start;
this.end = end;
this.source = source;
}
}
var $b2e137848b48cf4f$exports = {};
$parcel$export($b2e137848b48cf4f$exports, "CssTypes", () => $b2e137848b48cf4f$export$9be5dd6e61d5d73a);
var $b2e137848b48cf4f$export$9be5dd6e61d5d73a;
(function(CssTypes) {
CssTypes["stylesheet"] = "stylesheet";
CssTypes["rule"] = "rule";
CssTypes["declaration"] = "declaration";
CssTypes["comment"] = "comment";
CssTypes["container"] = "container";
CssTypes["charset"] = "charset";
CssTypes["document"] = "document";
CssTypes["customMedia"] = "custom-media";
CssTypes["fontFace"] = "font-face";
CssTypes["host"] = "host";
CssTypes["import"] = "import";
CssTypes["keyframes"] = "keyframes";
CssTypes["keyframe"] = "keyframe";
CssTypes["layer"] = "layer";
CssTypes["media"] = "media";
CssTypes["namespace"] = "namespace";
CssTypes["page"] = "page";
CssTypes["supports"] = "supports";
})($b2e137848b48cf4f$export$9be5dd6e61d5d73a || ($b2e137848b48cf4f$export$9be5dd6e61d5d73a = {}));
// http://www.w3.org/TR/CSS21/grammar.html
// https://github.com/visionmedia/css-parse/pull/49#issuecomment-30088027
// New rule => https://www.w3.org/TR/CSS22/syndata.html#comments
// [^] is equivalent to [.\n\r]
const $d708735ed1303b43$var$commentre = /\/\*[^]*?(?:\*\/|$)/g;
/**
 * Parse a CSS string into an AST.
 *
 * The parser is recursive-descent: each inner function consumes from the
 * shared `css` string via `match`, which also tracks line/column for node
 * positions and error reporting.
 *
 * @param {string} css CSS source text (consumed destructively while parsing).
 * @param {Object} [options] Supports `source` (file label stored in node
 *   positions and errors) and `silent` (collect parsing errors in the result
 *   instead of throwing).
 * @returns {Object} Stylesheet AST root node.
 */ const $d708735ed1303b43$export$98e6a39c04603d36 = (css, options)=>{
    options = options || {};
    /**
     * Positional.
     */ let lineno = 1;
    let column = 1;
    /**
     * Update lineno and column based on `str`.
     */ function updatePosition(str) {
        const lines = str.match(/\n/g);
        if (lines) lineno += lines.length;
        const i = str.lastIndexOf("\n");
        // ~i is non-zero when a newline was found: restart the column after it.
        column = ~i ? str.length - i : column + str.length;
    }
    /**
     * Mark position and patch `node.position`.
     */ function position() {
        const start = {
            line: lineno,
            column: column
        };
        return function(node) {
            node.position = new (0, $0865a9fb4cc365fe$export$2e2bcd8739ae039)(start, {
                line: lineno,
                column: column
            }, options?.source || "");
            whitespace();
            return node;
        };
    }
    /**
     * Error `msg`.
     */ const errorsList = [];
    function error(msg) {
        const err = new (0, $009ddb00d3ec72b8$export$2e2bcd8739ae039)(options?.source || "", msg, lineno, column, css);
        if (options?.silent) errorsList.push(err);
        else throw err;
    }
    /**
     * Parse stylesheet.
     */ function stylesheet() {
        const rulesList = rules();
        const result = {
            type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).stylesheet,
            stylesheet: {
                source: options?.source,
                rules: rulesList,
                parsingErrors: errorsList
            }
        };
        return result;
    }
    /**
     * Opening brace.
     */ function open() {
        return match(/^{\s*/);
    }
    /**
     * Closing brace.
     */ function close() {
        return match(/^}/);
    }
    /**
     * Parse ruleset.
     */ function rules() {
        let node;
        const rules = [];
        whitespace();
        comments(rules);
        while(css.length && css.charAt(0) !== "}" && (node = atrule() || rule()))if (node) {
            rules.push(node);
            comments(rules);
        }
        return rules;
    }
    /**
     * Match `re` and return captures.
     * On success the matched text is consumed from `css` and the
     * line/column counters are advanced.
     */ function match(re) {
        const m = re.exec(css);
        if (!m) return;
        const str = m[0];
        updatePosition(str);
        css = css.slice(str.length);
        return m;
    }
    /**
     * Parse whitespace.
     */ function whitespace() {
        match(/^\s*/);
    }
    /**
     * Parse comments;
     */ function comments(rules) {
        let c;
        rules = rules || [];
        while(c = comment())if (c) rules.push(c);
        return rules;
    }
    /**
     * Parse comment.
     */ function comment() {
        const pos = position();
        if ("/" !== css.charAt(0) || "*" !== css.charAt(1)) return;
        const m = match(/^\/\*[^]*?\*\//);
        if (!m) return error("End of comment missing");
        return pos({
            type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).comment,
            comment: m[0].slice(2, -2)
        });
    }
    /**
     * Find the index of the ')' matching the '(' at `start`,
     * recursing over nested parentheses. Returns -1 if unbalanced.
     * NOTE(review): `depth` is threaded through the recursion but never read.
     */ function findClosingParenthese(str, start, depth) {
        let ptr = start + 1;
        let found = false;
        let closeParentheses = str.indexOf(")", ptr);
        while(!found && closeParentheses !== -1){
            const nextParentheses = str.indexOf("(", ptr);
            if (nextParentheses !== -1 && nextParentheses < closeParentheses) {
                const nextSearch = findClosingParenthese(str, nextParentheses + 1, depth + 1);
                ptr = nextSearch + 1;
                closeParentheses = str.indexOf(")", ptr);
            } else found = true;
        }
        if (found && closeParentheses !== -1) return closeParentheses;
        else return -1;
    }
    /**
     * Parse selector.
     * Returns the list of comma-separated selectors; commas inside
     * parentheses or quoted attribute values are protected with \u200C
     * during the split, then restored.
     */ function selector() {
        const m = match(/^([^{]+)/);
        if (!m) return;
        // remove comment in selector;
        let res = $d708735ed1303b43$var$trim(m[0]).replace($d708735ed1303b43$var$commentre, "");
        // Optimisation: If there is no ',' no need to split or post-process (this is less costly)
        if (res.indexOf(",") === -1) return [
            res
        ];
        // Replace all the , in the parentheses by \u200C
        let ptr = 0;
        let startParentheses = res.indexOf("(", ptr);
        while(startParentheses !== -1){
            const closeParentheses = findClosingParenthese(res, startParentheses, 0);
            if (closeParentheses === -1) break;
            ptr = closeParentheses + 1;
            res = res.substring(0, startParentheses) + res.substring(startParentheses, closeParentheses).replace(/,/g, "\u200C") + res.substring(closeParentheses);
            startParentheses = res.indexOf("(", ptr);
        }
        // Replace all the , in ' and " by \u200C
        res = res/**
         * replace ',' by \u200C for data selector (div[data-lang="fr,de,us"])
         *
         * Examples:
         * div[data-lang="fr,\"de,us"]
         * div[data-lang='fr,\'de,us']
         *
         * Regex logic:
         * ("|')(?:\\\1|.)*?\1 => Handle the " and '
         *
         * Optimization 1:
         * No greedy capture (see docs about the difference between .* and .*?)
         *
         * Optimization 2:
         * ("|')(?:\\\1|.)*?\1 this use reference to capture group, it work faster.
         */ .replace(/("|')(?:\\\1|.)*?\1/g, (m)=>m.replace(/,/g, "\u200C"));
        // Split all the left , and replace all the \u200C by ,
        return res// Split the selector by ','
        .split(",")// Replace back \u200C by ','
        .map((s)=>{
            return $d708735ed1303b43$var$trim(s.replace(/\u200C/g, ","));
        });
    }
    /**
     * Parse declaration.
     */ function declaration() {
        const pos = position();
        // prop
        const propMatch = match(/^(\*?[-#/*\\\w]+(\[[0-9a-z_-]+\])?)\s*/);
        if (!propMatch) return;
        const propValue = $d708735ed1303b43$var$trim(propMatch[0]);
        // :
        if (!match(/^:\s*/)) return error("property missing ':'");
        // val
        // NOTE(review): \([^)]*?\) does not handle nested parentheses in
        // values — confirm this limitation is acceptable for callers.
        const val = match(/^((?:'(?:\\'|.)*?'|"(?:\\"|.)*?"|\([^)]*?\)|[^};])+)/);
        const ret = pos({
            type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).declaration,
            property: propValue.replace($d708735ed1303b43$var$commentre, ""),
            value: val ? $d708735ed1303b43$var$trim(val[0]).replace($d708735ed1303b43$var$commentre, "") : ""
        });
        // ;
        match(/^[;\s]*/);
        return ret;
    }
    /**
     * Parse declarations.
     */ function declarations() {
        const decls = [];
        if (!open()) return error("missing '{'");
        comments(decls);
        // declarations
        let decl;
        while(decl = declaration())if (decl) {
            decls.push(decl);
            comments(decls);
        }
        if (!close()) return error("missing '}'");
        return decls;
    }
    /**
     * Parse keyframe.
     */ function keyframe() {
        let m;
        const vals = [];
        const pos = position();
        while(m = match(/^((\d+\.\d+|\.\d+|\d+)%?|[a-z]+)\s*/)){
            vals.push(m[1]);
            match(/^,\s*/);
        }
        if (!vals.length) return;
        return pos({
            type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).keyframe,
            values: vals,
            declarations: declarations() || []
        });
    }
    /**
     * Parse keyframes.
     */ function atkeyframes() {
        const pos = position();
        const m1 = match(/^@([-\w]+)?keyframes\s*/);
        if (!m1) return;
        const vendor = m1[1];
        // identifier
        const m2 = match(/^([-\w]+)\s*/);
        if (!m2) return error("@keyframes missing name");
        const name = m2[1];
        if (!open()) return error("@keyframes missing '{'");
        let frame;
        let frames = comments();
        while(frame = keyframe()){
            frames.push(frame);
            frames = frames.concat(comments());
        }
        if (!close()) return error("@keyframes missing '}'");
        return pos({
            type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).keyframes,
            name: name,
            vendor: vendor,
            keyframes: frames
        });
    }
    /**
     * Parse supports.
     */ function atsupports() {
        const pos = position();
        const m = match(/^@supports *([^{]+)/);
        if (!m) return;
        const supports = $d708735ed1303b43$var$trim(m[1]);
        if (!open()) return error("@supports missing '{'");
        const style = comments().concat(rules());
        if (!close()) return error("@supports missing '}'");
        return pos({
            type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).supports,
            supports: supports,
            rules: style
        });
    }
    /**
     * Parse host.
     */ function athost() {
        const pos = position();
        const m = match(/^@host\s*/);
        if (!m) return;
        if (!open()) return error("@host missing '{'");
        const style = comments().concat(rules());
        if (!close()) return error("@host missing '}'");
        return pos({
            type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).host,
            rules: style
        });
    }
    /**
     * Parse container.
     */ function atcontainer() {
        const pos = position();
        const m = match(/^@container *([^{]+)/);
        if (!m) return;
        const container = $d708735ed1303b43$var$trim(m[1]);
        if (!open()) return error("@container missing '{'");
        const style = comments().concat(rules());
        if (!close()) return error("@container missing '}'");
        return pos({
            type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).container,
            container: container,
            rules: style
        });
    }
    /**
     * Parse layer. A block-less `@layer a, b;` statement yields a node
     * without a `rules` property.
     */ function atlayer() {
        const pos = position();
        const m = match(/^@layer *([^{;@]+)/);
        if (!m) return;
        const layer = $d708735ed1303b43$var$trim(m[1]);
        if (!open()) {
            match(/^[;\s]*/);
            return pos({
                type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).layer,
                layer: layer
            });
        }
        const style = comments().concat(rules());
        if (!close()) return error("@layer missing '}'");
        return pos({
            type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).layer,
            layer: layer,
            rules: style
        });
    }
    /**
     * Parse media.
     */ function atmedia() {
        const pos = position();
        const m = match(/^@media *([^{]+)/);
        if (!m) return;
        const media = $d708735ed1303b43$var$trim(m[1]);
        if (!open()) return error("@media missing '{'");
        const style = comments().concat(rules());
        if (!close()) return error("@media missing '}'");
        return pos({
            type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).media,
            media: media,
            rules: style
        });
    }
    /**
     * Parse custom-media.
     */ function atcustommedia() {
        const pos = position();
        const m = match(/^@custom-media\s+(--\S+)\s*([^{;\s][^{;]*);/);
        if (!m) return;
        return pos({
            type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).customMedia,
            name: $d708735ed1303b43$var$trim(m[1]),
            media: $d708735ed1303b43$var$trim(m[2])
        });
    }
    /**
     * Parse paged media.
     */ function atpage() {
        const pos = position();
        const m = match(/^@page */);
        if (!m) return;
        const sel = selector() || [];
        if (!open()) return error("@page missing '{'");
        let decls = comments();
        // declarations
        let decl;
        while(decl = declaration()){
            decls.push(decl);
            decls = decls.concat(comments());
        }
        if (!close()) return error("@page missing '}'");
        return pos({
            type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).page,
            selectors: sel,
            declarations: decls
        });
    }
    /**
     * Parse document.
     */ function atdocument() {
        const pos = position();
        const m = match(/^@([-\w]+)?document *([^{]+)/);
        if (!m) return;
        const vendor = $d708735ed1303b43$var$trim(m[1]);
        const doc = $d708735ed1303b43$var$trim(m[2]);
        if (!open()) return error("@document missing '{'");
        const style = comments().concat(rules());
        if (!close()) return error("@document missing '}'");
        return pos({
            type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).document,
            document: doc,
            vendor: vendor,
            rules: style
        });
    }
    /**
     * Parse font-face.
     */ function atfontface() {
        const pos = position();
        const m = match(/^@font-face\s*/);
        if (!m) return;
        if (!open()) return error("@font-face missing '{'");
        let decls = comments();
        // declarations
        let decl;
        while(decl = declaration()){
            decls.push(decl);
            decls = decls.concat(comments());
        }
        if (!close()) return error("@font-face missing '}'");
        return pos({
            type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).fontFace,
            declarations: decls
        });
    }
    /**
     * Parse import
     */ const atimport = _compileAtrule("import");
    /**
     * Parse charset
     */ const atcharset = _compileAtrule("charset");
    /**
     * Parse namespace
     */ const atnamespace = _compileAtrule("namespace");
    /**
     * Parse non-block at-rules
     */ function _compileAtrule(name) {
        const re = new RegExp("^@" + name + "\\s*((?::?[^;'\"]|\"(?:\\\\\"|[^\"])*?\"|'(?:\\\\'|[^'])*?')+)(?:;|$)");
        // ^@import\s*([^;"']|("|')(?:\\\2|.)*?\2)+(;|$)
        return function() {
            const pos = position();
            const m = match(re);
            if (!m) return;
            const ret = {
                type: name
            };
            ret[name] = m[1].trim();
            return pos(ret);
        };
    }
    /**
     * Parse at rule.
     */ function atrule() {
        if (css[0] !== "@") return;
        return atkeyframes() || atmedia() || atcustommedia() || atsupports() || atimport() || atcharset() || atnamespace() || atdocument() || atpage() || athost() || atfontface() || atcontainer() || atlayer();
    }
    /**
     * Parse rule.
     */ function rule() {
        const pos = position();
        const sel = selector();
        if (!sel) return error("selector missing");
        comments();
        return pos({
            type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).rule,
            selectors: sel,
            declarations: declarations() || []
        });
    }
    return $d708735ed1303b43$var$addParent(stylesheet());
};
/**
 * Trim `str`, treating any falsy value (undefined, null, "") as the
 * empty string.
 * @param {string | undefined | null} str
 * @returns {string}
 */ function $d708735ed1303b43$var$trim(str) {
    if (!str) return "";
    return str.trim();
}
/**
 * Adds a non-enumerable `parent` reference to every node in the tree.
 * A "node" is any object carrying a string `type` property; non-node
 * containers (arrays, the `stylesheet` wrapper object) are traversed
 * but children keep pointing at the nearest enclosing node.
 * @param {Object} obj Root of the subtree to walk.
 * @param {Object} [parent] Nearest enclosing node (internal recursion arg).
 * @returns {Object} The same `obj`, with parent links attached.
 */ function $d708735ed1303b43$var$addParent(obj, parent) {
    const isNode = obj && typeof obj.type === "string";
    const nearest = isNode ? obj : parent;
    for (const key of Object.keys(obj)) {
        const child = obj[key];
        if (Array.isArray(child)) {
            for (const item of child) $d708735ed1303b43$var$addParent(item, nearest);
        } else if (child && typeof child === "object") {
            $d708735ed1303b43$var$addParent(child, nearest);
        }
    }
    // `parent` is non-enumerable so the AST stays JSON-serializable
    // without cyclic references showing up in key iteration.
    if (isNode) Object.defineProperty(obj, "parent", {
        configurable: true,
        writable: true,
        enumerable: false,
        value: parent || null
    });
    return obj;
}
// Default export of the parser module: alias for the parse function above.
var $d708735ed1303b43$export$2e2bcd8739ae039 = $d708735ed1303b43$export$98e6a39c04603d36;
/**
 * Serializes a CSS AST (as produced by the parser above) back to a string.
 * Two modes: pretty-printed (default, configurable indentation string) and
 * compressed (`options.compress`), which drops comments and whitespace.
 */ class $de9540138ed1fd01$var$Compiler {
    /**
     * @param {Object} [options] `indent` (indentation string, default two
     *   spaces) and `compress` (emit minified output).
     */ constructor(options){
        this.level = 0;
        this.indentation = "  ";
        this.compress = false;
        if (typeof options?.indent === "string") this.indentation = options?.indent;
        if (options?.compress) this.compress = true;
    }
    // We disable no-unused-vars for _position. We keep position for potential reintroduction of source-map
    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    emit(str, _position) {
        return str;
    }
    /**
     * Increase, decrease or return current indentation.
     * Called with a delta it adjusts the level and returns ""; called with
     * no argument it returns the current indentation string.
     */ indent(level) {
        this.level = this.level || 1;
        if (level) {
            this.level += level;
            return "";
        }
        return Array(this.level).join(this.indentation);
    }
    /**
     * Dispatch a node to the visitor method matching its type.
     * NOTE(review): there is no default case — an unknown node type yields
     * undefined, which mapVisit would concatenate as the string "undefined".
     * Confirm upstream only ever emits the types enumerated here.
     */ visit(node) {
        switch(node.type){
            case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).stylesheet:
                return this.stylesheet(node);
            case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).rule:
                return this.rule(node);
            case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).declaration:
                return this.declaration(node);
            case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).comment:
                return this.comment(node);
            case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).container:
                return this.container(node);
            case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).charset:
                return this.charset(node);
            case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).document:
                return this.document(node);
            case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).customMedia:
                return this.customMedia(node);
            case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).fontFace:
                return this.fontFace(node);
            case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).host:
                return this.host(node);
            case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).import:
                return this.import(node);
            case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).keyframes:
                return this.keyframes(node);
            case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).keyframe:
                return this.keyframe(node);
            case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).layer:
                return this.layer(node);
            case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).media:
                return this.media(node);
            case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).namespace:
                return this.namespace(node);
            case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).page:
                return this.page(node);
            case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).supports:
                return this.supports(node);
        }
    }
    /**
     * Visit each node in `nodes`, concatenating the results with `delim`
     * between entries (no trailing delimiter).
     */ mapVisit(nodes, delim) {
        let buf = "";
        delim = delim || "";
        for(let i = 0, length = nodes.length; i < length; i++){
            buf += this.visit(nodes[i]);
            if (delim && i < length - 1) buf += this.emit(delim);
        }
        return buf;
    }
    /**
     * Compile a stylesheet AST to CSS text (entry point).
     */ compile(node) {
        if (this.compress) return node.stylesheet.rules.map(this.visit, this).join("");
        return this.stylesheet(node);
    }
    /**
     * Visit stylesheet node.
     */ stylesheet(node) {
        return this.mapVisit(node.stylesheet.rules, "\n\n");
    }
    /**
     * Visit comment node.
     */ comment(node) {
        if (this.compress) return this.emit("", node.position);
        return this.emit(this.indent() + "/*" + node.comment + "*/", node.position);
    }
    /**
     * Visit container node.
     */ container(node) {
        if (this.compress) return this.emit("@container " + node.container, node.position) + this.emit("{") + this.mapVisit(node.rules) + this.emit("}");
        return this.emit(this.indent() + "@container " + node.container, node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.rules, "\n\n") + this.emit("\n" + this.indent(-1) + this.indent() + "}");
    }
    /**
     * Visit layer node. Block-less layer statements (no `rules`) are
     * emitted as `@layer name;`.
     */ layer(node) {
        if (this.compress) return this.emit("@layer " + node.layer, node.position) + (node.rules ? this.emit("{") + this.mapVisit(node.rules) + this.emit("}") : ";");
        return this.emit(this.indent() + "@layer " + node.layer, node.position) + (node.rules ? this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.rules, "\n\n") + this.emit("\n" + this.indent(-1) + this.indent() + "}") : ";");
    }
    /**
     * Visit import node.
     */ import(node) {
        return this.emit("@import " + node.import + ";", node.position);
    }
    /**
     * Visit media node.
     */ media(node) {
        if (this.compress) return this.emit("@media " + node.media, node.position) + this.emit("{") + this.mapVisit(node.rules) + this.emit("}");
        return this.emit(this.indent() + "@media " + node.media, node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.rules, "\n\n") + this.emit("\n" + this.indent(-1) + this.indent() + "}");
    }
    /**
     * Visit document node.
     */ document(node) {
        const doc = "@" + (node.vendor || "") + "document " + node.document;
        if (this.compress) return this.emit(doc, node.position) + this.emit("{") + this.mapVisit(node.rules) + this.emit("}");
        return this.emit(doc, node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.rules, "\n\n") + this.emit(this.indent(-1) + "\n}");
    }
    /**
     * Visit charset node.
     */ charset(node) {
        return this.emit("@charset " + node.charset + ";", node.position);
    }
    /**
     * Visit namespace node.
     */ namespace(node) {
        return this.emit("@namespace " + node.namespace + ";", node.position);
    }
    /**
     * Visit supports node.
     */ supports(node) {
        if (this.compress) return this.emit("@supports " + node.supports, node.position) + this.emit("{") + this.mapVisit(node.rules) + this.emit("}");
        return this.emit(this.indent() + "@supports " + node.supports, node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.rules, "\n\n") + this.emit("\n" + this.indent(-1) + this.indent() + "}");
    }
    /**
     * Visit keyframes node.
     */ keyframes(node) {
        if (this.compress) return this.emit("@" + (node.vendor || "") + "keyframes " + node.name, node.position) + this.emit("{") + this.mapVisit(node.keyframes) + this.emit("}");
        return this.emit("@" + (node.vendor || "") + "keyframes " + node.name, node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.keyframes, "\n") + this.emit(this.indent(-1) + "}");
    }
    /**
     * Visit keyframe node.
     */ keyframe(node) {
        const decls = node.declarations;
        if (this.compress) return this.emit(node.values.join(","), node.position) + this.emit("{") + this.mapVisit(decls) + this.emit("}");
        return this.emit(this.indent()) + this.emit(node.values.join(", "), node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(decls, "\n") + this.emit(this.indent(-1) + "\n" + this.indent() + "}\n");
    }
    /**
     * Visit page node.
     */ page(node) {
        if (this.compress) {
            const sel = node.selectors.length ? node.selectors.join(", ") : "";
            return this.emit("@page " + sel, node.position) + this.emit("{") + this.mapVisit(node.declarations) + this.emit("}");
        }
        const sel = node.selectors.length ? node.selectors.join(", ") + " " : "";
        return this.emit("@page " + sel, node.position) + this.emit("{\n") + this.emit(this.indent(1)) + this.mapVisit(node.declarations, "\n") + this.emit(this.indent(-1)) + this.emit("\n}");
    }
    /**
     * Visit font-face node.
     */ fontFace(node) {
        if (this.compress) return this.emit("@font-face", node.position) + this.emit("{") + this.mapVisit(node.declarations) + this.emit("}");
        return this.emit("@font-face ", node.position) + this.emit("{\n") + this.emit(this.indent(1)) + this.mapVisit(node.declarations, "\n") + this.emit(this.indent(-1)) + this.emit("\n}");
    }
    /**
     * Visit host node.
     */ host(node) {
        if (this.compress) return this.emit("@host", node.position) + this.emit("{") + this.mapVisit(node.rules) + this.emit("}");
        return this.emit("@host", node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.rules, "\n\n") + this.emit(this.indent(-1) + "\n}");
    }
    /**
     * Visit custom-media node.
     */ customMedia(node) {
        return this.emit("@custom-media " + node.name + " " + node.media + ";", node.position);
    }
    /**
     * Visit rule node. Rules without declarations are omitted entirely.
     */ rule(node) {
        const decls = node.declarations;
        if (!decls.length) return "";
        if (this.compress) return this.emit(node.selectors.join(","), node.position) + this.emit("{") + this.mapVisit(decls) + this.emit("}");
        const indent = this.indent();
        return this.emit(node.selectors.map((s)=>{
            return indent + s;
        }).join(",\n"), node.position) + this.emit(" {\n") + this.emit(this.indent(1)) + this.mapVisit(decls, "\n") + this.emit(this.indent(-1)) + this.emit("\n" + this.indent() + "}");
    }
    /**
     * Visit declaration node.
     */ declaration(node) {
        if (this.compress) return this.emit(node.property + ":" + node.value, node.position) + this.emit(";");
        return this.emit(this.indent()) + this.emit(node.property + ": " + node.value, node.position) + this.emit(";");
    }
}
// Public alias for the Compiler class.
var $de9540138ed1fd01$export$2e2bcd8739ae039 = $de9540138ed1fd01$var$Compiler;
/**
 * Serialize a CSS AST back to text.
 * @param {Object} node Stylesheet AST root.
 * @param {Object} [options] Compiler options (`indent`, `compress`).
 * @returns {string} The rendered CSS.
 */ var $fdf773ab87e20450$export$2e2bcd8739ae039 = (node, options)=>{
    // A fresh Compiler per call keeps indentation state isolated.
    return new (0, $de9540138ed1fd01$export$2e2bcd8739ae039)(options || {}).compile(node);
};
// Re-export parse and stringify under the package's public entry names.
const $149c1bd638913645$export$98e6a39c04603d36 = (0, $d708735ed1303b43$export$2e2bcd8739ae039);
const $149c1bd638913645$export$fac44ee5b035f737 = (0, $fdf773ab87e20450$export$2e2bcd8739ae039);
// Default export mirrors the named exports: { parse, stringify }.
var $149c1bd638913645$export$2e2bcd8739ae039 = {
    parse: $149c1bd638913645$export$98e6a39c04603d36,
    stringify: $149c1bd638913645$export$fac44ee5b035f737
};
export {$149c1bd638913645$export$98e6a39c04603d36 as parse, $149c1bd638913645$export$fac44ee5b035f737 as stringify, $149c1bd638913645$export$2e2bcd8739ae039 as default, $b2e137848b48cf4f$export$9be5dd6e61d5d73a as CssTypes};
//# sourceMappingURL=index.mjs.map

View File

@ -69,6 +69,27 @@ EventEmitter.prototype.emit = async function (event) {
} }
}; };
EventEmitter.prototype.emitAndWait = function (event) {
console.debug('Event emitted: ' + event);
var i, listeners, length, args = [].slice.call(arguments, 1);
if (typeof this.events[event] === 'object') {
listeners = this.events[event].slice();
length = listeners.length;
for (i = 0; i < length; i++) {
try {
listeners[i].apply(this, args);
}
catch (err) {
console.error(err);
console.trace('Error in event listener');
}
}
}
};
EventEmitter.prototype.once = function (event, listener) { EventEmitter.prototype.once = function (event, listener) {
this.on(event, function g () { this.on(event, function g () {
this.removeListener(event, g); this.removeListener(event, g);

File diff suppressed because it is too large Load Diff

View File

@ -4,7 +4,6 @@ import {
online_status, online_status,
main_api, main_api,
api_server, api_server,
api_server_textgenerationwebui,
is_send_press, is_send_press,
max_context, max_context,
saveSettingsDebounced, saveSettingsDebounced,
@ -35,7 +34,7 @@ import {
import { debounce, delay, getStringHash, isValidUrl } from './utils.js'; import { debounce, delay, getStringHash, isValidUrl } from './utils.js';
import { chat_completion_sources, oai_settings } from './openai.js'; import { chat_completion_sources, oai_settings } from './openai.js';
import { getTokenCount } from './tokenizers.js'; import { getTokenCount } from './tokenizers.js';
import { textgen_types, textgenerationwebui_settings as textgen_settings } from './textgen-settings.js'; import { textgen_types, textgenerationwebui_settings as textgen_settings, getTextGenServer } from './textgen-settings.js';
import Bowser from '../lib/bowser.min.js'; import Bowser from '../lib/bowser.min.js';
@ -387,7 +386,7 @@ function RA_autoconnect(PrevApi) {
) { ) {
$('#api_button_textgenerationwebui').trigger('click'); $('#api_button_textgenerationwebui').trigger('click');
} }
else if (api_server_textgenerationwebui && isValidUrl(api_server_textgenerationwebui)) { else if (isValidUrl(getTextGenServer())) {
$('#api_button_textgenerationwebui').trigger('click'); $('#api_button_textgenerationwebui').trigger('click');
} }
break; break;
@ -400,6 +399,7 @@ function RA_autoconnect(PrevApi) {
|| (secret_state[SECRET_KEYS.AI21] && oai_settings.chat_completion_source == chat_completion_sources.AI21) || (secret_state[SECRET_KEYS.AI21] && oai_settings.chat_completion_source == chat_completion_sources.AI21)
|| (secret_state[SECRET_KEYS.MAKERSUITE] && oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE) || (secret_state[SECRET_KEYS.MAKERSUITE] && oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE)
|| (secret_state[SECRET_KEYS.MISTRALAI] && oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI) || (secret_state[SECRET_KEYS.MISTRALAI] && oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI)
|| (isValidUrl(oai_settings.custom_url) && oai_settings.chat_completion_source == chat_completion_sources.CUSTOM)
) { ) {
$('#api_button_openai').trigger('click'); $('#api_button_openai').trigger('click');
} }

View File

@ -1,5 +1,6 @@
// Move chat functions here from script.js (eventually) // Move chat functions here from script.js (eventually)
import css from '../lib/css-parser.mjs';
import { import {
addCopyToCodeBlocks, addCopyToCodeBlocks,
appendMediaToMessage, appendMediaToMessage,
@ -360,6 +361,61 @@ export async function appendFileContent(message, messageText) {
return messageText; return messageText;
} }
/**
* Replaces style tags in the message text with custom tags with encoded content.
* @param {string} text
* @returns {string} Encoded message text
* @copyright https://github.com/kwaroran/risuAI
*/
export function encodeStyleTags(text) {
const styleRegex = /<style>(.+?)<\/style>/gms;
return text.replaceAll(styleRegex, (_, match) => {
return `<custom-style>${escape(match)}</custom-style>`;
});
}
/**
* Sanitizes custom style tags in the message text to prevent DOM pollution.
* @param {string} text Message text
* @returns {string} Sanitized message text
* @copyright https://github.com/kwaroran/risuAI
*/
export function decodeStyleTags(text) {
const styleDecodeRegex = /<custom-style>(.+?)<\/custom-style>/gms;
return text.replaceAll(styleDecodeRegex, (_, style) => {
try {
const ast = css.parse(unescape(style));
const rules = ast?.stylesheet?.rules;
if (rules) {
for (const rule of rules) {
if (rule.type === 'rule') {
if (rule.selectors) {
for (let i = 0; i < rule.selectors.length; i++) {
let selector = rule.selectors[i];
if (selector) {
let selectors = (selector.split(' ') ?? []).map((v) => {
if (v.startsWith('.')) {
return '.custom-' + v.substring(1);
}
return v;
}).join(' ');
rule.selectors[i] = '.mes_text ' + selectors;
}
}
}
}
}
}
return `<style>${css.stringify(ast)}</style>`;
} catch (error) {
return `CSS ERROR: ${error}`;
}
});
}
jQuery(function () { jQuery(function () {
$(document).on('click', '.mes_hide', async function () { $(document).on('click', '.mes_hide', async function () {
const messageBlock = $(this).closest('.mes'); const messageBlock = $(this).closest('.mes');

View File

@ -47,8 +47,6 @@ export function saveMetadataDebounced() {
}, 1000); }, 1000);
} }
export const extensionsHandlebars = Handlebars.create();
/** /**
* Provides an ability for extensions to render HTML templates. * Provides an ability for extensions to render HTML templates.
* Templates sanitation and localization is forced. * Templates sanitation and localization is forced.
@ -61,40 +59,6 @@ export function renderExtensionTemplate(extensionName, templateId, templateData
return renderTemplate(`scripts/extensions/${extensionName}/${templateId}.html`, templateData, sanitize, localize, true); return renderTemplate(`scripts/extensions/${extensionName}/${templateId}.html`, templateData, sanitize, localize, true);
} }
/**
* Registers a Handlebars helper for use in extensions.
* @param {string} name Handlebars helper name
* @param {function} helper Handlebars helper function
*/
export function registerExtensionHelper(name, helper) {
extensionsHandlebars.registerHelper(name, helper);
}
/**
* Applies handlebars extension helpers to a message.
* @param {number} messageId Message index in the chat.
*/
export function processExtensionHelpers(messageId) {
const context = getContext();
const message = context.chat[messageId];
if (!message?.mes || typeof message.mes !== 'string') {
return;
}
// Don't waste time if there are no mustaches
if (!substituteParams(message.mes).includes('{{')) {
return;
}
try {
const template = extensionsHandlebars.compile(substituteParams(message.mes), { noEscape: true });
message.mes = template({});
} catch {
// Ignore
}
}
// Disables parallel updates // Disables parallel updates
class ModuleWorkerWrapper { class ModuleWorkerWrapper {
constructor(callback) { constructor(callback) {

View File

@ -4,6 +4,7 @@ import { callPopup, getRequestHeaders, saveSettingsDebounced, substituteParams }
import { getMessageTimeStamp } from '../../RossAscends-mods.js'; import { getMessageTimeStamp } from '../../RossAscends-mods.js';
import { SECRET_KEYS, secret_state } from '../../secrets.js'; import { SECRET_KEYS, secret_state } from '../../secrets.js';
import { getMultimodalCaption } from '../shared.js'; import { getMultimodalCaption } from '../shared.js';
import { textgen_types, textgenerationwebui_settings } from '../../textgen-settings.js';
export { MODULE_NAME }; export { MODULE_NAME };
const MODULE_NAME = 'caption'; const MODULE_NAME = 'caption';
@ -134,7 +135,7 @@ async function doCaptionRequest(base64Img, fileData) {
case 'horde': case 'horde':
return await captionHorde(base64Img); return await captionHorde(base64Img);
case 'multimodal': case 'multimodal':
return await captionMultimodal(extension_settings.caption.multimodal_api === 'google' ? base64Img : fileData); return await captionMultimodal(fileData);
default: default:
throw new Error('Unknown caption source.'); throw new Error('Unknown caption source.');
} }
@ -216,7 +217,16 @@ async function captionHorde(base64Img) {
* @returns {Promise<{caption: string}>} Generated caption * @returns {Promise<{caption: string}>} Generated caption
*/ */
async function captionMultimodal(base64Img) { async function captionMultimodal(base64Img) {
const prompt = extension_settings.caption.prompt || PROMPT_DEFAULT; let prompt = extension_settings.caption.prompt || PROMPT_DEFAULT;
if (extension_settings.caption.prompt_ask) {
const customPrompt = await callPopup('<h3>Enter a comment or question:</h3>', 'input', prompt, { rows: 2 });
if (!customPrompt) {
throw new Error('User aborted the caption sending.');
}
prompt = String(customPrompt).trim();
}
const caption = await getMultimodalCaption(base64Img, prompt); const caption = await getMultimodalCaption(base64Img, prompt);
return { caption }; return { caption };
} }
@ -271,9 +281,12 @@ jQuery(function () {
$(sendButton).on('click', () => { $(sendButton).on('click', () => {
const hasCaptionModule = const hasCaptionModule =
(modules.includes('caption') && extension_settings.caption.source === 'extras') || (modules.includes('caption') && extension_settings.caption.source === 'extras') ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openai' && secret_state[SECRET_KEYS.OPENAI]) || (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openai' && (secret_state[SECRET_KEYS.OPENAI] || extension_settings.caption.allow_reverse_proxy)) ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openrouter' && secret_state[SECRET_KEYS.OPENROUTER]) || (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openrouter' && secret_state[SECRET_KEYS.OPENROUTER]) ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'google' && secret_state[SECRET_KEYS.MAKERSUITE]) || (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'google' && secret_state[SECRET_KEYS.MAKERSUITE]) ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'ollama' && textgenerationwebui_settings.server_urls[textgen_types.OLLAMA]) ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'llamacpp' && textgenerationwebui_settings.server_urls[textgen_types.LLAMACPP]) ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'custom') ||
extension_settings.caption.source === 'local' || extension_settings.caption.source === 'local' ||
extension_settings.caption.source === 'horde'; extension_settings.caption.source === 'horde';
@ -329,7 +342,7 @@ jQuery(function () {
<label for="caption_source">Source</label> <label for="caption_source">Source</label>
<select id="caption_source" class="text_pole"> <select id="caption_source" class="text_pole">
<option value="local">Local</option> <option value="local">Local</option>
<option value="multimodal">Multimodal (OpenAI / OpenRouter / Google)</option> <option value="multimodal">Multimodal (OpenAI / llama / Google)</option>
<option value="extras">Extras</option> <option value="extras">Extras</option>
<option value="horde">Horde</option> <option value="horde">Horde</option>
</select> </select>
@ -337,9 +350,12 @@ jQuery(function () {
<div class="flex1 flex-container flexFlowColumn flexNoGap"> <div class="flex1 flex-container flexFlowColumn flexNoGap">
<label for="caption_multimodal_api">API</label> <label for="caption_multimodal_api">API</label>
<select id="caption_multimodal_api" class="flex1 text_pole"> <select id="caption_multimodal_api" class="flex1 text_pole">
<option value="llamacpp">llama.cpp</option>
<option value="ollama">Ollama</option>
<option value="openai">OpenAI</option> <option value="openai">OpenAI</option>
<option value="openrouter">OpenRouter</option> <option value="openrouter">OpenRouter</option>
<option value="google">Google</option> <option value="google">Google MakerSuite</option>
<option value="custom">Custom (OpenAI-compatible)</option>
</select> </select>
</div> </div>
<div class="flex1 flex-container flexFlowColumn flexNoGap"> <div class="flex1 flex-container flexFlowColumn flexNoGap">
@ -349,16 +365,28 @@ jQuery(function () {
<option data-type="google" value="gemini-pro-vision">gemini-pro-vision</option> <option data-type="google" value="gemini-pro-vision">gemini-pro-vision</option>
<option data-type="openrouter" value="openai/gpt-4-vision-preview">openai/gpt-4-vision-preview</option> <option data-type="openrouter" value="openai/gpt-4-vision-preview">openai/gpt-4-vision-preview</option>
<option data-type="openrouter" value="haotian-liu/llava-13b">haotian-liu/llava-13b</option> <option data-type="openrouter" value="haotian-liu/llava-13b">haotian-liu/llava-13b</option>
<option data-type="ollama" value="ollama_current">[Currently selected]</option>
<option data-type="ollama" value="bakllava:latest">bakllava:latest</option>
<option data-type="ollama" value="llava:latest">llava:latest</option>
<option data-type="llamacpp" value="llamacpp_current">[Currently loaded]</option>
<option data-type="custom" value="custom_current">[Currently selected]</option>
</select> </select>
</div> </div>
<label data-type="openai" class="checkbox_label flexBasis100p" for="caption_allow_reverse_proxy" title="Allow using reverse proxy if defined and valid."> <label data-type="openai" class="checkbox_label flexBasis100p" for="caption_allow_reverse_proxy" title="Allow using reverse proxy if defined and valid.">
<input id="caption_allow_reverse_proxy" type="checkbox" class="checkbox"> <input id="caption_allow_reverse_proxy" type="checkbox" class="checkbox">
Allow reverse proxy Allow reverse proxy
</label> </label>
<div class="flexBasis100p m-b-1">
<small><b>Hint:</b> Set your API keys and endpoints in the 'API Connections' tab first.</small>
</div>
</div> </div>
<div id="caption_prompt_block"> <div id="caption_prompt_block">
<label for="caption_prompt">Caption Prompt</label> <label for="caption_prompt">Caption Prompt</label>
<textarea id="caption_prompt" class="text_pole" rows="1" placeholder="&lt; Use default &gt;">${PROMPT_DEFAULT}</textarea> <textarea id="caption_prompt" class="text_pole" rows="1" placeholder="&lt; Use default &gt;">${PROMPT_DEFAULT}</textarea>
<label class="checkbox_label margin-bot-10px" for="caption_prompt_ask" title="Ask for a custom prompt every time an image is captioned.">
<input id="caption_prompt_ask" type="checkbox" class="checkbox">
Ask every time
</label>
</div> </div>
<label for="caption_template">Message Template <small>(use <code>{{caption}}</code> macro)</small></label> <label for="caption_template">Message Template <small>(use <code>{{caption}}</code> macro)</small></label>
<textarea id="caption_template" class="text_pole" rows="2" placeholder="&lt; Use default &gt;">${TEMPLATE_DEFAULT}</textarea> <textarea id="caption_template" class="text_pole" rows="2" placeholder="&lt; Use default &gt;">${TEMPLATE_DEFAULT}</textarea>
@ -382,6 +410,7 @@ jQuery(function () {
$('#caption_refine_mode').prop('checked', !!(extension_settings.caption.refine_mode)); $('#caption_refine_mode').prop('checked', !!(extension_settings.caption.refine_mode));
$('#caption_allow_reverse_proxy').prop('checked', !!(extension_settings.caption.allow_reverse_proxy)); $('#caption_allow_reverse_proxy').prop('checked', !!(extension_settings.caption.allow_reverse_proxy));
$('#caption_prompt_ask').prop('checked', !!(extension_settings.caption.prompt_ask));
$('#caption_source').val(extension_settings.caption.source); $('#caption_source').val(extension_settings.caption.source);
$('#caption_prompt').val(extension_settings.caption.prompt); $('#caption_prompt').val(extension_settings.caption.prompt);
$('#caption_template').val(extension_settings.caption.template); $('#caption_template').val(extension_settings.caption.template);
@ -403,4 +432,8 @@ jQuery(function () {
extension_settings.caption.allow_reverse_proxy = $('#caption_allow_reverse_proxy').prop('checked'); extension_settings.caption.allow_reverse_proxy = $('#caption_allow_reverse_proxy').prop('checked');
saveSettingsDebounced(); saveSettingsDebounced();
}); });
$('#caption_prompt_ask').on('input', () => {
extension_settings.caption.prompt_ask = $('#caption_prompt_ask').prop('checked');
saveSettingsDebounced();
});
}); });

View File

@ -992,8 +992,7 @@ async function getExpressionsList() {
} }
const result = await resolveExpressionsList(); const result = await resolveExpressionsList();
result.push(...extension_settings.expressions.custom); return [...result, ...extension_settings.expressions.custom];
return result;
} }
async function setExpression(character, expression, force) { async function setExpression(character, expression, force) {

View File

@ -5,6 +5,7 @@ import { is_group_generating, selected_group } from '../../group-chats.js';
import { registerSlashCommand } from '../../slash-commands.js'; import { registerSlashCommand } from '../../slash-commands.js';
import { loadMovingUIState } from '../../power-user.js'; import { loadMovingUIState } from '../../power-user.js';
import { dragElement } from '../../RossAscends-mods.js'; import { dragElement } from '../../RossAscends-mods.js';
import { getTextTokens, tokenizers } from '../../tokenizers.js';
export { MODULE_NAME }; export { MODULE_NAME };
const MODULE_NAME = '1_memory'; const MODULE_NAME = '1_memory';
@ -42,26 +43,6 @@ const defaultPrompt = '[Pause your roleplay. Summarize the most important facts
const defaultTemplate = '[Summary: {{summary}}]'; const defaultTemplate = '[Summary: {{summary}}]';
const defaultSettings = { const defaultSettings = {
minLongMemory: 16,
maxLongMemory: 1024,
longMemoryLength: 128,
shortMemoryLength: 512,
minShortMemory: 128,
maxShortMemory: 1024,
shortMemoryStep: 16,
longMemoryStep: 8,
repetitionPenaltyStep: 0.05,
repetitionPenalty: 1.2,
maxRepetitionPenalty: 2.0,
minRepetitionPenalty: 1.0,
temperature: 1.0,
minTemperature: 0.1,
maxTemperature: 2.0,
temperatureStep: 0.05,
lengthPenalty: 1,
minLengthPenalty: -4,
maxLengthPenalty: 4,
lengthPenaltyStep: 0.1,
memoryFrozen: false, memoryFrozen: false,
SkipWIAN: false, SkipWIAN: false,
source: summary_sources.extras, source: summary_sources.extras,
@ -95,11 +76,6 @@ function loadSettings() {
} }
$('#summary_source').val(extension_settings.memory.source).trigger('change'); $('#summary_source').val(extension_settings.memory.source).trigger('change');
$('#memory_long_length').val(extension_settings.memory.longMemoryLength).trigger('input');
$('#memory_short_length').val(extension_settings.memory.shortMemoryLength).trigger('input');
$('#memory_repetition_penalty').val(extension_settings.memory.repetitionPenalty).trigger('input');
$('#memory_temperature').val(extension_settings.memory.temperature).trigger('input');
$('#memory_length_penalty').val(extension_settings.memory.lengthPenalty).trigger('input');
$('#memory_frozen').prop('checked', extension_settings.memory.memoryFrozen).trigger('input'); $('#memory_frozen').prop('checked', extension_settings.memory.memoryFrozen).trigger('input');
$('#memory_skipWIAN').prop('checked', extension_settings.memory.SkipWIAN).trigger('input'); $('#memory_skipWIAN').prop('checked', extension_settings.memory.SkipWIAN).trigger('input');
$('#memory_prompt').val(extension_settings.memory.prompt).trigger('input'); $('#memory_prompt').val(extension_settings.memory.prompt).trigger('input');
@ -126,51 +102,6 @@ function switchSourceControls(value) {
}); });
} }
function onMemoryShortInput() {
const value = $(this).val();
extension_settings.memory.shortMemoryLength = Number(value);
$('#memory_short_length_tokens').text(value);
saveSettingsDebounced();
// Don't let long buffer be bigger than short
if (extension_settings.memory.longMemoryLength > extension_settings.memory.shortMemoryLength) {
$('#memory_long_length').val(extension_settings.memory.shortMemoryLength).trigger('input');
}
}
function onMemoryLongInput() {
const value = $(this).val();
extension_settings.memory.longMemoryLength = Number(value);
$('#memory_long_length_tokens').text(value);
saveSettingsDebounced();
// Don't let long buffer be bigger than short
if (extension_settings.memory.longMemoryLength > extension_settings.memory.shortMemoryLength) {
$('#memory_short_length').val(extension_settings.memory.longMemoryLength).trigger('input');
}
}
function onMemoryRepetitionPenaltyInput() {
const value = $(this).val();
extension_settings.memory.repetitionPenalty = Number(value);
$('#memory_repetition_penalty_value').text(extension_settings.memory.repetitionPenalty.toFixed(2));
saveSettingsDebounced();
}
function onMemoryTemperatureInput() {
const value = $(this).val();
extension_settings.memory.temperature = Number(value);
$('#memory_temperature_value').text(extension_settings.memory.temperature.toFixed(2));
saveSettingsDebounced();
}
function onMemoryLengthPenaltyInput() {
const value = $(this).val();
extension_settings.memory.lengthPenalty = Number(value);
$('#memory_length_penalty_value').text(extension_settings.memory.lengthPenalty.toFixed(2));
saveSettingsDebounced();
}
function onMemoryFrozenInput() { function onMemoryFrozenInput() {
const value = Boolean($(this).prop('checked')); const value = Boolean($(this).prop('checked'));
extension_settings.memory.memoryFrozen = value; extension_settings.memory.memoryFrozen = value;
@ -444,33 +375,36 @@ async function summarizeChatExtras(context) {
const longMemory = getLatestMemoryFromChat(chat); const longMemory = getLatestMemoryFromChat(chat);
const reversedChat = chat.slice().reverse(); const reversedChat = chat.slice().reverse();
reversedChat.shift(); reversedChat.shift();
let memoryBuffer = []; const memoryBuffer = [];
const CONTEXT_SIZE = 1024 - 64;
for (let mes of reversedChat) { for (const message of reversedChat) {
// we reached the point of latest memory // we reached the point of latest memory
if (longMemory && mes.extra && mes.extra.memory == longMemory) { if (longMemory && message.extra && message.extra.memory == longMemory) {
break; break;
} }
// don't care about system // don't care about system
if (mes.is_system) { if (message.is_system) {
continue; continue;
} }
// determine the sender's name // determine the sender's name
const name = mes.is_user ? (context.name1 ?? 'You') : (mes.force_avatar ? mes.name : context.name2); const entry = `${message.name}:\n${message.mes}`;
const entry = `${name}:\n${mes['mes']}`;
memoryBuffer.push(entry); memoryBuffer.push(entry);
// check if token limit was reached // check if token limit was reached
if (context.getTokenCount(getMemoryString()) >= extension_settings.memory.shortMemoryLength) { const tokens = getTextTokens(tokenizers.GPT2, getMemoryString()).length;
if (tokens >= CONTEXT_SIZE) {
break; break;
} }
} }
const resultingString = getMemoryString(); const resultingString = getMemoryString();
const resultingTokens = getTextTokens(tokenizers.GPT2, resultingString).length;
if (context.getTokenCount(resultingString) < extension_settings.memory.shortMemoryLength) { if (!resultingString || resultingTokens < CONTEXT_SIZE) {
console.debug('Not enough context to summarize');
return; return;
} }
@ -488,13 +422,7 @@ async function summarizeChatExtras(context) {
}, },
body: JSON.stringify({ body: JSON.stringify({
text: resultingString, text: resultingString,
params: { params: {},
min_length: extension_settings.memory.longMemoryLength * 0, // testing how it behaves 0 min length
max_length: extension_settings.memory.longMemoryLength,
repetition_penalty: extension_settings.memory.repetitionPenalty,
temperature: extension_settings.memory.temperature,
length_penalty: extension_settings.memory.lengthPenalty,
},
}), }),
}); });
@ -623,11 +551,6 @@ function setupListeners() {
//setup shared listeners for popout and regular ext menu //setup shared listeners for popout and regular ext menu
$('#memory_restore').off('click').on('click', onMemoryRestoreClick); $('#memory_restore').off('click').on('click', onMemoryRestoreClick);
$('#memory_contents').off('click').on('input', onMemoryContentInput); $('#memory_contents').off('click').on('input', onMemoryContentInput);
$('#memory_long_length').off('click').on('input', onMemoryLongInput);
$('#memory_short_length').off('click').on('input', onMemoryShortInput);
$('#memory_repetition_penalty').off('click').on('input', onMemoryRepetitionPenaltyInput);
$('#memory_temperature').off('click').on('input', onMemoryTemperatureInput);
$('#memory_length_penalty').off('click').on('input', onMemoryLengthPenaltyInput);
$('#memory_frozen').off('click').on('input', onMemoryFrozenInput); $('#memory_frozen').off('click').on('input', onMemoryFrozenInput);
$('#memory_skipWIAN').off('click').on('input', onMemorySkipWIANInput); $('#memory_skipWIAN').off('click').on('input', onMemorySkipWIANInput);
$('#summary_source').off('click').on('change', onSummarySourceChange); $('#summary_source').off('click').on('change', onSummarySourceChange);
@ -720,18 +643,6 @@ jQuery(function () {
<input id="memory_prompt_words_force" type="range" value="${defaultSettings.promptForceWords}" min="${defaultSettings.promptMinForceWords}" max="${defaultSettings.promptMaxForceWords}" step="${defaultSettings.promptForceWordsStep}" /> <input id="memory_prompt_words_force" type="range" value="${defaultSettings.promptForceWords}" min="${defaultSettings.promptMinForceWords}" max="${defaultSettings.promptMaxForceWords}" step="${defaultSettings.promptForceWordsStep}" />
<small>If both sliders are non-zero, then both will trigger summary updates a their respective intervals.</small> <small>If both sliders are non-zero, then both will trigger summary updates a their respective intervals.</small>
</div> </div>
<div data-source="extras">
<label for="memory_short_length">Chat to Summarize buffer length (<span id="memory_short_length_tokens"></span> tokens)</label>
<input id="memory_short_length" type="range" value="${defaultSettings.shortMemoryLength}" min="${defaultSettings.minShortMemory}" max="${defaultSettings.maxShortMemory}" step="${defaultSettings.shortMemoryStep}" />
<label for="memory_long_length">Summary output length (<span id="memory_long_length_tokens"></span> tokens)</label>
<input id="memory_long_length" type="range" value="${defaultSettings.longMemoryLength}" min="${defaultSettings.minLongMemory}" max="${defaultSettings.maxLongMemory}" step="${defaultSettings.longMemoryStep}" />
<label for="memory_temperature">Temperature (<span id="memory_temperature_value"></span>)</label>
<input id="memory_temperature" type="range" value="${defaultSettings.temperature}" min="${defaultSettings.minTemperature}" max="${defaultSettings.maxTemperature}" step="${defaultSettings.temperatureStep}" />
<label for="memory_repetition_penalty">Repetition penalty (<span id="memory_repetition_penalty_value"></span>)</label>
<input id="memory_repetition_penalty" type="range" value="${defaultSettings.repetitionPenalty}" min="${defaultSettings.minRepetitionPenalty}" max="${defaultSettings.maxRepetitionPenalty}" step="${defaultSettings.repetitionPenaltyStep}" />
<label for="memory_length_penalty">Length preference <small>[higher = longer summaries]</small> (<span id="memory_length_penalty_value"></span>)</label>
<input id="memory_length_penalty" type="range" value="${defaultSettings.lengthPenalty}" min="${defaultSettings.minLengthPenalty}" max="${defaultSettings.maxLengthPenalty}" step="${defaultSettings.lengthPenaltyStep}" />
</div>
</div> </div>
</div> </div>
</div> </div>

View File

@ -2,6 +2,7 @@ import { getRequestHeaders } from '../../script.js';
import { extension_settings } from '../extensions.js'; import { extension_settings } from '../extensions.js';
import { oai_settings } from '../openai.js'; import { oai_settings } from '../openai.js';
import { SECRET_KEYS, secret_state } from '../secrets.js'; import { SECRET_KEYS, secret_state } from '../secrets.js';
import { textgen_types, textgenerationwebui_settings } from '../textgen-settings.js';
import { createThumbnail, isValidUrl } from '../utils.js'; import { createThumbnail, isValidUrl } from '../utils.js';
/** /**
@ -11,20 +12,19 @@ import { createThumbnail, isValidUrl } from '../utils.js';
* @returns {Promise<string>} Generated caption * @returns {Promise<string>} Generated caption
*/ */
export async function getMultimodalCaption(base64Img, prompt) { export async function getMultimodalCaption(base64Img, prompt) {
if (extension_settings.caption.multimodal_api === 'openai' && !secret_state[SECRET_KEYS.OPENAI]) { throwIfInvalidModel();
throw new Error('OpenAI API key is not set.');
}
if (extension_settings.caption.multimodal_api === 'openrouter' && !secret_state[SECRET_KEYS.OPENROUTER]) { const noPrefix = ['google', 'ollama', 'llamacpp'].includes(extension_settings.caption.multimodal_api);
throw new Error('OpenRouter API key is not set.');
}
if (extension_settings.caption.multimodal_api === 'google' && !secret_state[SECRET_KEYS.MAKERSUITE]) { if (noPrefix && base64Img.startsWith('data:image/')) {
throw new Error('MakerSuite API key is not set.'); base64Img = base64Img.split(',')[1];
} }
// OpenRouter has a payload limit of ~2MB. Google is 4MB, but we love democracy. // OpenRouter has a payload limit of ~2MB. Google is 4MB, but we love democracy.
const isGoogle = extension_settings.caption.multimodal_api === 'google'; const isGoogle = extension_settings.caption.multimodal_api === 'google';
const isOllama = extension_settings.caption.multimodal_api === 'ollama';
const isLlamaCpp = extension_settings.caption.multimodal_api === 'llamacpp';
const isCustom = extension_settings.caption.multimodal_api === 'custom';
const base64Bytes = base64Img.length * 0.75; const base64Bytes = base64Img.length * 0.75;
const compressionLimit = 2 * 1024 * 1024; const compressionLimit = 2 * 1024 * 1024;
if (['google', 'openrouter'].includes(extension_settings.caption.multimodal_api) && base64Bytes > compressionLimit) { if (['google', 'openrouter'].includes(extension_settings.caption.multimodal_api) && base64Bytes > compressionLimit) {
@ -45,27 +45,91 @@ export async function getMultimodalCaption(base64Img, prompt) {
const proxyUrl = useReverseProxy ? oai_settings.reverse_proxy : ''; const proxyUrl = useReverseProxy ? oai_settings.reverse_proxy : '';
const proxyPassword = useReverseProxy ? oai_settings.proxy_password : ''; const proxyPassword = useReverseProxy ? oai_settings.proxy_password : '';
const apiResult = await fetch(`/api/${isGoogle ? 'google' : 'openai'}/caption-image`, { const requestBody = {
image: base64Img,
prompt: prompt,
};
if (!isGoogle) {
requestBody.api = extension_settings.caption.multimodal_api || 'openai';
requestBody.model = extension_settings.caption.multimodal_model || 'gpt-4-vision-preview';
requestBody.reverse_proxy = proxyUrl;
requestBody.proxy_password = proxyPassword;
}
if (isOllama) {
if (extension_settings.caption.multimodal_model === 'ollama_current') {
requestBody.model = textgenerationwebui_settings.ollama_model;
}
requestBody.server_url = textgenerationwebui_settings.server_urls[textgen_types.OLLAMA];
}
if (isLlamaCpp) {
requestBody.server_url = textgenerationwebui_settings.server_urls[textgen_types.LLAMACPP];
}
if (isCustom) {
requestBody.server_url = oai_settings.custom_url;
requestBody.model = oai_settings.custom_model || 'gpt-4-vision-preview';
requestBody.custom_include_headers = oai_settings.custom_include_headers;
requestBody.custom_include_body = oai_settings.custom_include_body;
requestBody.custom_exclude_body = oai_settings.custom_exclude_body;
}
function getEndpointUrl() {
switch (extension_settings.caption.multimodal_api) {
case 'google':
return '/api/google/caption-image';
case 'llamacpp':
return '/api/backends/text-completions/llamacpp/caption-image';
case 'ollama':
return '/api/backends/text-completions/ollama/caption-image';
default:
return '/api/openai/caption-image';
}
}
const apiResult = await fetch(getEndpointUrl(), {
method: 'POST', method: 'POST',
headers: getRequestHeaders(), headers: getRequestHeaders(),
body: JSON.stringify({ body: JSON.stringify(requestBody),
image: base64Img,
prompt: prompt,
...(isGoogle
? {}
: {
api: extension_settings.caption.multimodal_api || 'openai',
model: extension_settings.caption.multimodal_model || 'gpt-4-vision-preview',
reverse_proxy: proxyUrl,
proxy_password: proxyPassword,
}),
}),
}); });
if (!apiResult.ok) { if (!apiResult.ok) {
throw new Error('Failed to caption image via OpenAI.'); throw new Error('Failed to caption image via Multimodal API.');
} }
const { caption } = await apiResult.json(); const { caption } = await apiResult.json();
return caption; return String(caption).trim();
}
function throwIfInvalidModel() {
if (extension_settings.caption.multimodal_api === 'openai' && !secret_state[SECRET_KEYS.OPENAI]) {
throw new Error('OpenAI API key is not set.');
}
if (extension_settings.caption.multimodal_api === 'openrouter' && !secret_state[SECRET_KEYS.OPENROUTER]) {
throw new Error('OpenRouter API key is not set.');
}
if (extension_settings.caption.multimodal_api === 'google' && !secret_state[SECRET_KEYS.MAKERSUITE]) {
throw new Error('MakerSuite API key is not set.');
}
if (extension_settings.caption.multimodal_api === 'ollama' && !textgenerationwebui_settings.server_urls[textgen_types.OLLAMA]) {
throw new Error('Ollama server URL is not set.');
}
if (extension_settings.caption.multimodal_api === 'ollama' && extension_settings.caption.multimodal_model === 'ollama_current' && !textgenerationwebui_settings.ollama_model) {
throw new Error('Ollama model is not set.');
}
if (extension_settings.caption.multimodal_api === 'llamacpp' && !textgenerationwebui_settings.server_urls[textgen_types.LLAMACPP]) {
throw new Error('LlamaCPP server URL is not set.');
}
if (extension_settings.caption.multimodal_api === 'custom' && !oai_settings.custom_url) {
throw new Error('Custom API URL is not set.');
}
} }

View File

@ -12,6 +12,7 @@ import {
} from '../../../script.js'; } from '../../../script.js';
import { extension_settings, getContext } from '../../extensions.js'; import { extension_settings, getContext } from '../../extensions.js';
import { secret_state, writeSecret } from '../../secrets.js'; import { secret_state, writeSecret } from '../../secrets.js';
import { splitRecursive } from '../../utils.js';
export const autoModeOptions = { export const autoModeOptions = {
NONE: 'none', NONE: 'none',
@ -315,6 +316,28 @@ async function translateProviderBing(text, lang) {
throw new Error(response.statusText); throw new Error(response.statusText);
} }
/**
* Splits text into chunks and translates each chunk separately
* @param {string} text Text to translate
* @param {string} lang Target language code
* @param {(text: string, lang: string) => Promise<string>} translateFn Function to translate a single chunk (must return a Promise)
* @param {number} chunkSize Maximum chunk size
* @returns {Promise<string>} Translated text
*/
async function chunkedTranslate(text, lang, translateFn, chunkSize = 5000) {
if (text.length <= chunkSize) {
return await translateFn(text, lang);
}
const chunks = splitRecursive(text, chunkSize);
let result = '';
for (const chunk of chunks) {
result += await translateFn(chunk, lang);
}
return result;
}
/** /**
* Translates text using the selected translation provider * Translates text using the selected translation provider
* @param {string} text Text to translate * @param {string} text Text to translate
@ -331,15 +354,15 @@ async function translate(text, lang) {
case 'libre': case 'libre':
return await translateProviderLibre(text, lang); return await translateProviderLibre(text, lang);
case 'google': case 'google':
return await translateProviderGoogle(text, lang); return await chunkedTranslate(text, lang, translateProviderGoogle, 5000);
case 'deepl': case 'deepl':
return await translateProviderDeepl(text, lang); return await translateProviderDeepl(text, lang);
case 'deeplx': case 'deeplx':
return await translateProviderDeepLX(text, lang); return await chunkedTranslate(text, lang, translateProviderDeepLX, 1500);
case 'oneringtranslator': case 'oneringtranslator':
return await translateProviderOneRing(text, lang); return await translateProviderOneRing(text, lang);
case 'bing': case 'bing':
return await translateProviderBing(text, lang); return await chunkedTranslate(text, lang, translateProviderBing, 1000);
default: default:
console.error('Unknown translation provider', extension_settings.translate.provider); console.error('Unknown translation provider', extension_settings.translate.provider);
return text; return text;

View File

@ -29,6 +29,7 @@ const controls = [
{ id: 'instruct_first_output_sequence', property: 'first_output_sequence', isCheckbox: false }, { id: 'instruct_first_output_sequence', property: 'first_output_sequence', isCheckbox: false },
{ id: 'instruct_last_output_sequence', property: 'last_output_sequence', isCheckbox: false }, { id: 'instruct_last_output_sequence', property: 'last_output_sequence', isCheckbox: false },
{ id: 'instruct_activation_regex', property: 'activation_regex', isCheckbox: false }, { id: 'instruct_activation_regex', property: 'activation_regex', isCheckbox: false },
{ id: 'instruct_bind_to_context', property: 'bind_to_context', isCheckbox: true },
]; ];
/** /**
@ -136,7 +137,7 @@ export function autoSelectInstructPreset(modelId) {
let foundMatch = false; let foundMatch = false;
for (const instruct_preset of instruct_presets) { for (const instruct_preset of instruct_presets) {
// If instruct preset matches the context template // If instruct preset matches the context template
if (instruct_preset.name === power_user.context.preset) { if (power_user.instruct.bind_to_context && instruct_preset.name === power_user.context.preset) {
foundMatch = true; foundMatch = true;
selectInstructPreset(instruct_preset.name); selectInstructPreset(instruct_preset.name);
break; break;
@ -163,7 +164,7 @@ export function autoSelectInstructPreset(modelId) {
} }
} }
if (power_user.default_instruct && power_user.instruct.preset !== power_user.default_instruct) { if (power_user.instruct.bind_to_context && power_user.default_instruct && power_user.instruct.preset !== power_user.default_instruct) {
if (instruct_presets.some(p => p.name === power_user.default_instruct)) { if (instruct_presets.some(p => p.name === power_user.default_instruct)) {
console.log(`Instruct mode: default preset "${power_user.default_instruct}" selected`); console.log(`Instruct mode: default preset "${power_user.default_instruct}" selected`);
$('#instruct_presets').val(power_user.default_instruct).trigger('change'); $('#instruct_presets').val(power_user.default_instruct).trigger('change');
@ -409,6 +410,10 @@ jQuery(() => {
}); });
$('#instruct_enabled').on('change', function () { $('#instruct_enabled').on('change', function () {
if (!power_user.instruct.bind_to_context) {
return;
}
// When instruct mode gets enabled, select context template matching selected instruct preset // When instruct mode gets enabled, select context template matching selected instruct preset
if (power_user.instruct.enabled) { if (power_user.instruct.enabled) {
selectMatchingContextTemplate(power_user.instruct.preset); selectMatchingContextTemplate(power_user.instruct.preset);
@ -440,8 +445,10 @@ jQuery(() => {
} }
}); });
// Select matching context template if (power_user.instruct.bind_to_context) {
selectMatchingContextTemplate(name); // Select matching context template
selectMatchingContextTemplate(name);
}
highlightDefaultPreset(); highlightDefaultPreset();
}); });

View File

@ -8,7 +8,7 @@ import {
substituteParams, substituteParams,
} from '../script.js'; } from '../script.js';
import { getCfgPrompt } from './cfg-scale.js'; import { getCfgPrompt } from './cfg-scale.js';
import { MAX_CONTEXT_DEFAULT, MAX_RESPONSE_DEFAULT } from './power-user.js'; import { MAX_CONTEXT_DEFAULT, MAX_RESPONSE_DEFAULT, power_user } from './power-user.js';
import { getTextTokens, tokenizers } from './tokenizers.js'; import { getTextTokens, tokenizers } from './tokenizers.js';
import EventSourceStream from './sse-stream.js'; import EventSourceStream from './sse-stream.js';
import { import {
@ -437,6 +437,10 @@ export function getNovelGenerationData(finalPrompt, settings, maxLength, isImper
BIAS_CACHE.set(BIAS_KEY, logitBias); BIAS_CACHE.set(BIAS_KEY, logitBias);
} }
if (power_user.console_log_prompts) {
console.log(finalPrompt);
}
return { return {
'input': finalPrompt, 'input': finalPrompt,
'model': nai_settings.model_novel, 'model': nai_settings.model_novel,

View File

@ -21,7 +21,6 @@ import {
MAX_INJECTION_DEPTH, MAX_INJECTION_DEPTH,
name1, name1,
name2, name2,
replaceBiasMarkup,
replaceItemizedPromptText, replaceItemizedPromptText,
resultCheckStatus, resultCheckStatus,
saveSettingsDebounced, saveSettingsDebounced,
@ -90,6 +89,7 @@ const default_wi_format = '[Details of the fictional world the RP is set in:\n{0
const default_new_chat_prompt = '[Start a new Chat]'; const default_new_chat_prompt = '[Start a new Chat]';
const default_new_group_chat_prompt = '[Start a new group chat. Group members: {{group}}]'; const default_new_group_chat_prompt = '[Start a new group chat. Group members: {{group}}]';
const default_new_example_chat_prompt = '[Start a new Chat]'; const default_new_example_chat_prompt = '[Start a new Chat]';
const default_claude_human_sysprompt_message = 'Let\'s get started. Please generate your response based on the information and instructions provided above.';
const default_continue_nudge_prompt = '[Continue the following message. Do not include ANY parts of the original message. Use capitalization and punctuation as if your reply is a part of the original message: {{lastChatMessage}}]'; const default_continue_nudge_prompt = '[Continue the following message. Do not include ANY parts of the original message. Use capitalization and punctuation as if your reply is a part of the original message: {{lastChatMessage}}]';
const default_bias = 'Default (none)'; const default_bias = 'Default (none)';
const default_personality_format = '[{{char}}\'s personality: {{personality}}]'; const default_personality_format = '[{{char}}\'s personality: {{personality}}]';
@ -165,6 +165,7 @@ export const chat_completion_sources = {
AI21: 'ai21', AI21: 'ai21',
MAKERSUITE: 'makersuite', MAKERSUITE: 'makersuite',
MISTRALAI: 'mistralai', MISTRALAI: 'mistralai',
CUSTOM: 'custom',
}; };
const prefixMap = selected_group ? { const prefixMap = selected_group ? {
@ -210,6 +211,11 @@ const default_settings = {
google_model: 'gemini-pro', google_model: 'gemini-pro',
ai21_model: 'j2-ultra', ai21_model: 'j2-ultra',
mistralai_model: 'mistral-medium', mistralai_model: 'mistral-medium',
custom_model: '',
custom_url: '',
custom_include_body: '',
custom_exclude_body: '',
custom_include_headers: '',
windowai_model: '', windowai_model: '',
openrouter_model: openrouter_website_model, openrouter_model: openrouter_website_model,
openrouter_use_fallback: false, openrouter_use_fallback: false,
@ -224,9 +230,11 @@ const default_settings = {
show_external_models: false, show_external_models: false,
proxy_password: '', proxy_password: '',
assistant_prefill: '', assistant_prefill: '',
human_sysprompt_message: default_claude_human_sysprompt_message,
use_ai21_tokenizer: false, use_ai21_tokenizer: false,
use_google_tokenizer: false, use_google_tokenizer: false,
exclude_assistant: false, exclude_assistant: false,
claude_use_sysprompt: false,
use_alt_scale: false, use_alt_scale: false,
squash_system_messages: false, squash_system_messages: false,
image_inlining: false, image_inlining: false,
@ -266,6 +274,11 @@ const oai_settings = {
google_model: 'gemini-pro', google_model: 'gemini-pro',
ai21_model: 'j2-ultra', ai21_model: 'j2-ultra',
mistralai_model: 'mistral-medium', mistralai_model: 'mistral-medium',
custom_model: '',
custom_url: '',
custom_include_body: '',
custom_exclude_body: '',
custom_include_headers: '',
windowai_model: '', windowai_model: '',
openrouter_model: openrouter_website_model, openrouter_model: openrouter_website_model,
openrouter_use_fallback: false, openrouter_use_fallback: false,
@ -280,9 +293,11 @@ const oai_settings = {
show_external_models: false, show_external_models: false,
proxy_password: '', proxy_password: '',
assistant_prefill: '', assistant_prefill: '',
human_sysprompt_message: default_claude_human_sysprompt_message,
use_ai21_tokenizer: false, use_ai21_tokenizer: false,
use_google_tokenizer: false, use_google_tokenizer: false,
exclude_assistant: false, exclude_assistant: false,
claude_use_sysprompt: false,
use_alt_scale: false, use_alt_scale: false,
squash_system_messages: false, squash_system_messages: false,
image_inlining: false, image_inlining: false,
@ -425,8 +440,6 @@ function setOpenAIMessages(chat) {
content = `${chat[j].name}: ${content}`; content = `${chat[j].name}: ${content}`;
} }
} }
content = replaceBiasMarkup(content);
// remove caret return (waste of tokens) // remove caret return (waste of tokens)
content = content.replace(/\r/gm, ''); content = content.replace(/\r/gm, '');
@ -1266,6 +1279,8 @@ function getChatCompletionModel() {
return oai_settings.ai21_model; return oai_settings.ai21_model;
case chat_completion_sources.MISTRALAI: case chat_completion_sources.MISTRALAI:
return oai_settings.mistralai_model; return oai_settings.mistralai_model;
case chat_completion_sources.CUSTOM:
return oai_settings.custom_model;
default: default:
throw new Error(`Unknown chat completion source: ${oai_settings.chat_completion_source}`); throw new Error(`Unknown chat completion source: ${oai_settings.chat_completion_source}`);
} }
@ -1326,6 +1341,23 @@ function saveModelList(data) {
$('#model_openai_select').val(model).trigger('change'); $('#model_openai_select').val(model).trigger('change');
} }
} }
if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) {
$('#model_custom_select').empty();
$('#model_custom_select').append('<option value="">None</option>');
model_list.forEach((model) => {
$('#model_custom_select').append(
$('<option>', {
value: model.id,
text: model.id,
selected: model.id == oai_settings.custom_model,
}));
});
if (!oai_settings.custom_model && model_list.length > 0) {
$('#model_custom_select').val(model_list[0].id).trigger('change');
}
}
} }
function appendOpenRouterOptions(model_list, groupModels = false, sort = false) { function appendOpenRouterOptions(model_list, groupModels = false, sort = false) {
@ -1454,6 +1486,7 @@ async function sendOpenAIRequest(type, messages, signal) {
const isGoogle = oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE; const isGoogle = oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE;
const isOAI = oai_settings.chat_completion_source == chat_completion_sources.OPENAI; const isOAI = oai_settings.chat_completion_source == chat_completion_sources.OPENAI;
const isMistral = oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI; const isMistral = oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI;
const isCustom = oai_settings.chat_completion_source == chat_completion_sources.CUSTOM;
const isTextCompletion = (isOAI && textCompletionModels.includes(oai_settings.openai_model)) || (isOpenRouter && oai_settings.openrouter_force_instruct && power_user.instruct.enabled); const isTextCompletion = (isOAI && textCompletionModels.includes(oai_settings.openai_model)) || (isOpenRouter && oai_settings.openrouter_force_instruct && power_user.instruct.enabled);
const isQuiet = type === 'quiet'; const isQuiet = type === 'quiet';
const isImpersonate = type === 'impersonate'; const isImpersonate = type === 'impersonate';
@ -1480,7 +1513,7 @@ async function sendOpenAIRequest(type, messages, signal) {
return sendWindowAIRequest(messages, signal, stream); return sendWindowAIRequest(messages, signal, stream);
} }
const logitBiasSources = [chat_completion_sources.OPENAI, chat_completion_sources.OPENROUTER, chat_completion_sources.SCALE]; const logitBiasSources = [chat_completion_sources.OPENAI, chat_completion_sources.OPENROUTER, chat_completion_sources.SCALE, chat_completion_sources.CUSTOM];
if (oai_settings.bias_preset_selected if (oai_settings.bias_preset_selected
&& logitBiasSources.includes(oai_settings.chat_completion_source) && logitBiasSources.includes(oai_settings.chat_completion_source)
&& Array.isArray(oai_settings.bias_presets[oai_settings.bias_preset_selected]) && Array.isArray(oai_settings.bias_presets[oai_settings.bias_preset_selected])
@ -1528,7 +1561,9 @@ async function sendOpenAIRequest(type, messages, signal) {
if (isClaude) { if (isClaude) {
generate_data['top_k'] = Number(oai_settings.top_k_openai); generate_data['top_k'] = Number(oai_settings.top_k_openai);
generate_data['exclude_assistant'] = oai_settings.exclude_assistant; generate_data['exclude_assistant'] = oai_settings.exclude_assistant;
generate_data['claude_use_sysprompt'] = oai_settings.claude_use_sysprompt;
generate_data['stop'] = getCustomStoppingStrings(); // Claude shouldn't have limits on stop strings. generate_data['stop'] = getCustomStoppingStrings(); // Claude shouldn't have limits on stop strings.
generate_data['human_sysprompt_message'] = substituteParams(oai_settings.human_sysprompt_message);
// Don't add a prefill on quiet gens (summarization) // Don't add a prefill on quiet gens (summarization)
if (!isQuiet && !oai_settings.exclude_assistant) { if (!isQuiet && !oai_settings.exclude_assistant) {
generate_data['assistant_prefill'] = substituteParams(oai_settings.assistant_prefill); generate_data['assistant_prefill'] = substituteParams(oai_settings.assistant_prefill);
@ -1565,7 +1600,14 @@ async function sendOpenAIRequest(type, messages, signal) {
generate_data['safe_mode'] = false; // already defaults to false, but just incase they change that in the future. generate_data['safe_mode'] = false; // already defaults to false, but just incase they change that in the future.
} }
if ((isOAI || isOpenRouter || isMistral) && oai_settings.seed >= 0) { if (isCustom) {
generate_data['custom_url'] = oai_settings.custom_url;
generate_data['custom_include_body'] = oai_settings.custom_include_body;
generate_data['custom_exclude_body'] = oai_settings.custom_exclude_body;
generate_data['custom_include_headers'] = oai_settings.custom_include_headers;
}
if ((isOAI || isOpenRouter || isMistral || isCustom) && oai_settings.seed >= 0) {
generate_data['seed'] = oai_settings.seed; generate_data['seed'] = oai_settings.seed;
} }
@ -1964,7 +2006,7 @@ class ChatCompletion {
for (let message of this.messages.collection) { for (let message of this.messages.collection) {
if (!excludeList.includes(message.identifier) && message.role === 'system' && !message.name) { if (!excludeList.includes(message.identifier) && message.role === 'system' && !message.name) {
if (lastMessage && lastMessage.role === 'system') { if (lastMessage && message.content && lastMessage.role === 'system') {
lastMessage.content += '\n' + message.content; lastMessage.content += '\n' + message.content;
lastMessage.tokens = tokenHandler.count({ role: lastMessage.role, content: lastMessage.content }); lastMessage.tokens = tokenHandler.count({ role: lastMessage.role, content: lastMessage.content });
} }
@ -2311,12 +2353,18 @@ function loadOpenAISettings(data, settings) {
oai_settings.openrouter_force_instruct = settings.openrouter_force_instruct ?? default_settings.openrouter_force_instruct; oai_settings.openrouter_force_instruct = settings.openrouter_force_instruct ?? default_settings.openrouter_force_instruct;
oai_settings.ai21_model = settings.ai21_model ?? default_settings.ai21_model; oai_settings.ai21_model = settings.ai21_model ?? default_settings.ai21_model;
oai_settings.mistralai_model = settings.mistralai_model ?? default_settings.mistralai_model; oai_settings.mistralai_model = settings.mistralai_model ?? default_settings.mistralai_model;
oai_settings.custom_model = settings.custom_model ?? default_settings.custom_model;
oai_settings.custom_url = settings.custom_url ?? default_settings.custom_url;
oai_settings.custom_include_body = settings.custom_include_body ?? default_settings.custom_include_body;
oai_settings.custom_exclude_body = settings.custom_exclude_body ?? default_settings.custom_exclude_body;
oai_settings.custom_include_headers = settings.custom_include_headers ?? default_settings.custom_include_headers;
oai_settings.google_model = settings.google_model ?? default_settings.google_model; oai_settings.google_model = settings.google_model ?? default_settings.google_model;
oai_settings.chat_completion_source = settings.chat_completion_source ?? default_settings.chat_completion_source; oai_settings.chat_completion_source = settings.chat_completion_source ?? default_settings.chat_completion_source;
oai_settings.api_url_scale = settings.api_url_scale ?? default_settings.api_url_scale; oai_settings.api_url_scale = settings.api_url_scale ?? default_settings.api_url_scale;
oai_settings.show_external_models = settings.show_external_models ?? default_settings.show_external_models; oai_settings.show_external_models = settings.show_external_models ?? default_settings.show_external_models;
oai_settings.proxy_password = settings.proxy_password ?? default_settings.proxy_password; oai_settings.proxy_password = settings.proxy_password ?? default_settings.proxy_password;
oai_settings.assistant_prefill = settings.assistant_prefill ?? default_settings.assistant_prefill; oai_settings.assistant_prefill = settings.assistant_prefill ?? default_settings.assistant_prefill;
oai_settings.human_sysprompt_message = settings.human_sysprompt_message ?? default_settings.human_sysprompt_message;
oai_settings.image_inlining = settings.image_inlining ?? default_settings.image_inlining; oai_settings.image_inlining = settings.image_inlining ?? default_settings.image_inlining;
oai_settings.bypass_status_check = settings.bypass_status_check ?? default_settings.bypass_status_check; oai_settings.bypass_status_check = settings.bypass_status_check ?? default_settings.bypass_status_check;
@ -2335,11 +2383,13 @@ function loadOpenAISettings(data, settings) {
if (settings.use_ai21_tokenizer !== undefined) { oai_settings.use_ai21_tokenizer = !!settings.use_ai21_tokenizer; oai_settings.use_ai21_tokenizer ? ai21_max = 8191 : ai21_max = 9200; } if (settings.use_ai21_tokenizer !== undefined) { oai_settings.use_ai21_tokenizer = !!settings.use_ai21_tokenizer; oai_settings.use_ai21_tokenizer ? ai21_max = 8191 : ai21_max = 9200; }
if (settings.use_google_tokenizer !== undefined) oai_settings.use_google_tokenizer = !!settings.use_google_tokenizer; if (settings.use_google_tokenizer !== undefined) oai_settings.use_google_tokenizer = !!settings.use_google_tokenizer;
if (settings.exclude_assistant !== undefined) oai_settings.exclude_assistant = !!settings.exclude_assistant; if (settings.exclude_assistant !== undefined) oai_settings.exclude_assistant = !!settings.exclude_assistant;
if (settings.claude_use_sysprompt !== undefined) oai_settings.claude_use_sysprompt = !!settings.claude_use_sysprompt;
if (settings.use_alt_scale !== undefined) { oai_settings.use_alt_scale = !!settings.use_alt_scale; updateScaleForm(); } if (settings.use_alt_scale !== undefined) { oai_settings.use_alt_scale = !!settings.use_alt_scale; updateScaleForm(); }
$('#stream_toggle').prop('checked', oai_settings.stream_openai); $('#stream_toggle').prop('checked', oai_settings.stream_openai);
$('#api_url_scale').val(oai_settings.api_url_scale); $('#api_url_scale').val(oai_settings.api_url_scale);
$('#openai_proxy_password').val(oai_settings.proxy_password); $('#openai_proxy_password').val(oai_settings.proxy_password);
$('#claude_assistant_prefill').val(oai_settings.assistant_prefill); $('#claude_assistant_prefill').val(oai_settings.assistant_prefill);
$('#claude_human_sysprompt_textarea').val(oai_settings.human_sysprompt_message);
$('#openai_image_inlining').prop('checked', oai_settings.image_inlining); $('#openai_image_inlining').prop('checked', oai_settings.image_inlining);
$('#openai_bypass_status_check').prop('checked', oai_settings.bypass_status_check); $('#openai_bypass_status_check').prop('checked', oai_settings.bypass_status_check);
@ -2355,6 +2405,8 @@ function loadOpenAISettings(data, settings) {
$(`#model_ai21_select option[value="${oai_settings.ai21_model}"`).attr('selected', true); $(`#model_ai21_select option[value="${oai_settings.ai21_model}"`).attr('selected', true);
$('#model_mistralai_select').val(oai_settings.mistralai_model); $('#model_mistralai_select').val(oai_settings.mistralai_model);
$(`#model_mistralai_select option[value="${oai_settings.mistralai_model}"`).attr('selected', true); $(`#model_mistralai_select option[value="${oai_settings.mistralai_model}"`).attr('selected', true);
$('#custom_model_id').val(oai_settings.custom_model);
$('#custom_api_url_text').val(oai_settings.custom_url);
$('#openai_max_context').val(oai_settings.openai_max_context); $('#openai_max_context').val(oai_settings.openai_max_context);
$('#openai_max_context_counter').val(`${oai_settings.openai_max_context}`); $('#openai_max_context_counter').val(`${oai_settings.openai_max_context}`);
$('#model_openrouter_select').val(oai_settings.openrouter_model); $('#model_openrouter_select').val(oai_settings.openrouter_model);
@ -2370,6 +2422,7 @@ function loadOpenAISettings(data, settings) {
$('#use_ai21_tokenizer').prop('checked', oai_settings.use_ai21_tokenizer); $('#use_ai21_tokenizer').prop('checked', oai_settings.use_ai21_tokenizer);
$('#use_google_tokenizer').prop('checked', oai_settings.use_google_tokenizer); $('#use_google_tokenizer').prop('checked', oai_settings.use_google_tokenizer);
$('#exclude_assistant').prop('checked', oai_settings.exclude_assistant); $('#exclude_assistant').prop('checked', oai_settings.exclude_assistant);
$('#claude_use_sysprompt').prop('checked', oai_settings.claude_use_sysprompt);
$('#scale-alt').prop('checked', oai_settings.use_alt_scale); $('#scale-alt').prop('checked', oai_settings.use_alt_scale);
$('#openrouter_use_fallback').prop('checked', oai_settings.openrouter_use_fallback); $('#openrouter_use_fallback').prop('checked', oai_settings.openrouter_use_fallback);
$('#openrouter_force_instruct').prop('checked', oai_settings.openrouter_force_instruct); $('#openrouter_force_instruct').prop('checked', oai_settings.openrouter_force_instruct);
@ -2466,7 +2519,13 @@ async function getStatusOpen() {
validateReverseProxy(); validateReverseProxy();
} }
const canBypass = oai_settings.chat_completion_source === chat_completion_sources.OPENAI && oai_settings.bypass_status_check; if (oai_settings.chat_completion_source === chat_completion_sources.CUSTOM) {
$('#model_custom_select').empty();
data.custom_url = oai_settings.custom_url;
data.custom_include_headers = oai_settings.custom_include_headers;
}
const canBypass = (oai_settings.chat_completion_source === chat_completion_sources.OPENAI && oai_settings.bypass_status_check) || oai_settings.chat_completion_source === chat_completion_sources.CUSTOM;
if (canBypass) { if (canBypass) {
setOnlineStatus('Status check bypassed'); setOnlineStatus('Status check bypassed');
} }
@ -2533,6 +2592,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
openrouter_sort_models: settings.openrouter_sort_models, openrouter_sort_models: settings.openrouter_sort_models,
ai21_model: settings.ai21_model, ai21_model: settings.ai21_model,
mistralai_model: settings.mistralai_model, mistralai_model: settings.mistralai_model,
custom_model: settings.custom_model,
google_model: settings.google_model, google_model: settings.google_model,
temperature: settings.temp_openai, temperature: settings.temp_openai,
frequency_penalty: settings.freq_pen_openai, frequency_penalty: settings.freq_pen_openai,
@ -2566,9 +2626,11 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
api_url_scale: settings.api_url_scale, api_url_scale: settings.api_url_scale,
show_external_models: settings.show_external_models, show_external_models: settings.show_external_models,
assistant_prefill: settings.assistant_prefill, assistant_prefill: settings.assistant_prefill,
human_sysprompt_message: settings.human_sysprompt_message,
use_ai21_tokenizer: settings.use_ai21_tokenizer, use_ai21_tokenizer: settings.use_ai21_tokenizer,
use_google_tokenizer: settings.use_google_tokenizer, use_google_tokenizer: settings.use_google_tokenizer,
exclude_assistant: settings.exclude_assistant, exclude_assistant: settings.exclude_assistant,
claude_use_sysprompt: settings.claude_use_sysprompt,
use_alt_scale: settings.use_alt_scale, use_alt_scale: settings.use_alt_scale,
squash_system_messages: settings.squash_system_messages, squash_system_messages: settings.squash_system_messages,
image_inlining: settings.image_inlining, image_inlining: settings.image_inlining,
@ -2905,6 +2967,11 @@ function onSettingsPresetChange() {
openrouter_sort_models: ['#openrouter_sort_models', 'openrouter_sort_models', false], openrouter_sort_models: ['#openrouter_sort_models', 'openrouter_sort_models', false],
ai21_model: ['#model_ai21_select', 'ai21_model', false], ai21_model: ['#model_ai21_select', 'ai21_model', false],
mistralai_model: ['#model_mistralai_select', 'mistralai_model', false], mistralai_model: ['#model_mistralai_select', 'mistralai_model', false],
custom_model: ['#custom_model_id', 'custom_model', false],
custom_url: ['#custom_api_url_text', 'custom_url', false],
custom_include_body: ['#custom_include_body', 'custom_include_body', false],
custom_exclude_body: ['#custom_exclude_body', 'custom_exclude_body', false],
custom_include_headers: ['#custom_include_headers', 'custom_include_headers', false],
google_model: ['#model_google_select', 'google_model', false], google_model: ['#model_google_select', 'google_model', false],
openai_max_context: ['#openai_max_context', 'openai_max_context', false], openai_max_context: ['#openai_max_context', 'openai_max_context', false],
openai_max_tokens: ['#openai_max_tokens', 'openai_max_tokens', false], openai_max_tokens: ['#openai_max_tokens', 'openai_max_tokens', false],
@ -2929,9 +2996,11 @@ function onSettingsPresetChange() {
show_external_models: ['#openai_show_external_models', 'show_external_models', true], show_external_models: ['#openai_show_external_models', 'show_external_models', true],
proxy_password: ['#openai_proxy_password', 'proxy_password', false], proxy_password: ['#openai_proxy_password', 'proxy_password', false],
assistant_prefill: ['#claude_assistant_prefill', 'assistant_prefill', false], assistant_prefill: ['#claude_assistant_prefill', 'assistant_prefill', false],
human_sysprompt_message: ['#claude_human_sysprompt_textarea', 'human_sysprompt_message', false],
use_ai21_tokenizer: ['#use_ai21_tokenizer', 'use_ai21_tokenizer', true], use_ai21_tokenizer: ['#use_ai21_tokenizer', 'use_ai21_tokenizer', true],
use_google_tokenizer: ['#use_google_tokenizer', 'use_google_tokenizer', true], use_google_tokenizer: ['#use_google_tokenizer', 'use_google_tokenizer', true],
exclude_assistant: ['#exclude_assistant', 'exclude_assistant', true], exclude_assistant: ['#exclude_assistant', 'exclude_assistant', true],
claude_use_sysprompt: ['#claude_use_sysprompt', 'claude_use_sysprompt', true],
use_alt_scale: ['#use_alt_scale', 'use_alt_scale', true], use_alt_scale: ['#use_alt_scale', 'use_alt_scale', true],
squash_system_messages: ['#squash_system_messages', 'squash_system_messages', true], squash_system_messages: ['#squash_system_messages', 'squash_system_messages', true],
image_inlining: ['#openai_image_inlining', 'image_inlining', true], image_inlining: ['#openai_image_inlining', 'image_inlining', true],
@ -3094,6 +3163,12 @@ async function onModelChange() {
oai_settings.mistralai_model = value; oai_settings.mistralai_model = value;
} }
if (value && $(this).is('#model_custom_select')) {
console.log('Custom model changed to', value);
oai_settings.custom_model = value;
$('#custom_model_id').val(value).trigger('input');
}
if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) { if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) {
if (oai_settings.max_context_unlocked) { if (oai_settings.max_context_unlocked) {
$('#openai_max_context').attr('max', unlocked_max); $('#openai_max_context').attr('max', unlocked_max);
@ -3241,6 +3316,12 @@ async function onModelChange() {
$('#top_k_openai').attr('max', 200).val(oai_settings.top_k_openai).trigger('input'); $('#top_k_openai').attr('max', 200).val(oai_settings.top_k_openai).trigger('input');
} }
if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) {
$('#openai_max_context').attr('max', unlocked_max);
oai_settings.openai_max_context = Math.min(Number($('#openai_max_context').attr('max')), oai_settings.openai_max_context);
$('#openai_max_context').val(oai_settings.openai_max_context).trigger('input');
}
$('#openai_max_context_counter').attr('max', Number($('#openai_max_context').attr('max'))); $('#openai_max_context_counter').attr('max', Number($('#openai_max_context').attr('max')));
saveSettingsDebounced(); saveSettingsDebounced();
@ -3255,7 +3336,7 @@ async function onNewPresetClick() {
const popupText = ` const popupText = `
<h3>Preset name:</h3> <h3>Preset name:</h3>
<h4>Hint: Use a character/group name to bind preset to a specific chat.</h4>`; <h4>Hint: Use a character/group name to bind preset to a specific chat.</h4>`;
const name = await callPopup(popupText, 'input'); const name = await callPopup(popupText, 'input', oai_settings.preset_settings_openai);
if (!name) { if (!name) {
return; return;
@ -3383,6 +3464,19 @@ async function onConnectButtonClick(e) {
} }
} }
if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) {
const api_key_custom = String($('#api_key_custom').val()).trim();
if (api_key_custom.length) {
await writeSecret(SECRET_KEYS.CUSTOM, api_key_custom);
}
if (!oai_settings.custom_url) {
console.log('No API URL saved for Custom');
return;
}
}
startStatusLoading(); startStatusLoading();
saveSettingsDebounced(); saveSettingsDebounced();
await getStatusOpen(); await getStatusOpen();
@ -3418,6 +3512,9 @@ function toggleChatCompletionForms() {
else if (oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI) { else if (oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI) {
$('#model_mistralai_select').trigger('change'); $('#model_mistralai_select').trigger('change');
} }
else if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) {
$('#model_custom_select').trigger('change');
}
$('[data-source]').each(function () { $('[data-source]').each(function () {
const validSources = $(this).data('source').split(','); const validSources = $(this).data('source').split(',');
$(this).toggle(validSources.includes(oai_settings.chat_completion_source)); $(this).toggle(validSources.includes(oai_settings.chat_completion_source));
@ -3425,6 +3522,7 @@ function toggleChatCompletionForms() {
if (chat_completion_sources.CLAUDE == oai_settings.chat_completion_source) { if (chat_completion_sources.CLAUDE == oai_settings.chat_completion_source) {
$('#claude_assistant_prefill_block').toggle(!oai_settings.exclude_assistant); $('#claude_assistant_prefill_block').toggle(!oai_settings.exclude_assistant);
$('#claude_human_sysprompt_message_block').toggle(oai_settings.claude_use_sysprompt);
} }
} }
@ -3468,6 +3566,42 @@ function updateScaleForm() {
} }
} }
function onCustomizeParametersClick() {
const template = $(`
<div class="flex-container flexFlowColumn height100p">
<h3>Additional Parameters</h3>
<div class="flex1 flex-container flexFlowColumn">
<h4>Include Body Parameters</h4>
<textarea id="custom_include_body" class="flex1" placeholder="Parameters to be included in the Chat Completion request body (YAML object)&#10;&#10;Example:&#10;- top_k: 20&#10;- repetition_penalty: 1.1"></textarea>
</div>
<div class="flex1 flex-container flexFlowColumn">
<h4>Exclude Body Parameters</h4>
<textarea id="custom_exclude_body" class="flex1" placeholder="Parameters to be excluded from the Chat Completion request body (YAML array)&#10;&#10;Example:&#10;- frequency_penalty&#10;- presence_penalty"></textarea>
</div>
<div class="flex1 flex-container flexFlowColumn">
<h4>Include Request Headers</h4>
<textarea id="custom_include_headers" class="flex1" placeholder="Additional headers for Chat Completion requests (YAML object)&#10;&#10;Example:&#10;- CustomHeader: custom-value&#10;- AnotherHeader: custom-value"></textarea>
</div>
</div>`);
template.find('#custom_include_body').val(oai_settings.custom_include_body).on('input', function() {
oai_settings.custom_include_body = String($(this).val());
saveSettingsDebounced();
});
template.find('#custom_exclude_body').val(oai_settings.custom_exclude_body).on('input', function() {
oai_settings.custom_exclude_body = String($(this).val());
saveSettingsDebounced();
});
template.find('#custom_include_headers').val(oai_settings.custom_include_headers).on('input', function() {
oai_settings.custom_include_headers = String($(this).val());
saveSettingsDebounced();
});
callPopup(template, 'text', '', { wide: true, large: true });
}
/** /**
* Check if the model supports image inlining * Check if the model supports image inlining
* @returns {boolean} True if the model supports image inlining * @returns {boolean} True if the model supports image inlining
@ -3479,7 +3613,7 @@ export function isImageInliningSupported() {
const gpt4v = 'gpt-4-vision'; const gpt4v = 'gpt-4-vision';
const geminiProV = 'gemini-pro-vision'; const geminiProV = 'gemini-pro-vision';
const llava13b = 'llava-13b'; const llava = 'llava';
if (!oai_settings.image_inlining) { if (!oai_settings.image_inlining) {
return false; return false;
@ -3491,7 +3625,9 @@ export function isImageInliningSupported() {
case chat_completion_sources.MAKERSUITE: case chat_completion_sources.MAKERSUITE:
return oai_settings.google_model.includes(geminiProV); return oai_settings.google_model.includes(geminiProV);
case chat_completion_sources.OPENROUTER: case chat_completion_sources.OPENROUTER:
return oai_settings.openrouter_model.includes(gpt4v) || oai_settings.openrouter_model.includes(llava13b); return !oai_settings.openrouter_force_instruct && (oai_settings.openrouter_model.includes(gpt4v) || oai_settings.openrouter_model.includes(llava));
case chat_completion_sources.CUSTOM:
return oai_settings.custom_model.includes(gpt4v) || oai_settings.custom_model.includes(llava) || oai_settings.custom_model.includes(geminiProV);
default: default:
return false; return false;
} }
@ -3585,6 +3721,12 @@ $(document).ready(async function () {
saveSettingsDebounced(); saveSettingsDebounced();
}); });
$('#claude_use_sysprompt').on('change', function () {
oai_settings.claude_use_sysprompt = !!$('#claude_use_sysprompt').prop('checked');
$('#claude_human_sysprompt_message_block').toggle(oai_settings.claude_use_sysprompt);
saveSettingsDebounced();
});
$('#names_in_completion').on('change', function () { $('#names_in_completion').on('change', function () {
oai_settings.names_in_completion = !!$('#names_in_completion').prop('checked'); oai_settings.names_in_completion = !!$('#names_in_completion').prop('checked');
saveSettingsDebounced(); saveSettingsDebounced();
@ -3658,6 +3800,12 @@ $(document).ready(async function () {
saveSettingsDebounced(); saveSettingsDebounced();
}); });
$('#claude_human_sysprompt_message_restore').on('click', function () {
oai_settings.human_sysprompt_message = default_claude_human_sysprompt_message;
$('#claude_human_sysprompt_textarea').val(oai_settings.human_sysprompt_message);
saveSettingsDebounced();
});
$('#newgroupchat_prompt_restore').on('click', function () { $('#newgroupchat_prompt_restore').on('click', function () {
oai_settings.new_group_chat_prompt = default_new_group_chat_prompt; oai_settings.new_group_chat_prompt = default_new_group_chat_prompt;
$('#newgroupchat_prompt_textarea').val(oai_settings.new_group_chat_prompt); $('#newgroupchat_prompt_textarea').val(oai_settings.new_group_chat_prompt);
@ -3745,6 +3893,11 @@ $(document).ready(async function () {
saveSettingsDebounced(); saveSettingsDebounced();
}); });
$('#claude_human_sysprompt_textarea').on('input', function () {
oai_settings.human_sysprompt_message = String($('#claude_human_sysprompt_textarea').val());
saveSettingsDebounced();
});
$('#openrouter_use_fallback').on('input', function () { $('#openrouter_use_fallback').on('input', function () {
oai_settings.openrouter_use_fallback = !!$(this).prop('checked'); oai_settings.openrouter_use_fallback = !!$(this).prop('checked');
saveSettingsDebounced(); saveSettingsDebounced();
@ -3780,6 +3933,16 @@ $(document).ready(async function () {
saveSettingsDebounced(); saveSettingsDebounced();
}); });
$('#custom_api_url_text').on('input', function () {
oai_settings.custom_url = String($(this).val());
saveSettingsDebounced();
});
$('#custom_model_id').on('input', function () {
oai_settings.custom_model = String($(this).val());
saveSettingsDebounced();
});
$(document).on('input', '#openai_settings .autoSetHeight', function () { $(document).on('input', '#openai_settings .autoSetHeight', function () {
resetScrollHeight($(this)); resetScrollHeight($(this));
}); });
@ -3796,6 +3959,7 @@ $(document).ready(async function () {
$('#openrouter_sort_models').on('change', onOpenrouterModelSortChange); $('#openrouter_sort_models').on('change', onOpenrouterModelSortChange);
$('#model_ai21_select').on('change', onModelChange); $('#model_ai21_select').on('change', onModelChange);
$('#model_mistralai_select').on('change', onModelChange); $('#model_mistralai_select').on('change', onModelChange);
$('#model_custom_select').on('change', onModelChange);
$('#settings_preset_openai').on('change', onSettingsPresetChange); $('#settings_preset_openai').on('change', onSettingsPresetChange);
$('#new_oai_preset').on('click', onNewPresetClick); $('#new_oai_preset').on('click', onNewPresetClick);
$('#delete_oai_preset').on('click', onDeletePresetClick); $('#delete_oai_preset').on('click', onDeletePresetClick);
@ -3810,4 +3974,5 @@ $(document).ready(async function () {
$('#openai_logit_bias_delete_preset').on('click', onLogitBiasPresetDeleteClick); $('#openai_logit_bias_delete_preset').on('click', onLogitBiasPresetDeleteClick);
$('#import_oai_preset').on('click', onImportPresetClick); $('#import_oai_preset').on('click', onImportPresetClick);
$('#openai_proxy_password_show').on('click', onProxyPasswordShowClick); $('#openai_proxy_password_show').on('click', onProxyPasswordShowClick);
$('#customize_additional_parameters').on('click', onCustomizeParametersClick);
}); });

View File

@ -193,6 +193,22 @@ export function autoSelectPersona(name) {
} }
} }
/**
* Updates the name of a persona if it exists.
* @param {string} avatarId User avatar id
* @param {string} newName New name for the persona
*/
export async function updatePersonaNameIfExists(avatarId, newName) {
if (avatarId in power_user.personas) {
power_user.personas[avatarId] = newName;
await getUserAvatars();
saveSettingsDebounced();
console.log(`Updated persona name for ${avatarId} to ${newName}`);
} else {
console.log(`Persona name ${avatarId} was not updated because it does not exist`);
}
}
async function bindUserNameToPersona() { async function bindUserNameToPersona() {
const avatarId = $(this).closest('.avatar-container').find('.avatar').attr('imgfile'); const avatarId = $(this).closest('.avatar-container').find('.avatar').attr('imgfile');

View File

@ -205,6 +205,7 @@ let power_user = {
names: false, names: false,
names_force_groups: true, names_force_groups: true,
activation_regex: '', activation_regex: '',
bind_to_context: false,
}, },
default_context: 'Default', default_context: 'Default',
@ -1718,17 +1719,18 @@ function loadContextSettings() {
} }
}); });
// Select matching instruct preset if (power_user.instruct.bind_to_context) {
for (const instruct_preset of instruct_presets) { // Select matching instruct preset
// If instruct preset matches the context template for (const instruct_preset of instruct_presets) {
if (instruct_preset.name === name) { // If instruct preset matches the context template
selectInstructPreset(instruct_preset.name); if (instruct_preset.name === name) {
break; selectInstructPreset(instruct_preset.name);
break;
}
} }
} }
highlightDefaultContext(); highlightDefaultContext();
saveSettingsDebounced(); saveSettingsDebounced();
}); });

View File

@ -303,8 +303,14 @@ class PresetManager {
'model_novel', 'model_novel',
'streaming_kobold', 'streaming_kobold',
'enabled', 'enabled',
'bind_to_context',
'seed', 'seed',
'legacy_api',
'mancer_model', 'mancer_model',
'togetherai_model',
'ollama_model',
'server_urls',
'type',
]; ];
const settings = Object.assign({}, getSettingsByApiId(this.apiId)); const settings = Object.assign({}, getSettingsByApiId(this.apiId));

View File

@ -16,6 +16,7 @@ export const SECRET_KEYS = {
SERPAPI: 'api_key_serpapi', SERPAPI: 'api_key_serpapi',
MISTRALAI: 'api_key_mistralai', MISTRALAI: 'api_key_mistralai',
TOGETHERAI: 'api_key_togetherai', TOGETHERAI: 'api_key_togetherai',
CUSTOM: 'api_key_custom',
}; };
const INPUT_MAP = { const INPUT_MAP = {
@ -32,6 +33,7 @@ const INPUT_MAP = {
[SECRET_KEYS.APHRODITE]: '#api_key_aphrodite', [SECRET_KEYS.APHRODITE]: '#api_key_aphrodite',
[SECRET_KEYS.TABBY]: '#api_key_tabby', [SECRET_KEYS.TABBY]: '#api_key_tabby',
[SECRET_KEYS.MISTRALAI]: '#api_key_mistralai', [SECRET_KEYS.MISTRALAI]: '#api_key_mistralai',
[SECRET_KEYS.CUSTOM]: '#api_key_custom',
[SECRET_KEYS.TOGETHERAI]: '#api_key_togetherai', [SECRET_KEYS.TOGETHERAI]: '#api_key_togetherai',
}; };

View File

@ -20,7 +20,7 @@ import {
main_api, main_api,
name1, name1,
reloadCurrentChat, reloadCurrentChat,
replaceBiasMarkup, removeMacros,
saveChatConditional, saveChatConditional,
sendMessageAsUser, sendMessageAsUser,
sendSystemMessage, sendSystemMessage,
@ -845,7 +845,7 @@ async function unhideMessageCallback(_, arg) {
/** /**
* Copium for running group actions when the member is offscreen. * Copium for running group actions when the member is offscreen.
* @param {number} chid - character ID * @param {number} chid - character ID
* @param {string} action - one of 'enable', 'disable', 'up', 'down', 'peek', 'remove' * @param {string} action - one of 'enable', 'disable', 'up', 'down', 'view', 'remove'
* @returns {void} * @returns {void}
*/ */
function performGroupMemberAction(chid, action) { function performGroupMemberAction(chid, action) {
@ -868,7 +868,9 @@ function performGroupMemberAction(chid, action) {
if (wasOffscreen) { if (wasOffscreen) {
$(pageSizeSelector).val(paginationValue).trigger('change'); $(pageSizeSelector).val(paginationValue).trigger('change');
$(paginationSelector).pagination('go', pageValue); if ($(paginationSelector).length) {
$(paginationSelector).pagination('go', pageValue);
}
} }
} }
@ -958,7 +960,7 @@ async function peekCallback(_, arg) {
return ''; return '';
} }
performGroupMemberAction(chid, 'peek'); performGroupMemberAction(chid, 'view');
return ''; return '';
} }
@ -1258,7 +1260,7 @@ export async function sendMessageAs(args, text) {
// Messages that do nothing but set bias will be hidden from the context // Messages that do nothing but set bias will be hidden from the context
const bias = extractMessageBias(mesText); const bias = extractMessageBias(mesText);
const isSystem = replaceBiasMarkup(mesText).trim().length === 0; const isSystem = bias && !removeMacros(mesText).length;
const character = characters.find(x => x.name === name); const character = characters.find(x => x.name === name);
let force_avatar, original_avatar; let force_avatar, original_avatar;
@ -1311,7 +1313,7 @@ export async function sendNarratorMessage(args, text) {
const name = chat_metadata[NARRATOR_NAME_KEY] || NARRATOR_NAME_DEFAULT; const name = chat_metadata[NARRATOR_NAME_KEY] || NARRATOR_NAME_DEFAULT;
// Messages that do nothing but set bias will be hidden from the context // Messages that do nothing but set bias will be hidden from the context
const bias = extractMessageBias(text); const bias = extractMessageBias(text);
const isSystem = replaceBiasMarkup(text).trim().length === 0; const isSystem = bias && !removeMacros(text).length;
const message = { const message = {
name: name, name: name,

View File

@ -15,7 +15,7 @@
<h3>Confused or lost?</h3> <h3>Confused or lost?</h3>
<ul> <ul>
<li> <li>
<span class="note-link-span">?</span> - click these icons! <span class="note-link-span"><a class="fa-solid fa-circle-question" target="_blank" href="https://docs.sillytavern.app/"></a></span> - click these icons!
</li> </li>
<li> <li>
Enter <code>/?</code> in the chat bar Enter <code>/?</code> in the chat bar

View File

@ -1,10 +1,30 @@
import { setGenerationParamsFromPreset } from '../script.js'; import { callPopup, getRequestHeaders, setGenerationParamsFromPreset } from '../script.js';
import { isMobile } from './RossAscends-mods.js'; import { isMobile } from './RossAscends-mods.js';
import { textgenerationwebui_settings as textgen_settings } from './textgen-settings.js'; import { textgenerationwebui_settings as textgen_settings, textgen_types } from './textgen-settings.js';
let mancerModels = []; let mancerModels = [];
let togetherModels = []; let togetherModels = [];
export async function loadOllamaModels(data) {
if (!Array.isArray(data)) {
console.error('Invalid Ollama models data', data);
return;
}
if (!data.find(x => x.id === textgen_settings.ollama_model)) {
textgen_settings.ollama_model = data[0]?.id || '';
}
$('#ollama_model').empty();
for (const model of data) {
const option = document.createElement('option');
option.value = model.id;
option.text = model.name;
option.selected = model.id === textgen_settings.ollama_model;
$('#ollama_model').append(option);
}
}
export async function loadTogetherAIModels(data) { export async function loadTogetherAIModels(data) {
if (!Array.isArray(data)) { if (!Array.isArray(data)) {
console.error('Invalid Together AI models data', data); console.error('Invalid Together AI models data', data);
@ -13,6 +33,10 @@ export async function loadTogetherAIModels(data) {
togetherModels = data; togetherModels = data;
if (!data.find(x => x.name === textgen_settings.togetherai_model)) {
textgen_settings.togetherai_model = data[0]?.name || '';
}
$('#model_togetherai_select').empty(); $('#model_togetherai_select').empty();
for (const model of data) { for (const model of data) {
// Hey buddy, I think you've got the wrong door. // Hey buddy, I think you've got the wrong door.
@ -36,6 +60,10 @@ export async function loadMancerModels(data) {
mancerModels = data; mancerModels = data;
if (!data.find(x => x.id === textgen_settings.mancer_model)) {
textgen_settings.mancer_model = data[0]?.id || '';
}
$('#mancer_model').empty(); $('#mancer_model').empty();
for (const model of data) { for (const model of data) {
const option = document.createElement('option'); const option = document.createElement('option');
@ -55,7 +83,6 @@ function onMancerModelSelect() {
setGenerationParamsFromPreset({ max_length: limits.context, genamt: limits.completion }); setGenerationParamsFromPreset({ max_length: limits.context, genamt: limits.completion });
} }
function onTogetherModelSelect() { function onTogetherModelSelect() {
const modelName = String($('#model_togetherai_select').val()); const modelName = String($('#model_togetherai_select').val());
textgen_settings.togetherai_model = modelName; textgen_settings.togetherai_model = modelName;
@ -64,6 +91,12 @@ function onTogetherModelSelect() {
setGenerationParamsFromPreset({ max_length: model.context_length }); setGenerationParamsFromPreset({ max_length: model.context_length });
} }
function onOllamaModelSelect() {
const modelId = String($('#ollama_model').val());
textgen_settings.ollama_model = modelId;
$('#api_button_textgenerationwebui').trigger('click');
}
function getMancerModelTemplate(option) { function getMancerModelTemplate(option) {
const model = mancerModels.find(x => x.id === option?.element?.value); const model = mancerModels.find(x => x.id === option?.element?.value);
@ -97,9 +130,52 @@ function getTogetherModelTemplate(option) {
`)); `));
} }
async function downloadOllamaModel() {
try {
const serverUrl = textgen_settings.server_urls[textgen_types.OLLAMA];
if (!serverUrl) {
toastr.info('Please connect to an Ollama server first.');
return;
}
const html = `Enter a model tag, for example <code>llama2:latest</code>.<br>
See <a target="_blank" href="https://ollama.ai/library">Library</a> for available models.`;
const name = await callPopup(html, 'input', '', { okButton: 'Download' });
if (!name) {
return;
}
toastr.info('Download may take a while, please wait...', 'Working on it');
const response = await fetch('/api/backends/text-completions/ollama/download', {
method: 'POST',
headers: getRequestHeaders(),
body: JSON.stringify({
name: name,
api_server: serverUrl,
}),
});
if (!response.ok) {
throw new Error(response.statusText);
}
// Force refresh the model list
toastr.success('Download complete. Please select the model from the dropdown.');
$('#api_button_textgenerationwebui').trigger('click');
} catch (err) {
console.error(err);
toastr.error('Failed to download Ollama model. Please try again.');
}
}
jQuery(function () { jQuery(function () {
$('#mancer_model').on('change', onMancerModelSelect); $('#mancer_model').on('change', onMancerModelSelect);
$('#model_togetherai_select').on('change', onTogetherModelSelect); $('#model_togetherai_select').on('change', onTogetherModelSelect);
$('#ollama_model').on('change', onOllamaModelSelect);
$('#ollama_download_model').on('click', downloadOllamaModel);
if (!isMobile()) { if (!isMobile()) {
$('#mancer_model').select2({ $('#mancer_model').select2({
@ -116,5 +192,11 @@ jQuery(function () {
width: '100%', width: '100%',
templateResult: getTogetherModelTemplate, templateResult: getTogetherModelTemplate,
}); });
$('#ollama_model').select2({
placeholder: 'Select a model',
searchInputPlaceholder: 'Search models...',
searchInputCssClass: 'text_pole',
width: '100%',
});
} }
}); });

View File

@ -1,5 +1,4 @@
import { import {
api_server_textgenerationwebui,
getRequestHeaders, getRequestHeaders,
getStoppingStrings, getStoppingStrings,
max_context, max_context,
@ -34,9 +33,10 @@ export const textgen_types = {
KOBOLDCPP: 'koboldcpp', KOBOLDCPP: 'koboldcpp',
TOGETHERAI: 'togetherai', TOGETHERAI: 'togetherai',
LLAMACPP: 'llamacpp', LLAMACPP: 'llamacpp',
OLLAMA: 'ollama',
}; };
const { MANCER, APHRODITE, TOGETHERAI, OOBA } = textgen_types; const { MANCER, APHRODITE, TOGETHERAI, OOBA, OLLAMA, LLAMACPP } = textgen_types;
const BIAS_KEY = '#textgenerationwebui_api-settings'; const BIAS_KEY = '#textgenerationwebui_api-settings';
// Maybe let it be configurable in the future? // Maybe let it be configurable in the future?
@ -46,6 +46,15 @@ const MANCER_SERVER_DEFAULT = 'https://neuro.mancer.tech';
let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEFAULT; let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEFAULT;
let TOGETHERAI_SERVER = 'https://api.together.xyz'; let TOGETHERAI_SERVER = 'https://api.together.xyz';
const SERVER_INPUTS = {
[textgen_types.OOBA]: '#textgenerationwebui_api_url_text',
[textgen_types.APHRODITE]: '#aphrodite_api_url_text',
[textgen_types.TABBY]: '#tabby_api_url_text',
[textgen_types.KOBOLDCPP]: '#koboldcpp_api_url_text',
[textgen_types.LLAMACPP]: '#llamacpp_api_url_text',
[textgen_types.OLLAMA]: '#ollama_api_url_text',
};
const KOBOLDCPP_ORDER = [6, 0, 1, 3, 4, 2, 5]; const KOBOLDCPP_ORDER = [6, 0, 1, 3, 4, 2, 5];
const settings = { const settings = {
temp: 0.7, temp: 0.7,
@ -95,10 +104,12 @@ const settings = {
type: textgen_types.OOBA, type: textgen_types.OOBA,
mancer_model: 'mytholite', mancer_model: 'mytholite',
togetherai_model: 'Gryphe/MythoMax-L2-13b', togetherai_model: 'Gryphe/MythoMax-L2-13b',
ollama_model: '',
legacy_api: false, legacy_api: false,
sampler_order: KOBOLDCPP_ORDER, sampler_order: KOBOLDCPP_ORDER,
logit_bias: [], logit_bias: [],
n: 1, n: 1,
server_urls: {},
}; };
export let textgenerationwebui_banned_in_macros = []; export let textgenerationwebui_banned_in_macros = [];
@ -154,6 +165,37 @@ const setting_names = [
'logit_bias', 'logit_bias',
]; ];
export function validateTextGenUrl() {
const selector = SERVER_INPUTS[settings.type];
if (!selector) {
return;
}
const control = $(selector);
const url = String(control.val()).trim();
const formattedUrl = formatTextGenURL(url);
if (!formattedUrl) {
toastr.error('Enter a valid API URL', 'Text Completion API');
return;
}
control.val(formattedUrl);
}
export function getTextGenServer() {
if (settings.type === MANCER) {
return MANCER_SERVER;
}
if (settings.type === TOGETHERAI) {
return TOGETHERAI_SERVER;
}
return settings.server_urls[settings.type] ?? '';
}
async function selectPreset(name) { async function selectPreset(name) {
const preset = textgenerationwebui_presets[textgenerationwebui_preset_names.indexOf(name)]; const preset = textgenerationwebui_presets[textgenerationwebui_preset_names.indexOf(name)];
@ -291,6 +333,21 @@ function loadTextGenSettings(data, loadedSettings) {
textgenerationwebui_preset_names = data.textgenerationwebui_preset_names ?? []; textgenerationwebui_preset_names = data.textgenerationwebui_preset_names ?? [];
Object.assign(settings, loadedSettings.textgenerationwebui_settings ?? {}); Object.assign(settings, loadedSettings.textgenerationwebui_settings ?? {});
if (loadedSettings.api_server_textgenerationwebui) {
for (const type of Object.keys(SERVER_INPUTS)) {
settings.server_urls[type] = loadedSettings.api_server_textgenerationwebui;
}
delete loadedSettings.api_server_textgenerationwebui;
}
for (const [type, selector] of Object.entries(SERVER_INPUTS)) {
const control = $(selector);
control.val(settings.server_urls[type] ?? '').on('input', function () {
settings.server_urls[type] = String($(this).val());
saveSettingsDebounced();
});
}
if (loadedSettings.api_use_mancer_webui) { if (loadedSettings.api_use_mancer_webui) {
settings.type = MANCER; settings.type = MANCER;
} }
@ -336,21 +393,6 @@ function loadTextGenSettings(data, loadedSettings) {
}); });
} }
export function getTextGenUrlSourceId() {
switch (settings.type) {
case textgen_types.OOBA:
return '#textgenerationwebui_api_url_text';
case textgen_types.APHRODITE:
return '#aphrodite_api_url_text';
case textgen_types.TABBY:
return '#tabby_api_url_text';
case textgen_types.KOBOLDCPP:
return '#koboldcpp_api_url_text';
case textgen_types.LLAMACPP:
return '#llamacpp_api_url_text';
}
}
/** /**
* Sorts the sampler items by the given order. * Sorts the sampler items by the given order.
* @param {any[]} orderArray Sampler order array. * @param {any[]} orderArray Sampler order array.
@ -423,7 +465,10 @@ jQuery(function () {
BIAS_CACHE.delete(BIAS_KEY); BIAS_CACHE.delete(BIAS_KEY);
$('#main_api').trigger('change'); $('#main_api').trigger('change');
$('#api_button_textgenerationwebui').trigger('click');
if (!SERVER_INPUTS[type] || settings.server_urls[type]) {
$('#api_button_textgenerationwebui').trigger('click');
}
saveSettingsDebounced(); saveSettingsDebounced();
}); });
@ -620,21 +665,18 @@ function getModel() {
return online_status; return online_status;
} }
if (settings.type === OLLAMA) {
if (!settings.ollama_model) {
toastr.error('No Ollama model selected.', 'Text Completion API');
throw new Error('No Ollama model selected');
}
return settings.ollama_model;
}
return undefined; return undefined;
} }
export function getTextGenServer() {
if (settings.type === MANCER) {
return MANCER_SERVER;
}
if (settings.type === TOGETHERAI) {
return TOGETHERAI_SERVER;
}
return api_server_textgenerationwebui;
}
export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, isContinue, cfgValues, type) { export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, isContinue, cfgValues, type) {
const canMultiSwipe = !isContinue && !isImpersonate && type !== 'quiet'; const canMultiSwipe = !isContinue && !isImpersonate && type !== 'quiet';
let params = { let params = {
@ -687,6 +729,13 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
'guidance_scale': cfgValues?.guidanceScale?.value ?? settings.guidance_scale ?? 1, 'guidance_scale': cfgValues?.guidanceScale?.value ?? settings.guidance_scale ?? 1,
'negative_prompt': cfgValues?.negativePrompt ?? substituteParams(settings.negative_prompt) ?? '', 'negative_prompt': cfgValues?.negativePrompt ?? substituteParams(settings.negative_prompt) ?? '',
'grammar_string': settings.grammar_string, 'grammar_string': settings.grammar_string,
// llama.cpp aliases. In case someone wants to use LM Studio as Text Completion API
'repeat_penalty': settings.rep_pen,
'tfs_z': settings.tfs,
'repeat_last_n': settings.rep_pen_range,
'n_predict': settings.maxTokens,
'mirostat': settings.mirostat_mode,
'ignore_eos': settings.ban_eos_token,
}; };
const aphroditeParams = { const aphroditeParams = {
'n': canMultiSwipe ? settings.n : 1, 'n': canMultiSwipe ? settings.n : 1,
@ -697,7 +746,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
//'logprobs': settings.log_probs_aphrodite, //'logprobs': settings.log_probs_aphrodite,
//'prompt_logprobs': settings.prompt_log_probs_aphrodite, //'prompt_logprobs': settings.prompt_log_probs_aphrodite,
}; };
if (settings.type === textgen_types.APHRODITE) { if (settings.type === APHRODITE) {
params = Object.assign(params, aphroditeParams); params = Object.assign(params, aphroditeParams);
} else { } else {
params = Object.assign(params, nonAphroditeParams); params = Object.assign(params, nonAphroditeParams);
@ -709,7 +758,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
params.logit_bias = logitBias; params.logit_bias = logitBias;
} }
if (settings.type === textgen_types.LLAMACPP) { if (settings.type === LLAMACPP || settings.type === OLLAMA) {
// Convert bias and token bans to array of arrays // Convert bias and token bans to array of arrays
const logitBiasArray = (params.logit_bias && typeof params.logit_bias === 'object' && Object.keys(params.logit_bias).length > 0) const logitBiasArray = (params.logit_bias && typeof params.logit_bias === 'object' && Object.keys(params.logit_bias).length > 0)
? Object.entries(params.logit_bias).map(([key, value]) => [Number(key), value]) ? Object.entries(params.logit_bias).map(([key, value]) => [Number(key), value])
@ -717,14 +766,9 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
const tokenBans = toIntArray(getCustomTokenBans()); const tokenBans = toIntArray(getCustomTokenBans());
logitBiasArray.push(...tokenBans.map(x => [Number(x), false])); logitBiasArray.push(...tokenBans.map(x => [Number(x), false]));
const llamaCppParams = { const llamaCppParams = {
'repeat_penalty': settings.rep_pen,
'tfs_z': settings.tfs,
'repeat_last_n': settings.rep_pen_range,
'n_predict': settings.maxTokens,
'mirostat': settings.mirostat_mode,
'ignore_eos': settings.ban_eos_token,
'grammar': settings.grammar_string,
'logit_bias': logitBiasArray, 'logit_bias': logitBiasArray,
// Conflicts with ooba's grammar_string
'grammar': settings.grammar_string,
}; };
params = Object.assign(params, llamaCppParams); params = Object.assign(params, llamaCppParams);
} }

View File

@ -1,10 +1,10 @@
import { characters, main_api, api_server, api_server_textgenerationwebui, nai_settings, online_status, this_chid } from '../script.js'; import { characters, main_api, api_server, nai_settings, online_status, this_chid } from '../script.js';
import { power_user, registerDebugFunction } from './power-user.js'; import { power_user, registerDebugFunction } from './power-user.js';
import { chat_completion_sources, model_list, oai_settings } from './openai.js'; import { chat_completion_sources, model_list, oai_settings } from './openai.js';
import { groups, selected_group } from './group-chats.js'; import { groups, selected_group } from './group-chats.js';
import { getStringHash } from './utils.js'; import { getStringHash } from './utils.js';
import { kai_flags } from './kai-settings.js'; import { kai_flags } from './kai-settings.js';
import { textgen_types, textgenerationwebui_settings as textgen_settings } from './textgen-settings.js'; import { textgen_types, textgenerationwebui_settings as textgen_settings, getTextGenServer } from './textgen-settings.js';
const { OOBA, TABBY, KOBOLDCPP, APHRODITE, LLAMACPP } = textgen_types; const { OOBA, TABBY, KOBOLDCPP, APHRODITE, LLAMACPP } = textgen_types;
@ -388,6 +388,10 @@ export function getTokenizerModel() {
return mistralTokenizer; return mistralTokenizer;
} }
if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) {
return oai_settings.custom_model;
}
// Default to Turbo 3.5 // Default to Turbo 3.5
return turboTokenizer; return turboTokenizer;
} }
@ -537,7 +541,7 @@ function getTextgenAPITokenizationParams(str) {
return { return {
text: str, text: str,
api_type: textgen_settings.type, api_type: textgen_settings.type,
url: api_server_textgenerationwebui, url: getTextGenServer(),
legacy_api: textgen_settings.legacy_api && (textgen_settings.type === OOBA || textgen_settings.type === APHRODITE), legacy_api: textgen_settings.legacy_api && (textgen_settings.type === OOBA || textgen_settings.type === APHRODITE),
}; };
} }

View File

@ -637,6 +637,9 @@ hr {
order: 2; order: 2;
padding-right: 2px; padding-right: 2px;
place-self: center; place-self: center;
cursor: pointer;
transition: 0.3s;
opacity: 0.7;
} }
#options_button { #options_button {
@ -3531,11 +3534,11 @@ a {
display: none; display: none;
} }
.reverse_proxy_warning { .reverse_proxy_warning:not(small) {
color: var(--warning); color: var(--warning);
background-color: var(--black70a); background-color: var(--black70a);
text-shadow: none !important; text-shadow: none !important;
margin-top: 12px !important; margin-top: 5px !important;
border-radius: 5px; border-radius: 5px;
padding: 3px; padding: 3px;
border: 1px solid var(--SmartThemeBorderColor); border: 1px solid var(--SmartThemeBorderColor);

View File

@ -160,6 +160,7 @@ const CHAT_COMPLETION_SOURCES = {
AI21: 'ai21', AI21: 'ai21',
MAKERSUITE: 'makersuite', MAKERSUITE: 'makersuite',
MISTRALAI: 'mistralai', MISTRALAI: 'mistralai',
CUSTOM: 'custom',
}; };
const UPLOADS_PATH = './uploads'; const UPLOADS_PATH = './uploads';
@ -173,6 +174,7 @@ const TEXTGEN_TYPES = {
KOBOLDCPP: 'koboldcpp', KOBOLDCPP: 'koboldcpp',
TOGETHERAI: 'togetherai', TOGETHERAI: 'togetherai',
LLAMACPP: 'llamacpp', LLAMACPP: 'llamacpp',
OLLAMA: 'ollama',
}; };
// https://docs.together.ai/reference/completions // https://docs.together.ai/reference/completions
@ -187,6 +189,25 @@ const TOGETHERAI_KEYS = [
'stream', 'stream',
]; ];
// https://github.com/jmorganca/ollama/blob/main/docs/api.md#request-with-options
const OLLAMA_KEYS = [
'num_predict',
'stop',
'temperature',
'repeat_penalty',
'presence_penalty',
'frequency_penalty',
'top_k',
'top_p',
'tfs_z',
'typical_p',
'seed',
'repeat_last_n',
'mirostat',
'mirostat_tau',
'mirostat_eta',
];
const AVATAR_WIDTH = 400; const AVATAR_WIDTH = 400;
const AVATAR_HEIGHT = 600; const AVATAR_HEIGHT = 600;
@ -201,4 +222,5 @@ module.exports = {
AVATAR_WIDTH, AVATAR_WIDTH,
AVATAR_HEIGHT, AVATAR_HEIGHT,
TOGETHERAI_KEYS, TOGETHERAI_KEYS,
OLLAMA_KEYS,
}; };

View File

@ -4,7 +4,7 @@ const { Readable } = require('stream');
const { jsonParser } = require('../../express-common'); const { jsonParser } = require('../../express-common');
const { CHAT_COMPLETION_SOURCES, GEMINI_SAFETY, BISON_SAFETY } = require('../../constants'); const { CHAT_COMPLETION_SOURCES, GEMINI_SAFETY, BISON_SAFETY } = require('../../constants');
const { forwardFetchResponse, getConfigValue, tryParse, uuidv4 } = require('../../util'); const { forwardFetchResponse, getConfigValue, tryParse, uuidv4, mergeObjectWithYaml, excludeKeysByYaml, color } = require('../../util');
const { convertClaudePrompt, convertGooglePrompt, convertTextCompletionPrompt } = require('../prompt-converters'); const { convertClaudePrompt, convertGooglePrompt, convertTextCompletionPrompt } = require('../prompt-converters');
const { readSecret, SECRET_KEYS } = require('../secrets'); const { readSecret, SECRET_KEYS } = require('../secrets');
@ -21,9 +21,10 @@ const API_CLAUDE = 'https://api.anthropic.com/v1';
async function sendClaudeRequest(request, response) { async function sendClaudeRequest(request, response) {
const apiUrl = new URL(request.body.reverse_proxy || API_CLAUDE).toString(); const apiUrl = new URL(request.body.reverse_proxy || API_CLAUDE).toString();
const apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(SECRET_KEYS.CLAUDE); const apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(SECRET_KEYS.CLAUDE);
const divider = '-'.repeat(process.stdout.columns);
if (!apiKey) { if (!apiKey) {
console.log('Claude API key is missing.'); console.log(color.red(`Claude API key is missing.\n${divider}`));
return response.status(400).send({ error: true }); return response.status(400).send({ error: true });
} }
@ -34,34 +35,66 @@ async function sendClaudeRequest(request, response) {
controller.abort(); controller.abort();
}); });
let doSystemPrompt = request.body.model === 'claude-2' || request.body.model === 'claude-2.1'; const isSysPromptSupported = request.body.model === 'claude-2' || request.body.model === 'claude-2.1';
let requestPrompt = convertClaudePrompt(request.body.messages, true, !request.body.exclude_assistant, doSystemPrompt); const requestPrompt = convertClaudePrompt(request.body.messages, !request.body.exclude_assistant, request.body.assistant_prefill, isSysPromptSupported, request.body.claude_use_sysprompt, request.body.human_sysprompt_message);
if (request.body.assistant_prefill && !request.body.exclude_assistant) { // Check Claude messages sequence and prefixes presence.
requestPrompt += request.body.assistant_prefill; const sequence = requestPrompt.split('\n').filter(x => x.startsWith('Human:') || x.startsWith('Assistant:'));
const humanFound = sequence.some(line => line.startsWith('Human:'));
const assistantFound = sequence.some(line => line.startsWith('Assistant:'));
let humanErrorCount = 0;
let assistantErrorCount = 0;
for (let i = 0; i < sequence.length - 1; i++) {
if (sequence[i].startsWith(sequence[i + 1].split(':')[0])) {
if (sequence[i].startsWith('Human:')) {
humanErrorCount++;
} else if (sequence[i].startsWith('Assistant:')) {
assistantErrorCount++;
}
}
} }
console.log('Claude request:', requestPrompt); if (!humanFound) {
const stop_sequences = ['\n\nHuman:', '\n\nSystem:', '\n\nAssistant:']; console.log(color.red(`${divider}\nWarning: No 'Human:' prefix found in the prompt.\n${divider}`));
}
if (!assistantFound) {
console.log(color.red(`${divider}\nWarning: No 'Assistant: ' prefix found in the prompt.\n${divider}`));
}
if (!sequence[0].startsWith('Human:')) {
console.log(color.red(`${divider}\nWarning: The messages sequence should start with 'Human:' prefix.\nMake sure you have 'Human:' prefix at the very beggining of the prompt, or after the system prompt.\n${divider}`));
}
if (humanErrorCount > 0 || assistantErrorCount > 0) {
console.log(color.red(`${divider}\nWarning: Detected incorrect Prefix sequence(s).`));
console.log(color.red(`Incorrect "Human:" prefix(es): ${humanErrorCount}.\nIncorrect "Assistant: " prefix(es): ${assistantErrorCount}.`));
console.log(color.red('Check the prompt above and fix it in the SillyTavern.'));
console.log(color.red('\nThe correct sequence should look like this:\nSystem prompt <-(for the sysprompt format only, else have 2 empty lines above the first human\'s message.)'));
console.log(color.red(` <-----(Each message beginning with the "Assistant:/Human:" prefix must have one empty line above.)\nHuman:\n\nAssistant:\n...\n\nHuman:\n\nAssistant:\n${divider}`));
}
// Add custom stop sequences // Add custom stop sequences
const stopSequences = ['\n\nHuman:', '\n\nSystem:', '\n\nAssistant:'];
if (Array.isArray(request.body.stop)) { if (Array.isArray(request.body.stop)) {
stop_sequences.push(...request.body.stop); stopSequences.push(...request.body.stop);
} }
const requestBody = {
prompt: requestPrompt,
model: request.body.model,
max_tokens_to_sample: request.body.max_tokens,
stop_sequences: stopSequences,
temperature: request.body.temperature,
top_p: request.body.top_p,
top_k: request.body.top_k,
stream: request.body.stream,
};
console.log('Claude request:', requestBody);
const generateResponse = await fetch(apiUrl + '/complete', { const generateResponse = await fetch(apiUrl + '/complete', {
method: 'POST', method: 'POST',
signal: controller.signal, signal: controller.signal,
body: JSON.stringify({ body: JSON.stringify(requestBody),
prompt: requestPrompt,
model: request.body.model,
max_tokens_to_sample: request.body.max_tokens,
stop_sequences: stop_sequences,
temperature: request.body.temperature,
top_p: request.body.top_p,
top_k: request.body.top_k,
stream: request.body.stream,
}),
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
'anthropic-version': '2023-06-01', 'anthropic-version': '2023-06-01',
@ -75,20 +108,20 @@ async function sendClaudeRequest(request, response) {
forwardFetchResponse(generateResponse, response); forwardFetchResponse(generateResponse, response);
} else { } else {
if (!generateResponse.ok) { if (!generateResponse.ok) {
console.log(`Claude API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`); console.log(color.red(`Claude API returned error: ${generateResponse.status} ${generateResponse.statusText}\n${await generateResponse.text()}\n${divider}`));
return response.status(generateResponse.status).send({ error: true }); return response.status(generateResponse.status).send({ error: true });
} }
const generateResponseJson = await generateResponse.json(); const generateResponseJson = await generateResponse.json();
const responseText = generateResponseJson.completion; const responseText = generateResponseJson.completion;
console.log('Claude response:', responseText); console.log('Claude response:', generateResponseJson);
// Wrap it back to OAI format // Wrap it back to OAI format
const reply = { choices: [{ 'message': { 'content': responseText } }] }; const reply = { choices: [{ 'message': { 'content': responseText } }] };
return response.send(reply); return response.send(reply);
} }
} catch (error) { } catch (error) {
console.log('Error communicating with Claude: ', error); console.log(color.red(`Error communicating with Claude: ${error}\n${divider}`));
if (!response.headersSent) { if (!response.headersSent) {
return response.status(500).send({ error: true }); return response.status(500).send({ error: true });
} }
@ -438,26 +471,30 @@ async function sendMistralAIRequest(request, response) {
controller.abort(); controller.abort();
}); });
const requestBody = {
'model': request.body.model,
'messages': messages,
'temperature': request.body.temperature,
'top_p': request.body.top_p,
'max_tokens': request.body.max_tokens,
'stream': request.body.stream,
'safe_mode': request.body.safe_mode,
'random_seed': request.body.seed === -1 ? undefined : request.body.seed,
};
const config = { const config = {
method: 'POST', method: 'POST',
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
'Authorization': 'Bearer ' + apiKey, 'Authorization': 'Bearer ' + apiKey,
}, },
body: JSON.stringify({ body: JSON.stringify(requestBody),
'model': request.body.model,
'messages': messages,
'temperature': request.body.temperature,
'top_p': request.body.top_p,
'max_tokens': request.body.max_tokens,
'stream': request.body.stream,
'safe_mode': request.body.safe_mode,
'random_seed': request.body.seed === -1 ? undefined : request.body.seed,
}),
signal: controller.signal, signal: controller.signal,
timeout: 0, timeout: 0,
}; };
console.log('MisralAI request:', requestBody);
const generateResponse = await fetch('https://api.mistral.ai/v1/chat/completions', config); const generateResponse = await fetch('https://api.mistral.ai/v1/chat/completions', config);
if (request.body.stream) { if (request.body.stream) {
forwardFetchResponse(generateResponse, response); forwardFetchResponse(generateResponse, response);
@ -469,6 +506,7 @@ async function sendMistralAIRequest(request, response) {
return response.status(generateResponse.status === 401 ? 500 : generateResponse.status).send({ error: true }); return response.status(generateResponse.status === 401 ? 500 : generateResponse.status).send({ error: true });
} }
const generateResponseJson = await generateResponse.json(); const generateResponseJson = await generateResponse.json();
console.log('MistralAI response:', generateResponseJson);
return response.send(generateResponseJson); return response.send(generateResponseJson);
} }
} catch (error) { } catch (error) {
@ -502,12 +540,17 @@ router.post('/status', jsonParser, async function (request, response_getstatus_o
} else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.MISTRALAI) { } else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.MISTRALAI) {
api_url = 'https://api.mistral.ai/v1'; api_url = 'https://api.mistral.ai/v1';
api_key_openai = readSecret(SECRET_KEYS.MISTRALAI); api_key_openai = readSecret(SECRET_KEYS.MISTRALAI);
} else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.CUSTOM) {
api_url = request.body.custom_url;
api_key_openai = readSecret(SECRET_KEYS.CUSTOM);
headers = {};
mergeObjectWithYaml(headers, request.body.custom_include_headers);
} else { } else {
console.log('This chat completion source is not supported yet.'); console.log('This chat completion source is not supported yet.');
return response_getstatus_openai.status(400).send({ error: true }); return response_getstatus_openai.status(400).send({ error: true });
} }
if (!api_key_openai && !request.body.reverse_proxy) { if (!api_key_openai && !request.body.reverse_proxy && request.body.chat_completion_source !== CHAT_COMPLETION_SOURCES.CUSTOM) {
console.log('OpenAI API key is missing.'); console.log('OpenAI API key is missing.');
return response_getstatus_openai.status(400).send({ error: true }); return response_getstatus_openai.status(400).send({ error: true });
} }
@ -657,7 +700,7 @@ router.post('/generate', jsonParser, function (request, response) {
let headers; let headers;
let bodyParams; let bodyParams;
if (request.body.chat_completion_source !== CHAT_COMPLETION_SOURCES.OPENROUTER) { if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.OPENAI) {
apiUrl = new URL(request.body.reverse_proxy || API_OPENAI).toString(); apiUrl = new URL(request.body.reverse_proxy || API_OPENAI).toString();
apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(SECRET_KEYS.OPENAI); apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(SECRET_KEYS.OPENAI);
headers = {}; headers = {};
@ -666,7 +709,7 @@ router.post('/generate', jsonParser, function (request, response) {
if (getConfigValue('openai.randomizeUserId', false)) { if (getConfigValue('openai.randomizeUserId', false)) {
bodyParams['user'] = uuidv4(); bodyParams['user'] = uuidv4();
} }
} else { } else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.OPENROUTER) {
apiUrl = 'https://openrouter.ai/api/v1'; apiUrl = 'https://openrouter.ai/api/v1';
apiKey = readSecret(SECRET_KEYS.OPENROUTER); apiKey = readSecret(SECRET_KEYS.OPENROUTER);
// OpenRouter needs to pass the referer: https://openrouter.ai/docs // OpenRouter needs to pass the referer: https://openrouter.ai/docs
@ -676,9 +719,19 @@ router.post('/generate', jsonParser, function (request, response) {
if (request.body.use_fallback) { if (request.body.use_fallback) {
bodyParams['route'] = 'fallback'; bodyParams['route'] = 'fallback';
} }
} else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.CUSTOM) {
apiUrl = request.body.custom_url;
apiKey = readSecret(SECRET_KEYS.CUSTOM);
headers = {};
bodyParams = {};
mergeObjectWithYaml(bodyParams, request.body.custom_include_body);
mergeObjectWithYaml(headers, request.body.custom_include_headers);
} else {
console.log('This chat completion source is not supported yet.');
return response.status(400).send({ error: true });
} }
if (!apiKey && !request.body.reverse_proxy) { if (!apiKey && !request.body.reverse_proxy && request.body.chat_completion_source !== CHAT_COMPLETION_SOURCES.CUSTOM) {
console.log('OpenAI API key is missing.'); console.log('OpenAI API key is missing.');
return response.status(400).send({ error: true }); return response.status(400).send({ error: true });
} }
@ -700,6 +753,27 @@ router.post('/generate', jsonParser, function (request, response) {
controller.abort(); controller.abort();
}); });
const requestBody = {
'messages': isTextCompletion === false ? request.body.messages : undefined,
'prompt': isTextCompletion === true ? textPrompt : undefined,
'model': request.body.model,
'temperature': request.body.temperature,
'max_tokens': request.body.max_tokens,
'stream': request.body.stream,
'presence_penalty': request.body.presence_penalty,
'frequency_penalty': request.body.frequency_penalty,
'top_p': request.body.top_p,
'top_k': request.body.top_k,
'stop': isTextCompletion === false ? request.body.stop : undefined,
'logit_bias': request.body.logit_bias,
'seed': request.body.seed,
...bodyParams,
};
if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.CUSTOM) {
excludeKeysByYaml(requestBody, request.body.custom_exclude_body);
}
/** @type {import('node-fetch').RequestInit} */ /** @type {import('node-fetch').RequestInit} */
const config = { const config = {
method: 'post', method: 'post',
@ -708,27 +782,12 @@ router.post('/generate', jsonParser, function (request, response) {
'Authorization': 'Bearer ' + apiKey, 'Authorization': 'Bearer ' + apiKey,
...headers, ...headers,
}, },
body: JSON.stringify({ body: JSON.stringify(requestBody),
'messages': isTextCompletion === false ? request.body.messages : undefined,
'prompt': isTextCompletion === true ? textPrompt : undefined,
'model': request.body.model,
'temperature': request.body.temperature,
'max_tokens': request.body.max_tokens,
'stream': request.body.stream,
'presence_penalty': request.body.presence_penalty,
'frequency_penalty': request.body.frequency_penalty,
'top_p': request.body.top_p,
'top_k': request.body.top_k,
'stop': isTextCompletion === false ? request.body.stop : undefined,
'logit_bias': request.body.logit_bias,
'seed': request.body.seed,
...bodyParams,
}),
signal: controller.signal, signal: controller.signal,
timeout: 0, timeout: 0,
}; };
console.log(JSON.parse(String(config.body))); console.log(requestBody);
makeRequest(config, response, request); makeRequest(config, response, request);

View File

@ -1,14 +1,82 @@
const express = require('express'); const express = require('express');
const fetch = require('node-fetch').default; const fetch = require('node-fetch').default;
const _ = require('lodash'); const _ = require('lodash');
const Readable = require('stream').Readable;
const { jsonParser } = require('../../express-common'); const { jsonParser } = require('../../express-common');
const { TEXTGEN_TYPES, TOGETHERAI_KEYS } = require('../../constants'); const { TEXTGEN_TYPES, TOGETHERAI_KEYS, OLLAMA_KEYS } = require('../../constants');
const { forwardFetchResponse } = require('../../util'); const { forwardFetchResponse, trimV1 } = require('../../util');
const { setAdditionalHeaders } = require('../../additional-headers'); const { setAdditionalHeaders } = require('../../additional-headers');
const router = express.Router(); const router = express.Router();
/**
 * Special boy's steaming routine. Ollama streams newline-delimited JSON
 * (NDJSON), not SSE — wrap this abomination into a proper SSE stream.
 * @param {import('node-fetch').Response} jsonStream JSON stream
 * @param {import('express').Request} request Express request
 * @param {import('express').Response} response Express response
 * @returns {Promise<any>} Nothing valuable
 */
async function parseOllamaStream(jsonStream, request, response) {
    try {
        let partialData = '';
        jsonStream.body.on('data', (data) => {
            partialData += data.toString();
            // NDJSON: one JSON object per line. A single network chunk may
            // carry several complete lines and/or an incomplete trailing one,
            // so split on newlines and keep the unfinished tail buffered.
            const lines = partialData.split('\n');
            partialData = lines.pop() ?? '';
            for (const line of lines) {
                if (!line.trim()) continue;
                let json;
                try {
                    json = JSON.parse(line);
                } catch {
                    // Malformed line — skip it instead of stalling the stream.
                    continue;
                }
                const text = json.response || '';
                const chunk = { choices: [{ text }] };
                response.write(`data: ${JSON.stringify(chunk)}\n\n`);
            }
        });
        request.socket.on('close', function () {
            if (jsonStream.body instanceof Readable) jsonStream.body.destroy();
            response.end();
        });
        jsonStream.body.on('end', () => {
            console.log('Streaming request finished');
            response.write('data: [DONE]\n\n');
            response.end();
        });
    } catch (error) {
        console.log('Error forwarding streaming response:', error);
        if (!response.headersSent) {
            return response.status(500).send({ error: true });
        } else {
            return response.end();
        }
    }
}
/**
 * Ask a running KoboldCpp server to abort its current generation.
 * Best-effort: any failure is logged and swallowed.
 * @param {string} url Server base URL
 * @returns {Promise<void>} Promise resolving when we are done
 */
async function abortKoboldCppRequest(url) {
    try {
        console.log('Aborting Kobold generation...');
        const result = await fetch(`${url}/api/extra/abort`, { method: 'POST' });
        if (!result.ok) {
            console.log('Error sending abort request to Kobold:', result.status, result.statusText);
        }
    } catch (err) {
        console.log(err);
    }
}
//************** Ooba/OpenAI text completions API //************** Ooba/OpenAI text completions API
router.post('/status', jsonParser, async function (request, response) { router.post('/status', jsonParser, async function (request, response) {
if (!request.body) return response.sendStatus(400); if (!request.body) return response.sendStatus(400);
@ -19,9 +87,7 @@ router.post('/status', jsonParser, async function (request, response) {
} }
console.log('Trying to connect to API:', request.body); console.log('Trying to connect to API:', request.body);
const baseUrl = trimV1(request.body.api_server);
// Convert to string + remove trailing slash + /v1 suffix
const baseUrl = String(request.body.api_server).replace(/\/$/, '').replace(/\/v1$/, '');
const args = { const args = {
headers: { 'Content-Type': 'application/json' }, headers: { 'Content-Type': 'application/json' },
@ -51,6 +117,9 @@ router.post('/status', jsonParser, async function (request, response) {
case TEXTGEN_TYPES.TOGETHERAI: case TEXTGEN_TYPES.TOGETHERAI:
url += '/api/models?&info'; url += '/api/models?&info';
break; break;
case TEXTGEN_TYPES.OLLAMA:
url += '/api/tags';
break;
} }
} }
@ -73,6 +142,10 @@ router.post('/status', jsonParser, async function (request, response) {
data = { data: data.map(x => ({ id: x.name, ...x })) }; data = { data: data.map(x => ({ id: x.name, ...x })) };
} }
if (request.body.api_type === TEXTGEN_TYPES.OLLAMA && Array.isArray(data.models)) {
data = { data: data.models.map(x => ({ id: x.name, ...x })) };
}
if (!Array.isArray(data.data)) { if (!Array.isArray(data.data)) {
console.log('Models response is not an array.'); console.log('Models response is not an array.');
return response.status(400); return response.status(400);
@ -127,8 +200,8 @@ router.post('/status', jsonParser, async function (request, response) {
} }
}); });
router.post('/generate', jsonParser, async function (request, response_generate) { router.post('/generate', jsonParser, async function (request, response) {
if (!request.body) return response_generate.sendStatus(400); if (!request.body) return response.sendStatus(400);
try { try {
if (request.body.api_server.indexOf('localhost') !== -1) { if (request.body.api_server.indexOf('localhost') !== -1) {
@ -140,12 +213,15 @@ router.post('/generate', jsonParser, async function (request, response_generate)
const controller = new AbortController(); const controller = new AbortController();
request.socket.removeAllListeners('close'); request.socket.removeAllListeners('close');
request.socket.on('close', function () { request.socket.on('close', async function () {
if (request.body.api_type === TEXTGEN_TYPES.KOBOLDCPP && !response.writableEnded) {
await abortKoboldCppRequest(trimV1(baseUrl));
}
controller.abort(); controller.abort();
}); });
// Convert to string + remove trailing slash + /v1 suffix let url = trimV1(baseUrl);
let url = String(baseUrl).replace(/\/$/, '').replace(/\/v1$/, '');
if (request.body.legacy_api) { if (request.body.legacy_api) {
url += '/v1/generate'; url += '/v1/generate';
@ -164,6 +240,9 @@ router.post('/generate', jsonParser, async function (request, response_generate)
case TEXTGEN_TYPES.LLAMACPP: case TEXTGEN_TYPES.LLAMACPP:
url += '/completion'; url += '/completion';
break; break;
case TEXTGEN_TYPES.OLLAMA:
url += '/api/generate';
break;
} }
} }
@ -186,10 +265,23 @@ router.post('/generate', jsonParser, async function (request, response_generate)
args.body = JSON.stringify(request.body); args.body = JSON.stringify(request.body);
} }
if (request.body.stream) { if (request.body.api_type === TEXTGEN_TYPES.OLLAMA) {
args.body = JSON.stringify({
model: request.body.model,
prompt: request.body.prompt,
stream: request.body.stream ?? false,
raw: true,
options: _.pickBy(request.body, (_, key) => OLLAMA_KEYS.includes(key)),
});
}
if (request.body.api_type === TEXTGEN_TYPES.OLLAMA && request.body.stream) {
const stream = await fetch(url, args);
parseOllamaStream(stream, request, response);
} else if (request.body.stream) {
const completionsStream = await fetch(url, args); const completionsStream = await fetch(url, args);
// Pipe remote SSE stream to Express response // Pipe remote SSE stream to Express response
forwardFetchResponse(completionsStream, response_generate); forwardFetchResponse(completionsStream, response);
} }
else { else {
const completionsReply = await fetch(url, args); const completionsReply = await fetch(url, args);
@ -204,28 +296,152 @@ router.post('/generate', jsonParser, async function (request, response_generate)
data['choices'] = [{ text }]; data['choices'] = [{ text }];
} }
return response_generate.send(data); return response.send(data);
} else { } else {
const text = await completionsReply.text(); const text = await completionsReply.text();
const errorBody = { error: true, status: completionsReply.status, response: text }; const errorBody = { error: true, status: completionsReply.status, response: text };
if (!response_generate.headersSent) { if (!response.headersSent) {
return response_generate.send(errorBody); return response.send(errorBody);
} }
return response_generate.end(); return response.end();
} }
} }
} catch (error) { } catch (error) {
let value = { error: true, status: error?.status, response: error?.statusText }; let value = { error: true, status: error?.status, response: error?.statusText };
console.log('Endpoint error:', error); console.log('Endpoint error:', error);
if (!response_generate.headersSent) { if (!response.headersSent) {
return response_generate.send(value); return response.send(value);
} }
return response_generate.end(); return response.end();
} }
}); });
const ollama = express.Router();

/**
 * Pulls (downloads) a model onto the Ollama server.
 * Expects { name, api_server } in the request body.
 */
ollama.post('/download', jsonParser, async function (request, response) {
    try {
        if (!request.body.name || !request.body.api_server) return response.sendStatus(400);

        const name = request.body.name;
        const url = String(request.body.api_server).replace(/\/$/, '');

        const fetchResponse = await fetch(`${url}/api/pull`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({
                name: name,
                stream: false,
            }),
            timeout: 0,
        });

        if (!fetchResponse.ok) {
            console.log('Download error:', fetchResponse.status, fetchResponse.statusText);
            return response.status(fetchResponse.status).send({ error: true });
        }

        return response.send({ ok: true });
    } catch (error) {
        console.error(error);
        // Bug fix: a bare status(500) only sets the code and never terminates
        // the response, leaving the client hanging. sendStatus ends it.
        return response.sendStatus(500);
    }
});
/**
 * Generates a caption for an image using an Ollama multimodal model.
 * Expects { server_url, model, prompt, image } in the request body,
 * where image is the base64-encoded picture.
 */
ollama.post('/caption-image', jsonParser, async function (request, response) {
    try {
        if (!request.body.server_url || !request.body.model) {
            return response.sendStatus(400);
        }

        console.log('Ollama caption request:', request.body);
        const baseUrl = trimV1(request.body.server_url);

        const fetchResponse = await fetch(`${baseUrl}/api/generate`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({
                model: request.body.model,
                prompt: request.body.prompt,
                images: [request.body.image],
                stream: false,
            }),
            timeout: 0,
        });

        if (!fetchResponse.ok) {
            console.log('Ollama caption error:', fetchResponse.status, fetchResponse.statusText);
            return response.status(500).send({ error: true });
        }

        const data = await fetchResponse.json();
        console.log('Ollama caption response:', data);

        const caption = data?.response || '';

        if (!caption) {
            console.log('Ollama caption is empty.');
            return response.status(500).send({ error: true });
        }

        return response.send({ caption });
    } catch (error) {
        console.error(error);
        // Bug fix: a bare status(500) never sends the response; sendStatus does.
        return response.sendStatus(500);
    }
});
const llamacpp = express.Router();

/**
 * Generates a caption for an image using a llama.cpp server with a
 * multimodal (LLaVA-style) model loaded.
 * Expects { server_url, prompt, image } in the request body,
 * where image is the base64-encoded picture.
 */
llamacpp.post('/caption-image', jsonParser, async function (request, response) {
    try {
        if (!request.body.server_url) {
            return response.sendStatus(400);
        }

        console.log('LlamaCpp caption request:', request.body);
        const baseUrl = trimV1(request.body.server_url);

        const fetchResponse = await fetch(`${baseUrl}/completion`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            timeout: 0,
            body: JSON.stringify({
                prompt: `USER:[img-1]${String(request.body.prompt).trim()}\nASSISTANT:`,
                image_data: [{ data: request.body.image, id: 1 }],
                temperature: 0.1,
                stream: false,
                stop: ['USER:', '</s>'],
            }),
        });

        if (!fetchResponse.ok) {
            console.log('LlamaCpp caption error:', fetchResponse.status, fetchResponse.statusText);
            return response.status(500).send({ error: true });
        }

        const data = await fetchResponse.json();
        console.log('LlamaCpp caption response:', data);

        const caption = data?.content || '';

        if (!caption) {
            console.log('LlamaCpp caption is empty.');
            return response.status(500).send({ error: true });
        }

        return response.send({ caption });
    } catch (error) {
        console.error(error);
        // Bug fix: a bare status(500) never sends the response; sendStatus does.
        return response.sendStatus(500);
    }
});
router.use('/ollama', ollama);
router.use('/llamacpp', llamacpp);
module.exports = { router }; module.exports = { router };

View File

@ -4,6 +4,7 @@ const readline = require('readline');
const express = require('express'); const express = require('express');
const sanitize = require('sanitize-filename'); const sanitize = require('sanitize-filename');
const writeFileAtomicSync = require('write-file-atomic').sync; const writeFileAtomicSync = require('write-file-atomic').sync;
const yaml = require('yaml');
const _ = require('lodash'); const _ = require('lodash');
const encode = require('png-chunks-encode'); const encode = require('png-chunks-encode');
@ -19,6 +20,7 @@ const characterCardParser = require('../character-card-parser.js');
const { readWorldInfoFile } = require('./worldinfo'); const { readWorldInfoFile } = require('./worldinfo');
const { invalidateThumbnail } = require('./thumbnails'); const { invalidateThumbnail } = require('./thumbnails');
const { importRisuSprites } = require('./sprites'); const { importRisuSprites } = require('./sprites');
const defaultAvatarPath = './public/img/ai4.png';
let characters = {}; let characters = {};
@ -394,6 +396,36 @@ function convertWorldInfoToCharacterBook(name, entries) {
return result; return result;
} }
/**
 * Import a character from a YAML file.
 * Reads and deletes the uploaded file, converts the YAML fields to a
 * V2 character card, and writes it over the default avatar PNG.
 * @param {string} uploadPath Path to the uploaded file
 * @param {import('express').Response} response Express response object
 * @throws {Error} If the YAML data has no usable character name
 */
function importFromYaml(uploadPath, response) {
    const fileText = fs.readFileSync(uploadPath, 'utf8');
    fs.rmSync(uploadPath);
    const yamlData = yaml.parse(fileText);
    console.log('importing from yaml');

    if (!yamlData || !yamlData.name) {
        // Guard: without a name the card can't be sanitized or written to
        // disk. Throw a descriptive error; the caller's try/catch reports it.
        throw new Error('YAML character card is missing a name');
    }

    yamlData.name = sanitize(yamlData.name);
    const fileName = getPngName(yamlData.name);
    let char = convertToV2({
        'name': yamlData.name,
        'description': yamlData.context ?? '',
        'first_mes': yamlData.greeting ?? '',
        'create_date': humanizedISO8601DateTime(),
        'chat': `${yamlData.name} - ${humanizedISO8601DateTime()}`,
        'personality': '',
        'creatorcomment': '',
        'avatar': 'none',
        'mes_example': '',
        'scenario': '',
        'talkativeness': 0.5,
        'creator': '',
        'tags': '',
    });
    charaWrite(defaultAvatarPath, JSON.stringify(char), fileName, response, { file_name: fileName });
}
const router = express.Router(); const router = express.Router();
router.post('/create', urlencodedParser, async function (request, response) { router.post('/create', urlencodedParser, async function (request, response) {
@ -760,144 +792,147 @@ function getPngName(file) {
} }
router.post('/import', urlencodedParser, async function (request, response) { router.post('/import', urlencodedParser, async function (request, response) {
if (!request.body || !request.file) return response.sendStatus(400);
if (!request.body || request.file === undefined) return response.sendStatus(400);
let png_name = ''; let png_name = '';
let filedata = request.file; let filedata = request.file;
let uploadPath = path.join(UPLOADS_PATH, filedata.filename); let uploadPath = path.join(UPLOADS_PATH, filedata.filename);
var format = request.body.file_type; let format = request.body.file_type;
const defaultAvatarPath = './public/img/ai4.png';
//console.log(format);
if (filedata) {
if (format == 'json') {
fs.readFile(uploadPath, 'utf8', async (err, data) => {
fs.unlinkSync(uploadPath);
if (err) { if (format == 'yaml' || format == 'yml') {
console.log(err); try {
response.send({ error: true }); importFromYaml(uploadPath, response);
} } catch (err) {
console.log(err);
response.send({ error: true });
}
} else if (format == 'json') {
fs.readFile(uploadPath, 'utf8', async (err, data) => {
fs.unlinkSync(uploadPath);
let jsonData = JSON.parse(data); if (err) {
if (jsonData.spec !== undefined) {
console.log('importing from v2 json');
importRisuSprites(jsonData);
unsetFavFlag(jsonData);
jsonData = readFromV2(jsonData);
jsonData['create_date'] = humanizedISO8601DateTime();
png_name = getPngName(jsonData.data?.name || jsonData.name);
let char = JSON.stringify(jsonData);
charaWrite(defaultAvatarPath, char, png_name, response, { file_name: png_name });
} else if (jsonData.name !== undefined) {
console.log('importing from v1 json');
jsonData.name = sanitize(jsonData.name);
if (jsonData.creator_notes) {
jsonData.creator_notes = jsonData.creator_notes.replace('Creator\'s notes go here.', '');
}
png_name = getPngName(jsonData.name);
let char = {
'name': jsonData.name,
'description': jsonData.description ?? '',
'creatorcomment': jsonData.creatorcomment ?? jsonData.creator_notes ?? '',
'personality': jsonData.personality ?? '',
'first_mes': jsonData.first_mes ?? '',
'avatar': 'none',
'chat': jsonData.name + ' - ' + humanizedISO8601DateTime(),
'mes_example': jsonData.mes_example ?? '',
'scenario': jsonData.scenario ?? '',
'create_date': humanizedISO8601DateTime(),
'talkativeness': jsonData.talkativeness ?? 0.5,
'creator': jsonData.creator ?? '',
'tags': jsonData.tags ?? '',
};
char = convertToV2(char);
let charJSON = JSON.stringify(char);
charaWrite(defaultAvatarPath, charJSON, png_name, response, { file_name: png_name });
} else if (jsonData.char_name !== undefined) {//json Pygmalion notepad
console.log('importing from gradio json');
jsonData.char_name = sanitize(jsonData.char_name);
if (jsonData.creator_notes) {
jsonData.creator_notes = jsonData.creator_notes.replace('Creator\'s notes go here.', '');
}
png_name = getPngName(jsonData.char_name);
let char = {
'name': jsonData.char_name,
'description': jsonData.char_persona ?? '',
'creatorcomment': jsonData.creatorcomment ?? jsonData.creator_notes ?? '',
'personality': '',
'first_mes': jsonData.char_greeting ?? '',
'avatar': 'none',
'chat': jsonData.name + ' - ' + humanizedISO8601DateTime(),
'mes_example': jsonData.example_dialogue ?? '',
'scenario': jsonData.world_scenario ?? '',
'create_date': humanizedISO8601DateTime(),
'talkativeness': jsonData.talkativeness ?? 0.5,
'creator': jsonData.creator ?? '',
'tags': jsonData.tags ?? '',
};
char = convertToV2(char);
let charJSON = JSON.stringify(char);
charaWrite(defaultAvatarPath, charJSON, png_name, response, { file_name: png_name });
} else {
console.log('Incorrect character format .json');
response.send({ error: true });
}
});
} else {
try {
var img_data = await charaRead(uploadPath, format);
if (img_data === undefined) throw new Error('Failed to read character data');
let jsonData = JSON.parse(img_data);
jsonData.name = sanitize(jsonData.data?.name || jsonData.name);
png_name = getPngName(jsonData.name);
if (jsonData.spec !== undefined) {
console.log('Found a v2 character file.');
importRisuSprites(jsonData);
unsetFavFlag(jsonData);
jsonData = readFromV2(jsonData);
jsonData['create_date'] = humanizedISO8601DateTime();
const char = JSON.stringify(jsonData);
await charaWrite(uploadPath, char, png_name, response, { file_name: png_name });
fs.unlinkSync(uploadPath);
} else if (jsonData.name !== undefined) {
console.log('Found a v1 character file.');
if (jsonData.creator_notes) {
jsonData.creator_notes = jsonData.creator_notes.replace('Creator\'s notes go here.', '');
}
let char = {
'name': jsonData.name,
'description': jsonData.description ?? '',
'creatorcomment': jsonData.creatorcomment ?? jsonData.creator_notes ?? '',
'personality': jsonData.personality ?? '',
'first_mes': jsonData.first_mes ?? '',
'avatar': 'none',
'chat': jsonData.name + ' - ' + humanizedISO8601DateTime(),
'mes_example': jsonData.mes_example ?? '',
'scenario': jsonData.scenario ?? '',
'create_date': humanizedISO8601DateTime(),
'talkativeness': jsonData.talkativeness ?? 0.5,
'creator': jsonData.creator ?? '',
'tags': jsonData.tags ?? '',
};
char = convertToV2(char);
const charJSON = JSON.stringify(char);
await charaWrite(uploadPath, charJSON, png_name, response, { file_name: png_name });
fs.unlinkSync(uploadPath);
} else {
console.log('Unknown character card format');
response.send({ error: true });
}
} catch (err) {
console.log(err); console.log(err);
response.send({ error: true }); response.send({ error: true });
} }
let jsonData = JSON.parse(data);
if (jsonData.spec !== undefined) {
console.log('importing from v2 json');
importRisuSprites(jsonData);
unsetFavFlag(jsonData);
jsonData = readFromV2(jsonData);
jsonData['create_date'] = humanizedISO8601DateTime();
png_name = getPngName(jsonData.data?.name || jsonData.name);
let char = JSON.stringify(jsonData);
charaWrite(defaultAvatarPath, char, png_name, response, { file_name: png_name });
} else if (jsonData.name !== undefined) {
console.log('importing from v1 json');
jsonData.name = sanitize(jsonData.name);
if (jsonData.creator_notes) {
jsonData.creator_notes = jsonData.creator_notes.replace('Creator\'s notes go here.', '');
}
png_name = getPngName(jsonData.name);
let char = {
'name': jsonData.name,
'description': jsonData.description ?? '',
'creatorcomment': jsonData.creatorcomment ?? jsonData.creator_notes ?? '',
'personality': jsonData.personality ?? '',
'first_mes': jsonData.first_mes ?? '',
'avatar': 'none',
'chat': jsonData.name + ' - ' + humanizedISO8601DateTime(),
'mes_example': jsonData.mes_example ?? '',
'scenario': jsonData.scenario ?? '',
'create_date': humanizedISO8601DateTime(),
'talkativeness': jsonData.talkativeness ?? 0.5,
'creator': jsonData.creator ?? '',
'tags': jsonData.tags ?? '',
};
char = convertToV2(char);
let charJSON = JSON.stringify(char);
charaWrite(defaultAvatarPath, charJSON, png_name, response, { file_name: png_name });
} else if (jsonData.char_name !== undefined) {//json Pygmalion notepad
console.log('importing from gradio json');
jsonData.char_name = sanitize(jsonData.char_name);
if (jsonData.creator_notes) {
jsonData.creator_notes = jsonData.creator_notes.replace('Creator\'s notes go here.', '');
}
png_name = getPngName(jsonData.char_name);
let char = {
'name': jsonData.char_name,
'description': jsonData.char_persona ?? '',
'creatorcomment': jsonData.creatorcomment ?? jsonData.creator_notes ?? '',
'personality': '',
'first_mes': jsonData.char_greeting ?? '',
'avatar': 'none',
'chat': jsonData.name + ' - ' + humanizedISO8601DateTime(),
'mes_example': jsonData.example_dialogue ?? '',
'scenario': jsonData.world_scenario ?? '',
'create_date': humanizedISO8601DateTime(),
'talkativeness': jsonData.talkativeness ?? 0.5,
'creator': jsonData.creator ?? '',
'tags': jsonData.tags ?? '',
};
char = convertToV2(char);
let charJSON = JSON.stringify(char);
charaWrite(defaultAvatarPath, charJSON, png_name, response, { file_name: png_name });
} else {
console.log('Incorrect character format .json');
response.send({ error: true });
}
});
} else {
try {
var img_data = await charaRead(uploadPath, format);
if (img_data === undefined) throw new Error('Failed to read character data');
let jsonData = JSON.parse(img_data);
jsonData.name = sanitize(jsonData.data?.name || jsonData.name);
png_name = getPngName(jsonData.name);
if (jsonData.spec !== undefined) {
console.log('Found a v2 character file.');
importRisuSprites(jsonData);
unsetFavFlag(jsonData);
jsonData = readFromV2(jsonData);
jsonData['create_date'] = humanizedISO8601DateTime();
const char = JSON.stringify(jsonData);
await charaWrite(uploadPath, char, png_name, response, { file_name: png_name });
fs.unlinkSync(uploadPath);
} else if (jsonData.name !== undefined) {
console.log('Found a v1 character file.');
if (jsonData.creator_notes) {
jsonData.creator_notes = jsonData.creator_notes.replace('Creator\'s notes go here.', '');
}
let char = {
'name': jsonData.name,
'description': jsonData.description ?? '',
'creatorcomment': jsonData.creatorcomment ?? jsonData.creator_notes ?? '',
'personality': jsonData.personality ?? '',
'first_mes': jsonData.first_mes ?? '',
'avatar': 'none',
'chat': jsonData.name + ' - ' + humanizedISO8601DateTime(),
'mes_example': jsonData.mes_example ?? '',
'scenario': jsonData.scenario ?? '',
'create_date': humanizedISO8601DateTime(),
'talkativeness': jsonData.talkativeness ?? 0.5,
'creator': jsonData.creator ?? '',
'tags': jsonData.tags ?? '',
};
char = convertToV2(char);
const charJSON = JSON.stringify(char);
await charaWrite(uploadPath, charJSON, png_name, response, { file_name: png_name });
fs.unlinkSync(uploadPath);
} else {
console.log('Unknown character card format');
response.send({ error: true });
}
} catch (err) {
console.log(err);
response.send({ error: true });
} }
} }
}); });

View File

@ -36,20 +36,12 @@ function sanitizeHordeImagePrompt(prompt) {
prompt = prompt.replace(/\b(boy)\b/gmi, 'man'); prompt = prompt.replace(/\b(boy)\b/gmi, 'man');
prompt = prompt.replace(/\b(girls)\b/gmi, 'women'); prompt = prompt.replace(/\b(girls)\b/gmi, 'women');
prompt = prompt.replace(/\b(boys)\b/gmi, 'men'); prompt = prompt.replace(/\b(boys)\b/gmi, 'men');
//always remove these high risk words from prompt, as they add little value to image gen while increasing the risk the prompt gets flagged //always remove these high risk words from prompt, as they add little value to image gen while increasing the risk the prompt gets flagged
prompt = prompt.replace(/\b(under.age|under.aged|underage|underaged|loli|pedo|pedophile|(\w+).year.old|(\w+).years.old|minor|prepubescent|minors|shota)\b/gmi, ''); prompt = prompt.replace(/\b(under.age|under.aged|underage|underaged|loli|pedo|pedophile|(\w+).year.old|(\w+).years.old|minor|prepubescent|minors|shota)\b/gmi, '');
//replace risky subject nouns with person
//if nsfw is detected, do not remove it but apply additional precautions prompt = prompt.replace(/\b(youngster|infant|baby|toddler|child|teen|kid|kiddie|kiddo|teenager|student|preteen|pre.teen)\b/gmi, 'person');
let isNsfw = prompt.match(/\b(cock|ahegao|hentai|uncensored|lewd|cocks|deepthroat|deepthroating|dick|dicks|cumshot|lesbian|fuck|fucked|fucking|sperm|naked|nipples|tits|boobs|breasts|boob|breast|topless|ass|butt|fingering|masturbate|masturbating|bitch|blowjob|pussy|piss|asshole|dildo|dildos|vibrator|erection|foreskin|handjob|nude|penis|porn|vibrator|virgin|vagina|vulva|threesome|orgy|bdsm|hickey|condom|testicles|anal|bareback|bukkake|creampie|stripper|strap-on|missionary|clitoris|clit|clitty|cowgirl|fleshlight|sex|buttplug|milf|oral|sucking|bondage|orgasm|scissoring|railed|slut|sluts|slutty|cumming|cunt|faggot|sissy|anal|anus|cum|semen|scat|nsfw|xxx|explicit|erotic|horny|aroused|jizz|moan|rape|raped|raping|throbbing|humping)\b/gmi); //remove risky adjectives and related words
prompt = prompt.replace(/\b(young|younger|youthful|youth|small|smaller|smallest|girly|boyish|lil|tiny|teenaged|lit[tl]le|school.aged|school|highschool|kindergarten|teens|children|kids)\b/gmi, '');
if (isNsfw) {
//replace risky subject nouns with person
prompt = prompt.replace(/\b(youngster|infant|baby|toddler|child|teen|kid|kiddie|kiddo|teenager|student|preteen|pre.teen)\b/gmi, 'person');
//remove risky adjectives and related words
prompt = prompt.replace(/\b(young|younger|youthful|youth|small|smaller|smallest|girly|boyish|lil|tiny|teenaged|lit[tl]le|school.aged|school|highschool|kindergarten|teens|children|kids)\b/gmi, '');
}
return prompt; return prompt;
} }

View File

@ -4,13 +4,15 @@ const express = require('express');
const FormData = require('form-data'); const FormData = require('form-data');
const fs = require('fs'); const fs = require('fs');
const { jsonParser, urlencodedParser } = require('../express-common'); const { jsonParser, urlencodedParser } = require('../express-common');
const { getConfigValue } = require('../util'); const { getConfigValue, mergeObjectWithYaml, excludeKeysByYaml } = require('../util');
const router = express.Router(); const router = express.Router();
router.post('/caption-image', jsonParser, async (request, response) => { router.post('/caption-image', jsonParser, async (request, response) => {
try { try {
let key = ''; let key = '';
let headers = {};
let bodyParams = {};
if (request.body.api === 'openai' && !request.body.reverse_proxy) { if (request.body.api === 'openai' && !request.body.reverse_proxy) {
key = readSecret(SECRET_KEYS.OPENAI); key = readSecret(SECRET_KEYS.OPENAI);
@ -24,7 +26,13 @@ router.post('/caption-image', jsonParser, async (request, response) => {
key = request.body.proxy_password; key = request.body.proxy_password;
} }
if (!key && !request.body.reverse_proxy) { if (request.body.api === 'custom') {
key = readSecret(SECRET_KEYS.CUSTOM);
mergeObjectWithYaml(bodyParams, request.body.custom_include_body);
mergeObjectWithYaml(headers, request.body.custom_include_headers);
}
if (!key && !request.body.reverse_proxy && request.body.api !== 'custom') {
console.log('No key found for API', request.body.api); console.log('No key found for API', request.body.api);
return response.sendStatus(400); return response.sendStatus(400);
} }
@ -41,6 +49,7 @@ router.post('/caption-image', jsonParser, async (request, response) => {
}, },
], ],
max_tokens: 500, max_tokens: 500,
...bodyParams,
}; };
const captionSystemPrompt = getConfigValue('openai.captionSystemPrompt'); const captionSystemPrompt = getConfigValue('openai.captionSystemPrompt');
@ -51,10 +60,13 @@ router.post('/caption-image', jsonParser, async (request, response) => {
}); });
} }
if (request.body.api === 'custom') {
excludeKeysByYaml(body, request.body.custom_exclude_body);
}
console.log('Multimodal captioning request', body); console.log('Multimodal captioning request', body);
let apiUrl = ''; let apiUrl = '';
let headers = {};
if (request.body.api === 'openrouter') { if (request.body.api === 'openrouter') {
apiUrl = 'https://openrouter.ai/api/v1/chat/completions'; apiUrl = 'https://openrouter.ai/api/v1/chat/completions';
@ -69,6 +81,10 @@ router.post('/caption-image', jsonParser, async (request, response) => {
apiUrl = `${request.body.reverse_proxy}/chat/completions`; apiUrl = `${request.body.reverse_proxy}/chat/completions`;
} }
if (request.body.api === 'custom') {
apiUrl = `${request.body.server_url}/chat/completions`;
}
const result = await fetch(apiUrl, { const result = await fetch(apiUrl, {
method: 'POST', method: 'POST',
headers: { headers: {

View File

@ -1,74 +1,67 @@
/** /**
* Convert a prompt from the ChatML objects to the format used by Claude. * Convert a prompt from the ChatML objects to the format used by Claude.
* @param {object[]} messages Array of messages * @param {object[]} messages Array of messages
* @param {boolean} addHumanPrefix Add Human prefix * @param {boolean} addAssistantPostfix Add Assistant postfix.
* @param {boolean} addAssistantPostfix Add Assistant postfix * @param {string} addAssistantPrefill Add Assistant prefill after the assistant postfix.
* @param {boolean} withSystemPrompt Build system prompt before "\n\nHuman: " * @param {boolean} withSysPromptSupport Indicates if the Claude model supports the system prompt format.
* @param {boolean} useSystemPrompt Indicates if the system prompt format should be used.
* @param {string} addSysHumanMsg Add Human message between system prompt and assistant.
* @returns {string} Prompt for Claude * @returns {string} Prompt for Claude
* @copyright Prompt Conversion script taken from RisuAI by kwaroran (GPLv3). * @copyright Prompt Conversion script taken from RisuAI by kwaroran (GPLv3).
*/ */
function convertClaudePrompt(messages, addHumanPrefix, addAssistantPostfix, withSystemPrompt) { function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, withSysPromptSupport, useSystemPrompt, addSysHumanMsg) {
// Claude doesn't support message names, so we'll just add them to the message content.
for (const message of messages) { //Prepare messages for claude.
if (message.name && message.role !== 'system') { if (messages.length > 0) {
message.content = message.name + ': ' + message.content; messages[0].role = 'system';
delete message.name; //Add the assistant's message to the end of messages.
if (addAssistantPostfix) {
messages.push({
role: 'assistant',
content: addAssistantPrefill || '',
});
} }
} // Find the index of the first message with an assistant role and check for a "'user' role/Human:" before it.
let hasUser = false;
let systemPrompt = ''; const firstAssistantIndex = messages.findIndex((message, i) => {
if (withSystemPrompt) { if (i >= 0 && (message.role === 'user' || message.content.includes('\n\nHuman: '))) {
let lastSystemIdx = -1; hasUser = true;
}
for (let i = 0; i < messages.length - 1; i++) { return message.role === 'assistant' && i > 0;
const message = messages[i]; });
if (message.role === 'system' && !message.name) { // When 2.1+ and 'Use system prompt" checked, switches to the system prompt format by setting the first message's role to the 'system'.
systemPrompt += message.content + '\n\n'; // Inserts the human's message before the first the assistant one, if there are no such message or prefix found.
} else { if (withSysPromptSupport && useSystemPrompt) {
lastSystemIdx = i - 1; messages[0].role = 'system';
break; if (firstAssistantIndex > 0 && addSysHumanMsg && !hasUser) {
messages.splice(firstAssistantIndex, 0, {
role: 'user',
content: addSysHumanMsg,
});
}
} else {
// Otherwise, use the default message format by setting the first message's role to 'user'(compatible with all claude models including 2.1.)
messages[0].role = 'user';
// Fix messages order for default message format when(messages > Context Size) by merging two messages with "\n\nHuman: " prefixes into one, before the first Assistant's message.
if (firstAssistantIndex > 0) {
messages[firstAssistantIndex - 1].role = firstAssistantIndex - 1 !== 0 && messages[firstAssistantIndex - 1].role === 'user' ? 'FixHumMsg' : messages[firstAssistantIndex - 1].role;
} }
} }
if (lastSystemIdx >= 0) {
messages.splice(0, lastSystemIdx + 1);
}
} }
let requestPrompt = messages.map((v) => { // Convert messages to the prompt.
let prefix = ''; let requestPrompt = messages.map((v, i) => {
switch (v.role) { // Set prefix according to the role.
case 'assistant': let prefix = {
prefix = '\n\nAssistant: '; 'assistant': '\n\nAssistant: ',
break; 'user': '\n\nHuman: ',
case 'user': 'system': i === 0 ? '' : v.name === 'example_assistant' ? '\n\nA: ' : v.name === 'example_user' ? '\n\nH: ' : '\n\n',
prefix = '\n\nHuman: '; 'FixHumMsg': '\n\nFirst message: ',
break; }[v.role] ?? '';
case 'system': // Claude doesn't support message names, so we'll just add them to the message content.
// According to the Claude docs, H: and A: should be used for example conversations. return `${prefix}${v.name && v.role !== 'system' ? `${v.name}: ` : ''}${v.content}`;
if (v.name === 'example_assistant') {
prefix = '\n\nA: ';
} else if (v.name === 'example_user') {
prefix = '\n\nH: ';
} else {
prefix = '\n\n';
}
break;
}
return prefix + v.content;
}).join(''); }).join('');
if (addHumanPrefix) {
requestPrompt = '\n\nHuman: ' + requestPrompt;
}
if (addAssistantPostfix) {
requestPrompt = requestPrompt + '\n\nAssistant: ';
}
if (withSystemPrompt) {
requestPrompt = systemPrompt + requestPrompt;
}
return requestPrompt; return requestPrompt;
} }

View File

@ -27,6 +27,7 @@ const SECRET_KEYS = {
SERPAPI: 'api_key_serpapi', SERPAPI: 'api_key_serpapi',
TOGETHERAI: 'api_key_togetherai', TOGETHERAI: 'api_key_togetherai',
MISTRALAI: 'api_key_mistralai', MISTRALAI: 'api_key_mistralai',
CUSTOM: 'api_key_custom',
}; };
/** /**

View File

@ -399,6 +399,74 @@ function forwardFetchResponse(from, to) {
}); });
} }
/**
 * Parses a YAML string and merges the resulting mapping(s) into the target object.
 * Accepts either a single YAML mapping or an array of mappings; any other
 * parsed value is ignored. Parse errors are deliberately swallowed — this is a
 * best-effort merge of user-supplied YAML.
 * @param {object} obj Target object, mutated in place
 * @param {string} yamlString YAML-serialized object
 * @returns {void} Nothing
 */
function mergeObjectWithYaml(obj, yamlString) {
    if (!yamlString) {
        return;
    }

    try {
        const parsed = yaml.parse(yamlString);

        if (Array.isArray(parsed)) {
            // Merge each plain-object entry of the array, in order (later keys win).
            parsed
                .filter(entry => entry && typeof entry === 'object' && !Array.isArray(entry))
                .forEach(entry => Object.assign(obj, entry));
        } else if (parsed && typeof parsed === 'object') {
            Object.assign(obj, parsed);
        }
    } catch {
        // Best effort: ignore malformed YAML.
    }
}
/**
 * Removes keys from the object based on a YAML-serialized value.
 * The YAML may be an array of key names, a mapping (whose keys are removed),
 * or a single string key. Parse errors are deliberately swallowed — this is a
 * best-effort exclusion of user-supplied keys.
 * @param {object} obj Object to mutate in place
 * @param {string} yamlString YAML-serialized array, object, or string
 * @returns {void} Nothing
 */
function excludeKeysByYaml(obj, yamlString) {
    if (!yamlString) {
        return;
    }

    try {
        const parsedObject = yaml.parse(yamlString);

        if (Array.isArray(parsedObject)) {
            parsedObject.forEach(key => {
                delete obj[key];
            });
        } else if (parsedObject && typeof parsedObject === 'object') {
            // Null guard: typeof null === 'object', and Object.keys(null) would
            // throw (previously only masked by the surrounding catch).
            Object.keys(parsedObject).forEach(key => {
                delete obj[key];
            });
        } else if (typeof parsedObject === 'string') {
            delete obj[parsedObject];
        }
    } catch {
        // Do nothing
    }
}
/**
 * Strips a single trailing slash, then a trailing "/v1" segment, from a string
 * (typically an API base URL). Nullish input yields an empty string.
 * @param {string} str Input string
 * @returns {string} Trimmed string
 */
function trimV1(str) {
    let result = String(str ?? '');
    if (result.endsWith('/')) {
        result = result.slice(0, -1);
    }
    if (result.endsWith('/v1')) {
        result = result.slice(0, -'/v1'.length);
    }
    return result;
}
module.exports = { module.exports = {
getConfig, getConfig,
getConfigValue, getConfigValue,
@ -420,4 +488,7 @@ module.exports = {
getImages, getImages,
forwardFetchResponse, forwardFetchResponse,
getHexString, getHexString,
mergeObjectWithYaml,
excludeKeysByYaml,
trimV1,
}; };