Mirror of https://github.com/SillyTavern/SillyTavern.git (synced 2025-02-07 15:48:46 +01:00)

commit d3024d3b9a
Merge remote-tracking branch 'upstream/staging' into staging
@@ -2,7 +2,6 @@
     "firstRun": true,
     "username": "User",
     "api_server": "http://127.0.0.1:5000/api",
-    "api_server_textgenerationwebui": "http://127.0.0.1:5000/api",
     "preset_settings": "RecoveredRuins",
     "user_avatar": "user-default.png",
     "amount_gen": 250,
public/img/ollama.svg — new file (56 lines, 4.5 KiB)
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg
+   version="1.0"
+   width="467.388pt"
+   height="618.89093pt"
+   viewBox="0 0 467.388 618.89093"
+   preserveAspectRatio="xMidYMid"
+   id="svg5"
+   sodipodi:docname="ollama.svg"
+   inkscape:version="1.3 (0e150ed, 2023-07-21)"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:svg="http://www.w3.org/2000/svg">
+  <defs
+     id="defs5" />
+  <sodipodi:namedview
+     id="namedview5"
+     pagecolor="#ffffff"
+     bordercolor="#000000"
+     borderopacity="0.25"
+     inkscape:showpageshadow="2"
+     inkscape:pageopacity="0.0"
+     inkscape:pagecheckerboard="false"
+     inkscape:deskcolor="#d1d1d1"
+     inkscape:document-units="pt"
+     inkscape:zoom="0.20971564"
+     inkscape:cx="309.9435"
+     inkscape:cy="278.94915"
+     inkscape:window-width="1280"
+     inkscape:window-height="688"
+     inkscape:window-x="0"
+     inkscape:window-y="25"
+     inkscape:window-maximized="1"
+     inkscape:current-layer="svg5" />
+  <g
+     transform="matrix(0.1,0,0,-0.1,-188.01849,632.89095)"
+     stroke="none"
+     id="g5">
+    <path
+       d="m 2849,6312 c -219,-73 -378,-347 -444,-768 -34,-213 -29,-629 9,-774 l 13,-49 -105,-103 c -143,-140 -201,-210 -265,-320 -85,-145 -143,-312 -167,-477 -20,-135 -8,-404 23,-522 34,-129 78,-237 138,-337 l 50,-83 -50,-117 c -96,-227 -130,-376 -138,-618 -12,-345 48,-589 208,-854 l 21,-35 -35,-57 c -43,-72 -100,-243 -122,-368 -26,-149 -31,-393 -11,-523 10,-59 22,-121 28,-138 l 10,-29 177,2 176,3 -1,40 c 0,22 -11,76 -23,120 -42,149 -26,433 34,610 13,39 51,120 84,179 33,60 63,122 67,138 10,46 -4,109 -34,154 -15,22 -46,69 -69,103 -171,254 -206,664 -88,1017 27,80 77,185 130,274 63,105 56,178 -25,260 -138,138 -221,394 -207,634 21,357 227,680 532,833 130,66 183,77 375,78 96,0 183,4 193,9 10,5 36,45 58,90 121,242 304,391 594,484 72,23 96,26 235,26 148,0 160,-1 250,-32 281,-94 469,-249 577,-478 50,-105 54,-107 215,-99 153,8 244,-6 365,-57 143,-59 293,-181 389,-314 62,-87 130,-236 161,-351 22,-84 26,-119 26,-243 0,-124 -4,-159 -26,-242 -31,-118 -101,-257 -167,-332 -83,-95 -88,-166 -19,-277 128,-206 190,-431 191,-689 1,-277 -53,-446 -217,-684 -36,-52 -51,-114 -41,-164 4,-16 34,-78 67,-138 33,-59 71,-140 84,-178 60,-182 76,-461 34,-611 -12,-44 -23,-98 -23,-120 l -1,-40 176,-3 177,-2 11,31 c 46,134 52,474 11,683 -25,129 -78,281 -121,351 l -31,50 21,35 c 159,261 219,507 208,848 -8,252 -53,444 -155,663 l -40,86 31,49 c 59,94 119,235 150,352 29,112 31,126 31,317 1,224 -9,294 -70,472 -19,55 -34,106 -34,113 0,21 -109,198 -159,257 -26,32 -98,107 -159,167 -61,60 -109,113 -106,118 16,25 35,205 41,368 8,260 -15,478 -72,675 -88,303 -214,474 -393,534 -207,70 -405,-47 -542,-318 -75,-151 -139,-379 -156,-558 l -7,-72 -99,50 c -189,95 -399,149 -578,149 -173,0 -383,-52 -560,-138 -52,-26 -98,-48 -101,-50 -3,-1 -9,28 -13,65 -29,288 -146,595 -282,742 -121,130 -274,179 -415,133 z m 153,-374 c 119,-127 208,-471 208,-804 0,-85 -4,-112 -20,-144 -17,-34 -25,-40 -53,-40 -51,0 -267,-30 -326,-45 -30,-8 -56,-13 -58,-12 -1,2 -7,67 -14,145 -16,215 7,467 62,657 39,133 121,275 159,275 7,0 25,-14 42,-32 z m 2529,1 c 124,-133 208,-558 179,-909 -6,-74 -13,-136 -15,-138 -2,-2 -25,3 -52,11 -39,12 -122,24 -352,50 -7,1 -22,18 -33,37 -18,32 -19,50 -15,200 8,255 53,468 132,635 34,71 93,145 115,145 7,0 25,-14 41,-31 z"
+       id="path1" />
+    <path
+       d="m 4115,3729 c -390,-29 -735,-284 -824,-609 -26,-93 -28,-244 -5,-334 38,-149 171,-324 306,-404 85,-50 204,-99 288,-117 99,-22 453,-32 584,-16 350,41 626,253 700,538 20,78 21,240 1,318 -36,140 -144,303 -266,401 -218,174 -474,247 -784,223 z m 329,-258 c 291,-76 497,-291 500,-521 3,-227 -192,-414 -479,-460 -80,-13 -403,-13 -485,1 -212,34 -390,160 -452,319 -29,77 -29,194 1,272 79,206 278,353 544,404 97,18 269,11 371,-15 z"
+       id="path2" />
+    <path
+       d="m 4038,3151 c -58,-52 -40,-123 47,-177 43,-27 45,-31 40,-64 -19,-120 -19,-127 8,-154 22,-22 35,-26 85,-26 91,0 123,41 103,130 -17,74 -15,83 33,113 56,35 76,66 76,116 0,32 -6,44 -31,65 -39,33 -81,33 -136,1 l -43,-24 -42,24 c -58,33 -100,32 -140,-4 z"
+       id="path3" />
+    <path
+       d="m 2932,3664 c -107,-53 -169,-209 -128,-319 44,-115 194,-177 303,-124 89,43 153,148 153,250 0,171 -171,271 -328,193 z"
+       id="path4" />
+    <path
+       d="m 5320,3675 c -119,-54 -165,-193 -104,-320 27,-58 88,-118 141,-141 68,-29 162,-10 227,47 86,76 97,174 35,297 -45,89 -101,125 -198,129 -44,2 -78,-2 -101,-12 z"
+       id="path5" />
+  </g>
+</svg>
@@ -1754,6 +1754,7 @@
                        <option value="tabby">TabbyAPI</option>
                        <option value="koboldcpp">KoboldCpp</option>
                        <option value="llamacpp">llama.cpp</option>
+                        <option value="ollama">Ollama</option>
                        <option value="togetherai">TogetherAI</option>
                    </select>
                </div>
@@ -1794,7 +1795,11 @@
                    </div>
                    <div class="flex1">
                        <h4>Mancer Model</h4>
-                        <select id="mancer_model"></select>
+                        <select id="mancer_model">
+                            <option>
+                                -- Connect to the API --
+                            </option>
+                        </select>
                    </div>
                </div>
                <div data-tg-type="ooba" class="flex-container flexFlowColumn">
@@ -1845,6 +1850,32 @@
                    <input id="llamacpp_api_url_text" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" data-server-history="llamacpp">
                </div>
            </div>
+            <div data-tg-type="ollama">
+                <div class="flex-container flexFlowColumn">
+                    <a href="https://github.com/jmorganca/ollama" target="_blank">
+                        jmorganca/ollama
+                    </a>
+                </div>
+                <div class="flex1">
+                    <h4 data-i18n="API url">API URL</h4>
+                    <small data-i18n="Example: http://127.0.0.1:11434">Example: http://127.0.0.1:11434</small>
+                    <input id="ollama_api_url_text" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" data-server-history="ollama">
+                </div>
+                <div class="flex1">
+                    <h4>
+                        <span data-i18n="Ollama Model">Ollama Model</span>
+                    </h4>
+                    <select id="ollama_model">
+                        <option>
+                            -- Connect to the API --
+                        </option>
+                    </select>
+                    <div id="ollama_download_model" class="menu_button menu_button_icon">
+                        <i class="fa-solid fa-download"></i>
+                        <span data-i18n="Download">Download</span>
+                    </div>
+                </div>
+            </div>
            <div data-tg-type="tabby">
                <div class="flex-container flexFlowColumn">
                    <a href="https://github.com/theroyallab/tabbyAPI" target="_blank">
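Note (not part of the commit): the example URL above, http://127.0.0.1:11434, is Ollama's default listen address. A minimal sketch of checking that a local Ollama server is reachable before connecting, using Ollama's documented /api/tags model-listing endpoint:

    // Illustrative only; assumes Ollama is running locally on its default port.
    const response = await fetch('http://127.0.0.1:11434/api/tags');
    const { models } = await response.json();
    console.log(models.map(m => m.name)); // e.g. ['llama2:latest']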
@@ -8,7 +8,7 @@
     "system_sequence_prefix": "[INST] <<SYS>>\n",
     "system_sequence_suffix": "\n<</SYS>>\n",
     "stop_sequence": "",
-    "separator_sequence": "\n",
+    "separator_sequence": " ",
     "wrap": false,
     "macro": true,
     "names": false,

public/script.js — 231 changed lines
@@ -15,14 +15,13 @@ import {
     loadTextGenSettings,
     generateTextGenWithStreaming,
     getTextGenGenerationData,
-    formatTextGenURL,
-    getTextGenUrlSourceId,
     textgen_types,
     textgenerationwebui_banned_in_macros,
     getTextGenServer,
+    validateTextGenUrl,
 } from './scripts/textgen-settings.js';
 
-const { MANCER, TOGETHERAI, OOBA, APHRODITE } = textgen_types;
+const { MANCER, TOGETHERAI, OOBA, APHRODITE, OLLAMA } = textgen_types;
 
 import {
     world_info,
@@ -189,7 +188,7 @@ import { createPersona, initPersonas, selectCurrentPersona, setPersonaDescriptio
 import { getBackgrounds, initBackgrounds, loadBackgroundSettings, background_settings } from './scripts/backgrounds.js';
 import { hideLoader, showLoader } from './scripts/loader.js';
 import { BulkEditOverlay, CharacterContextMenu } from './scripts/BulkEditOverlay.js';
-import { loadMancerModels, loadTogetherAIModels } from './scripts/textgen-models.js';
+import { loadMancerModels, loadOllamaModels, loadTogetherAIModels } from './scripts/textgen-models.js';
 import { appendFileContent, hasPendingFileAttachment, populateFileAttachment } from './scripts/chats.js';
 import { replaceVariableMacros } from './scripts/variables.js';
 import { initPresetManager } from './scripts/preset-manager.js';
@@ -250,7 +249,6 @@ export {
     name1,
     name2,
     is_send_press,
-    api_server_textgenerationwebui,
     max_context,
     chat_metadata,
     streamingProcessor,
@@ -662,7 +660,6 @@ let chat_file_for_del = '';
 let online_status = 'no_connection';
 
 let api_server = '';
-let api_server_textgenerationwebui = '';
 
 let is_send_press = false; //Send generation
 
@@ -888,7 +885,8 @@ async function getStatusKobold() {
 
     if (!endpoint) {
         console.warn('No endpoint for status check');
-        return;
+        online_status = 'no_connection';
+        return resultCheckStatus();
     }
 
     try {
@@ -931,11 +929,12 @@ async function getStatusKobold() {
 async function getStatusTextgen() {
     const url = '/api/backends/text-completions/status';
 
-    let endpoint = getTextGenServer();
+    const endpoint = getTextGenServer();
 
     if (!endpoint) {
         console.warn('No endpoint for status check');
-        return;
+        online_status = 'no_connection';
+        return resultCheckStatus();
     }
 
     try {
@@ -953,11 +952,14 @@ async function getStatusTextgen() {
     const data = await response.json();
 
     if (textgen_settings.type === MANCER) {
-        online_status = textgen_settings.mancer_model;
         loadMancerModels(data?.data);
+        online_status = textgen_settings.mancer_model;
     } else if (textgen_settings.type === TOGETHERAI) {
-        online_status = textgen_settings.togetherai_model;
         loadTogetherAIModels(data?.data);
+        online_status = textgen_settings.togetherai_model;
+    } else if (textgen_settings.type === OLLAMA) {
+        loadOllamaModels(data?.data);
+        online_status = textgen_settings.ollama_model || 'Connected';
     } else {
         online_status = data?.result;
     }
@@ -4468,7 +4470,7 @@ function extractMessageFromData(data) {
        case 'koboldhorde':
            return data.text;
        case 'textgenerationwebui':
-            return data.choices?.[0]?.text ?? data.content;
+            return data.choices?.[0]?.text ?? data.content ?? data.response;
        case 'novel':
            return data.output;
        case 'openai':
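Context for the extra fallback above (illustrative, not part of the commit): a non-streaming Ollama /api/generate reply carries the generated text in its `response` field rather than in OpenAI-style `choices`. A minimal sketch of the shapes involved:

    // Assumed example payload, following the Ollama API documentation.
    const data = { model: 'llama2:latest', response: 'Hello there!', done: true };
    const message = data.choices?.[0]?.text ?? data.content ?? data.response; // 'Hello there!'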
@@ -5735,13 +5737,6 @@ async function getSettings() {
 
     setWorldInfoSettings(settings.world_info_settings ?? settings, data);
 
-    api_server_textgenerationwebui = settings.api_server_textgenerationwebui;
-    $('#textgenerationwebui_api_url_text').val(api_server_textgenerationwebui);
-    $('#aphrodite_api_url_text').val(api_server_textgenerationwebui);
-    $('#tabby_api_url_text').val(api_server_textgenerationwebui);
-    $('#llamacpp_api_url_text').val(api_server_textgenerationwebui);
-    $('#koboldcpp_api_url_text').val(api_server_textgenerationwebui);
-
     selected_button = settings.selected_button;
 
     if (data.enable_extensions) {
@@ -5781,7 +5776,6 @@ async function saveSettings(type) {
            active_character: active_character,
            active_group: active_group,
            api_server: api_server,
-            api_server_textgenerationwebui: api_server_textgenerationwebui,
            preset_settings: preset_settings,
            user_avatar: user_avatar,
            amount_gen: amount_gen,
@@ -7478,100 +7472,105 @@ const swipe_right = () => {
     }
 };
 
+const CONNECT_API_MAP = {
+    'kobold': {
+        button: '#api_button',
+    },
+    'horde': {
+        selected: 'koboldhorde',
+    },
+    'novel': {
+        button: '#api_button_novel',
+    },
+    'ooba': {
+        selected: 'textgenerationwebui',
+        button: '#api_button_textgenerationwebui',
+        type: textgen_types.OOBA,
+    },
+    'tabby': {
+        selected: 'textgenerationwebui',
+        button: '#api_button_textgenerationwebui',
+        type: textgen_types.TABBY,
+    },
+    'llamacpp': {
+        selected: 'textgenerationwebui',
+        button: '#api_button_textgenerationwebui',
+        type: textgen_types.LLAMACPP,
+    },
+    'ollama': {
+        selected: 'textgenerationwebui',
+        button: '#api_button_textgenerationwebui',
+        type: textgen_types.OLLAMA,
+    },
+    'mancer': {
+        selected: 'textgenerationwebui',
+        button: '#api_button_textgenerationwebui',
+        type: textgen_types.MANCER,
+    },
+    'aphrodite': {
+        selected: 'textgenerationwebui',
+        button: '#api_button_textgenerationwebui',
+        type: textgen_types.APHRODITE,
+    },
+    'kcpp': {
+        selected: 'textgenerationwebui',
+        button: '#api_button_textgenerationwebui',
+        type: textgen_types.KOBOLDCPP,
+    },
+    'togetherai': {
+        selected: 'textgenerationwebui',
+        button: '#api_button_textgenerationwebui',
+        type: textgen_types.TOGETHERAI,
+    },
+    'oai': {
+        selected: 'openai',
+        source: 'openai',
+        button: '#api_button_openai',
+    },
+    'claude': {
+        selected: 'openai',
+        source: 'claude',
+        button: '#api_button_openai',
+    },
+    'windowai': {
+        selected: 'openai',
+        source: 'windowai',
+        button: '#api_button_openai',
+    },
+    'openrouter': {
+        selected: 'openai',
+        source: 'openrouter',
+        button: '#api_button_openai',
+    },
+    'scale': {
+        selected: 'openai',
+        source: 'scale',
+        button: '#api_button_openai',
+    },
+    'ai21': {
+        selected: 'openai',
+        source: 'ai21',
+        button: '#api_button_openai',
+    },
+    'makersuite': {
+        selected: 'openai',
+        source: 'makersuite',
+        button: '#api_button_openai',
+    },
+    'mistralai': {
+        selected: 'openai',
+        source: 'mistralai',
+        button: '#api_button_openai',
+    },
+};
+
 /**
  * @param {string} text API name
  */
 async function connectAPISlash(_, text) {
     if (!text) return;
 
-    const apiMap = {
-        'kobold': {
-            button: '#api_button',
-        },
-        'horde': {
-            selected: 'koboldhorde',
-        },
-        'novel': {
-            button: '#api_button_novel',
-        },
-        'ooba': {
-            selected: 'textgenerationwebui',
-            button: '#api_button_textgenerationwebui',
-            type: textgen_types.OOBA,
-        },
-        'tabby': {
-            selected: 'textgenerationwebui',
-            button: '#api_button_textgenerationwebui',
-            type: textgen_types.TABBY,
-        },
-        'llamacpp': {
-            selected: 'textgenerationwebui',
-            button: '#api_button_textgenerationwebui',
-            type: textgen_types.LLAMACPP,
-        },
-        'mancer': {
-            selected: 'textgenerationwebui',
-            button: '#api_button_textgenerationwebui',
-            type: textgen_types.MANCER,
-        },
-        'aphrodite': {
-            selected: 'textgenerationwebui',
-            button: '#api_button_textgenerationwebui',
-            type: textgen_types.APHRODITE,
-        },
-        'kcpp': {
-            selected: 'textgenerationwebui',
-            button: '#api_button_textgenerationwebui',
-            type: textgen_types.KOBOLDCPP,
-        },
-        'togetherai': {
-            selected: 'textgenerationwebui',
-            button: '#api_button_textgenerationwebui',
-            type: textgen_types.TOGETHERAI,
-        },
-        'oai': {
-            selected: 'openai',
-            source: 'openai',
-            button: '#api_button_openai',
-        },
-        'claude': {
-            selected: 'openai',
-            source: 'claude',
-            button: '#api_button_openai',
-        },
-        'windowai': {
-            selected: 'openai',
-            source: 'windowai',
-            button: '#api_button_openai',
-        },
-        'openrouter': {
-            selected: 'openai',
-            source: 'openrouter',
-            button: '#api_button_openai',
-        },
-        'scale': {
-            selected: 'openai',
-            source: 'scale',
-            button: '#api_button_openai',
-        },
-        'ai21': {
-            selected: 'openai',
-            source: 'ai21',
-            button: '#api_button_openai',
-        },
-        'makersuite': {
-            selected: 'openai',
-            source: 'makersuite',
-            button: '#api_button_openai',
-        },
-        'mistralai': {
-            selected: 'openai',
-            source: 'mistralai',
-            button: '#api_button_openai',
-        },
-    };
-
-    const apiConfig = apiMap[text.toLowerCase()];
+    const apiConfig = CONNECT_API_MAP[text.toLowerCase()];
     if (!apiConfig) {
         toastr.error(`Error: ${text} is not a valid API`);
         return;
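For illustration (not part of the commit), this is how a slash command now resolves through the hoisted map — a minimal sketch:

    // '/api ollama' looks up the new entry and drives the same connect flow as the UI.
    const apiConfig = CONNECT_API_MAP['ollama'];
    // -> { selected: 'textgenerationwebui', button: '#api_button_textgenerationwebui', type: textgen_types.OLLAMA }
    // connectAPISlash() then switches the main API to Text Completion, sets the Ollama type,
    // and clicks the connect button for it.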
@@ -7855,7 +7854,7 @@ jQuery(async function () {
    }
 
    registerSlashCommand('dupe', DupeChar, [], '– duplicates the currently selected character', true, true);
-    registerSlashCommand('api', connectAPISlash, [], '<span class="monospace">(kobold, horde, novel, ooba, tabby, mancer, aphrodite, kcpp, oai, claude, windowai, openrouter, scale, ai21, makersuite, mistralai, togetherai, llamacpp)</span> – connect to an API', true, true);
+    registerSlashCommand('api', connectAPISlash, [], `<span class="monospace">(${Object.keys(CONNECT_API_MAP)})</span> – connect to an API`, true, true);
    registerSlashCommand('impersonate', doImpersonate, ['imp'], '– calls an impersonation response', true, true);
    registerSlashCommand('delchat', doDeleteChat, [], '– deletes the current chat', true, true);
    registerSlashCommand('closechat', doCloseChat, [], '– closes the current chat', true, true);
@@ -8414,19 +8413,7 @@ jQuery(async function () {
        await writeSecret(SECRET_KEYS.TOGETHERAI, togetherKey);
    }
 
-    const urlSourceId = getTextGenUrlSourceId();
-
-    if (urlSourceId && $(urlSourceId).val() !== '') {
-        let value = formatTextGenURL(String($(urlSourceId).val()).trim());
-        if (!value) {
-            callPopup('Please enter a valid URL.', 'text');
-            return;
-        }
-
-        $(urlSourceId).val(value);
-        api_server_textgenerationwebui = value;
-    }
+    validateTextGenUrl();
 
    startStatusLoading();
    main_api = 'textgenerationwebui';
    saveSettingsDebounced();
@@ -4,7 +4,6 @@ import {
    online_status,
    main_api,
    api_server,
-    api_server_textgenerationwebui,
    is_send_press,
    max_context,
    saveSettingsDebounced,
@@ -35,7 +34,7 @@ import {
 import { debounce, delay, getStringHash, isValidUrl } from './utils.js';
 import { chat_completion_sources, oai_settings } from './openai.js';
 import { getTokenCount } from './tokenizers.js';
-import { textgen_types, textgenerationwebui_settings as textgen_settings } from './textgen-settings.js';
+import { textgen_types, textgenerationwebui_settings as textgen_settings, getTextGenServer } from './textgen-settings.js';
 
 import Bowser from '../lib/bowser.min.js';
 
@@ -387,7 +386,7 @@ function RA_autoconnect(PrevApi) {
                ) {
                    $('#api_button_textgenerationwebui').trigger('click');
                }
-                else if (api_server_textgenerationwebui && isValidUrl(api_server_textgenerationwebui)) {
+                else if (isValidUrl(getTextGenServer())) {
                    $('#api_button_textgenerationwebui').trigger('click');
                }
                break;
@@ -845,7 +845,7 @@ async function unhideMessageCallback(_, arg) {
 /**
  * Copium for running group actions when the member is offscreen.
  * @param {number} chid - character ID
- * @param {string} action - one of 'enable', 'disable', 'up', 'down', 'peek', 'remove'
+ * @param {string} action - one of 'enable', 'disable', 'up', 'down', 'view', 'remove'
  * @returns {void}
  */
 function performGroupMemberAction(chid, action) {
@@ -868,7 +868,9 @@ function performGroupMemberAction(chid, action) {
 
    if (wasOffscreen) {
        $(pageSizeSelector).val(paginationValue).trigger('change');
-        $(paginationSelector).pagination('go', pageValue);
+        if ($(paginationSelector).length) {
+            $(paginationSelector).pagination('go', pageValue);
+        }
    }
 }
 
@@ -958,7 +960,7 @@ async function peekCallback(_, arg) {
        return '';
    }
 
-    performGroupMemberAction(chid, 'peek');
+    performGroupMemberAction(chid, 'view');
    return '';
 }
 
@@ -1,10 +1,30 @@
-import { setGenerationParamsFromPreset } from '../script.js';
+import { callPopup, getRequestHeaders, setGenerationParamsFromPreset } from '../script.js';
 import { isMobile } from './RossAscends-mods.js';
-import { textgenerationwebui_settings as textgen_settings } from './textgen-settings.js';
+import { textgenerationwebui_settings as textgen_settings, textgen_types } from './textgen-settings.js';
 
 let mancerModels = [];
 let togetherModels = [];
 
+export async function loadOllamaModels(data) {
+    if (!Array.isArray(data)) {
+        console.error('Invalid Ollama models data', data);
+        return;
+    }
+
+    if (!data.find(x => x.id === textgen_settings.ollama_model)) {
+        textgen_settings.ollama_model = data[0]?.id || '';
+    }
+
+    $('#ollama_model').empty();
+    for (const model of data) {
+        const option = document.createElement('option');
+        option.value = model.id;
+        option.text = model.name;
+        option.selected = model.id === textgen_settings.ollama_model;
+        $('#ollama_model').append(option);
+    }
+}
+
 export async function loadTogetherAIModels(data) {
    if (!Array.isArray(data)) {
        console.error('Invalid Together AI models data', data);
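For illustration (not part of the commit), the array handed to loadOllamaModels() is what the server's /status route produces from Ollama's /api/tags listing, with each entry remapped to carry an `id` equal to its `name`. A minimal sketch with an assumed payload:

    await loadOllamaModels([
        { id: 'llama2:latest', name: 'llama2:latest' },
        { id: 'mistral:7b', name: 'mistral:7b' },
    ]);
    // Populates #ollama_model and, if the previously selected model is gone,
    // falls back to the first entry in the list.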
@@ -13,6 +33,10 @@ export async function loadTogetherAIModels(data) {
 
    togetherModels = data;
 
+    if (!data.find(x => x.name === textgen_settings.togetherai_model)) {
+        textgen_settings.togetherai_model = data[0]?.name || '';
+    }
+
    $('#model_togetherai_select').empty();
    for (const model of data) {
        // Hey buddy, I think you've got the wrong door.
@@ -36,6 +60,10 @@ export async function loadMancerModels(data) {
 
    mancerModels = data;
 
+    if (!data.find(x => x.id === textgen_settings.mancer_model)) {
+        textgen_settings.mancer_model = data[0]?.id || '';
+    }
+
    $('#mancer_model').empty();
    for (const model of data) {
        const option = document.createElement('option');
@@ -55,7 +83,6 @@ function onMancerModelSelect() {
    setGenerationParamsFromPreset({ max_length: limits.context, genamt: limits.completion });
 }
 
-
 function onTogetherModelSelect() {
    const modelName = String($('#model_togetherai_select').val());
    textgen_settings.togetherai_model = modelName;
@@ -64,6 +91,12 @@ function onTogetherModelSelect() {
    setGenerationParamsFromPreset({ max_length: model.context_length });
 }
 
+function onOllamaModelSelect() {
+    const modelId = String($('#ollama_model').val());
+    textgen_settings.ollama_model = modelId;
+    $('#api_button_textgenerationwebui').trigger('click');
+}
+
 function getMancerModelTemplate(option) {
    const model = mancerModels.find(x => x.id === option?.element?.value);
 
@@ -97,9 +130,52 @@ function getTogetherModelTemplate(option) {
    `));
 }
 
+async function downloadOllamaModel() {
+    try {
+        const serverUrl = textgen_settings.server_urls[textgen_types.OLLAMA];
+
+        if (!serverUrl) {
+            toastr.info('Please connect to an Ollama server first.');
+            return;
+        }
+
+        const html = `Enter a model tag, for example <code>llama2:latest</code>.<br>
+        See <a target="_blank" href="https://ollama.ai/library">Library</a> for available models.`;
+        const name = await callPopup(html, 'input', '', { okButton: 'Download' });
+
+        if (!name) {
+            return;
+        }
+
+        toastr.info('Download may take a while, please wait...', 'Working on it');
+
+        const response = await fetch('/api/backends/text-completions/ollama/download', {
+            method: 'POST',
+            headers: getRequestHeaders(),
+            body: JSON.stringify({
+                name: name,
+                api_server: serverUrl,
+            }),
+        });
+
+        if (!response.ok) {
+            throw new Error(response.statusText);
+        }
+
+        // Force refresh the model list
+        toastr.success('Download complete. Please select the model from the dropdown.');
+        $('#api_button_textgenerationwebui').trigger('click');
+    } catch (err) {
+        console.error(err);
+        toastr.error('Failed to download Ollama model. Please try again.');
+    }
+}
+
 jQuery(function () {
    $('#mancer_model').on('change', onMancerModelSelect);
    $('#model_togetherai_select').on('change', onTogetherModelSelect);
+    $('#ollama_model').on('change', onOllamaModelSelect);
+    $('#ollama_download_model').on('click', downloadOllamaModel);
 
    if (!isMobile()) {
        $('#mancer_model').select2({
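For illustration (not part of the commit), the Download button above ultimately calls the new proxy route added later in this diff. A minimal sketch of that client call, assuming a locally connected Ollama server:

    const res = await fetch('/api/backends/text-completions/ollama/download', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify({ name: 'llama2:latest', api_server: 'http://127.0.0.1:11434' }),
    });
    if (res.ok) console.log('Model pulled; reconnect to refresh the model list.');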
@@ -116,5 +192,11 @@ jQuery(function () {
            width: '100%',
            templateResult: getTogetherModelTemplate,
        });
+        $('#ollama_model').select2({
+            placeholder: 'Select a model',
+            searchInputPlaceholder: 'Search models...',
+            searchInputCssClass: 'text_pole',
+            width: '100%',
+        });
    }
 });
@@ -1,5 +1,4 @@
 import {
-    api_server_textgenerationwebui,
    getRequestHeaders,
    getStoppingStrings,
    max_context,
@@ -34,9 +33,10 @@ export const textgen_types = {
    KOBOLDCPP: 'koboldcpp',
    TOGETHERAI: 'togetherai',
    LLAMACPP: 'llamacpp',
+    OLLAMA: 'ollama',
 };
 
-const { MANCER, APHRODITE, TOGETHERAI, OOBA } = textgen_types;
+const { MANCER, APHRODITE, TOGETHERAI, OOBA, OLLAMA, LLAMACPP } = textgen_types;
 const BIAS_KEY = '#textgenerationwebui_api-settings';
 
 // Maybe let it be configurable in the future?
@@ -46,6 +46,15 @@ const MANCER_SERVER_DEFAULT = 'https://neuro.mancer.tech';
 let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEFAULT;
 let TOGETHERAI_SERVER = 'https://api.together.xyz';
 
+const SERVER_INPUTS = {
+    [textgen_types.OOBA]: '#textgenerationwebui_api_url_text',
+    [textgen_types.APHRODITE]: '#aphrodite_api_url_text',
+    [textgen_types.TABBY]: '#tabby_api_url_text',
+    [textgen_types.KOBOLDCPP]: '#koboldcpp_api_url_text',
+    [textgen_types.LLAMACPP]: '#llamacpp_api_url_text',
+    [textgen_types.OLLAMA]: '#ollama_api_url_text',
+};
+
 const KOBOLDCPP_ORDER = [6, 0, 1, 3, 4, 2, 5];
 const settings = {
    temp: 0.7,
@@ -95,10 +104,12 @@ const settings = {
    type: textgen_types.OOBA,
    mancer_model: 'mytholite',
    togetherai_model: 'Gryphe/MythoMax-L2-13b',
+    ollama_model: '',
    legacy_api: false,
    sampler_order: KOBOLDCPP_ORDER,
    logit_bias: [],
    n: 1,
+    server_urls: {},
 };
 
 export let textgenerationwebui_banned_in_macros = [];
@@ -154,6 +165,37 @@ const setting_names = [
    'logit_bias',
 ];
 
+export function validateTextGenUrl() {
+    const selector = SERVER_INPUTS[settings.type];
+
+    if (!selector) {
+        return;
+    }
+
+    const control = $(selector);
+    const url = String(control.val()).trim();
+    const formattedUrl = formatTextGenURL(url);
+
+    if (!formattedUrl) {
+        toastr.error('Enter a valid API URL', 'Text Completion API');
+        return;
+    }
+
+    control.val(formattedUrl);
+}
+
+export function getTextGenServer() {
+    if (settings.type === MANCER) {
+        return MANCER_SERVER;
+    }
+
+    if (settings.type === TOGETHERAI) {
+        return TOGETHERAI_SERVER;
+    }
+
+    return settings.server_urls[settings.type] ?? '';
+}
+
 async function selectPreset(name) {
    const preset = textgenerationwebui_presets[textgenerationwebui_preset_names.indexOf(name)];
 
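For illustration (not part of the commit), server URLs are now stored per backend type instead of in one shared variable, so getTextGenServer() resolves differently depending on the selected type — a minimal sketch with assumed values:

    settings.type = textgen_types.OLLAMA;
    settings.server_urls[textgen_types.OLLAMA] = 'http://127.0.0.1:11434';
    getTextGenServer(); // 'http://127.0.0.1:11434' (per-backend slot)

    settings.type = textgen_types.MANCER;
    getTextGenServer(); // MANCER_SERVER (fixed cloud endpoint, no slot needed)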
@@ -291,6 +333,21 @@ function loadTextGenSettings(data, loadedSettings) {
    textgenerationwebui_preset_names = data.textgenerationwebui_preset_names ?? [];
    Object.assign(settings, loadedSettings.textgenerationwebui_settings ?? {});
 
+    if (loadedSettings.api_server_textgenerationwebui) {
+        for (const type of Object.keys(SERVER_INPUTS)) {
+            settings.server_urls[type] = loadedSettings.api_server_textgenerationwebui;
+        }
+        delete loadedSettings.api_server_textgenerationwebui;
+    }
+
+    for (const [type, selector] of Object.entries(SERVER_INPUTS)) {
+        const control = $(selector);
+        control.val(settings.server_urls[type] ?? '').on('input', function () {
+            settings.server_urls[type] = String($(this).val());
+            saveSettingsDebounced();
+        });
+    }
+
    if (loadedSettings.api_use_mancer_webui) {
        settings.type = MANCER;
    }
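For illustration (not part of the commit), a sketch of what the one-time migration above produces for a legacy settings file:

    // Assumed legacy input.
    const loadedSettings = { api_server_textgenerationwebui: 'http://127.0.0.1:5000' };
    // After loadTextGenSettings() runs, every backend slot starts from the old shared URL
    // and the legacy key is removed:
    // settings.server_urls = {
    //     ooba: 'http://127.0.0.1:5000', aphrodite: 'http://127.0.0.1:5000', tabby: 'http://127.0.0.1:5000',
    //     koboldcpp: 'http://127.0.0.1:5000', llamacpp: 'http://127.0.0.1:5000', ollama: 'http://127.0.0.1:5000',
    // };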
@@ -336,21 +393,6 @@ function loadTextGenSettings(data, loadedSettings) {
    });
 }
 
-export function getTextGenUrlSourceId() {
-    switch (settings.type) {
-        case textgen_types.OOBA:
-            return '#textgenerationwebui_api_url_text';
-        case textgen_types.APHRODITE:
-            return '#aphrodite_api_url_text';
-        case textgen_types.TABBY:
-            return '#tabby_api_url_text';
-        case textgen_types.KOBOLDCPP:
-            return '#koboldcpp_api_url_text';
-        case textgen_types.LLAMACPP:
-            return '#llamacpp_api_url_text';
-    }
-}
-
 /**
  * Sorts the sampler items by the given order.
  * @param {any[]} orderArray Sampler order array.
@@ -423,7 +465,10 @@ jQuery(function () {
        BIAS_CACHE.delete(BIAS_KEY);
 
        $('#main_api').trigger('change');
-        $('#api_button_textgenerationwebui').trigger('click');
+        if (!SERVER_INPUTS[type] || settings.server_urls[type]) {
+            $('#api_button_textgenerationwebui').trigger('click');
+        }
+
        saveSettingsDebounced();
    });
@@ -620,21 +665,18 @@ function getModel() {
        return online_status;
    }
 
+    if (settings.type === OLLAMA) {
+        if (!settings.ollama_model) {
+            toastr.error('No Ollama model selected.', 'Text Completion API');
+            throw new Error('No Ollama model selected');
+        }
+
+        return settings.ollama_model;
+    }
+
    return undefined;
 }
 
-export function getTextGenServer() {
-    if (settings.type === MANCER) {
-        return MANCER_SERVER;
-    }
-
-    if (settings.type === TOGETHERAI) {
-        return TOGETHERAI_SERVER;
-    }
-
-    return api_server_textgenerationwebui;
-}
-
 export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, isContinue, cfgValues, type) {
    const canMultiSwipe = !isContinue && !isImpersonate && type !== 'quiet';
    let params = {
@@ -687,6 +729,13 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
        'guidance_scale': cfgValues?.guidanceScale?.value ?? settings.guidance_scale ?? 1,
        'negative_prompt': cfgValues?.negativePrompt ?? substituteParams(settings.negative_prompt) ?? '',
        'grammar_string': settings.grammar_string,
+        // llama.cpp aliases. In case someone wants to use LM Studio as Text Completion API
+        'repeat_penalty': settings.rep_pen,
+        'tfs_z': settings.tfs,
+        'repeat_last_n': settings.rep_pen_range,
+        'n_predict': settings.maxTokens,
+        'mirostat': settings.mirostat_mode,
+        'ignore_eos': settings.ban_eos_token,
    };
    const aphroditeParams = {
        'n': canMultiSwipe ? settings.n : 1,
@@ -697,7 +746,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
        //'logprobs': settings.log_probs_aphrodite,
        //'prompt_logprobs': settings.prompt_log_probs_aphrodite,
    };
-    if (settings.type === textgen_types.APHRODITE) {
+    if (settings.type === APHRODITE) {
        params = Object.assign(params, aphroditeParams);
    } else {
        params = Object.assign(params, nonAphroditeParams);
@@ -709,7 +758,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
        params.logit_bias = logitBias;
    }
 
-    if (settings.type === textgen_types.LLAMACPP) {
+    if (settings.type === LLAMACPP || settings.type === OLLAMA) {
        // Convert bias and token bans to array of arrays
        const logitBiasArray = (params.logit_bias && typeof params.logit_bias === 'object' && Object.keys(params.logit_bias).length > 0)
            ? Object.entries(params.logit_bias).map(([key, value]) => [Number(key), value])
@@ -717,14 +766,9 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
        const tokenBans = toIntArray(getCustomTokenBans());
        logitBiasArray.push(...tokenBans.map(x => [Number(x), false]));
        const llamaCppParams = {
-            'repeat_penalty': settings.rep_pen,
-            'tfs_z': settings.tfs,
-            'repeat_last_n': settings.rep_pen_range,
-            'n_predict': settings.maxTokens,
-            'mirostat': settings.mirostat_mode,
-            'ignore_eos': settings.ban_eos_token,
-            'grammar': settings.grammar_string,
            'logit_bias': logitBiasArray,
+            // Conflicts with ooba's grammar_string
+            'grammar': settings.grammar_string,
        };
        params = Object.assign(params, llamaCppParams);
    }
@@ -1,10 +1,10 @@
-import { characters, main_api, api_server, api_server_textgenerationwebui, nai_settings, online_status, this_chid } from '../script.js';
+import { characters, main_api, api_server, nai_settings, online_status, this_chid } from '../script.js';
 import { power_user, registerDebugFunction } from './power-user.js';
 import { chat_completion_sources, model_list, oai_settings } from './openai.js';
 import { groups, selected_group } from './group-chats.js';
 import { getStringHash } from './utils.js';
 import { kai_flags } from './kai-settings.js';
-import { textgen_types, textgenerationwebui_settings as textgen_settings } from './textgen-settings.js';
+import { textgen_types, textgenerationwebui_settings as textgen_settings, getTextGenServer } from './textgen-settings.js';
 
 const { OOBA, TABBY, KOBOLDCPP, APHRODITE, LLAMACPP } = textgen_types;
 
@@ -537,7 +537,7 @@ function getTextgenAPITokenizationParams(str) {
    return {
        text: str,
        api_type: textgen_settings.type,
-        url: api_server_textgenerationwebui,
+        url: getTextGenServer(),
        legacy_api: textgen_settings.legacy_api && (textgen_settings.type === OOBA || textgen_settings.type === APHRODITE),
    };
 }
@@ -173,6 +173,7 @@ const TEXTGEN_TYPES = {
    KOBOLDCPP: 'koboldcpp',
    TOGETHERAI: 'togetherai',
    LLAMACPP: 'llamacpp',
+    OLLAMA: 'ollama',
 };
 
 // https://docs.together.ai/reference/completions
@@ -187,6 +188,25 @@ const TOGETHERAI_KEYS = [
    'stream',
 ];
 
+// https://github.com/jmorganca/ollama/blob/main/docs/api.md#request-with-options
+const OLLAMA_KEYS = [
+    'num_predict',
+    'stop',
+    'temperature',
+    'repeat_penalty',
+    'presence_penalty',
+    'frequency_penalty',
+    'top_k',
+    'top_p',
+    'tfs_z',
+    'typical_p',
+    'seed',
+    'repeat_last_n',
+    'mirostat',
+    'mirostat_tau',
+    'mirostat_eta',
+];
+
 const AVATAR_WIDTH = 400;
 const AVATAR_HEIGHT = 600;
 
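For illustration (not part of the commit), OLLAMA_KEYS acts as a whitelist when the request body is translated into Ollama `options` later in this diff — a minimal sketch with an assumed body:

    const _ = require('lodash');
    const body = { model: 'llama2:latest', prompt: 'Hi', temperature: 0.7, top_p: 0.9, grammar_string: 'root ::= "x"' };
    const options = _.pickBy(body, (_value, key) => OLLAMA_KEYS.includes(key));
    // -> { temperature: 0.7, top_p: 0.9 }  (grammar_string is not in OLLAMA_KEYS, so it is dropped)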
@@ -201,4 +221,5 @@ module.exports = {
    AVATAR_WIDTH,
    AVATAR_HEIGHT,
    TOGETHERAI_KEYS,
+    OLLAMA_KEYS,
 };
@@ -1,14 +1,62 @@
 const express = require('express');
 const fetch = require('node-fetch').default;
 const _ = require('lodash');
+const Readable = require('stream').Readable;
 
 const { jsonParser } = require('../../express-common');
-const { TEXTGEN_TYPES, TOGETHERAI_KEYS } = require('../../constants');
+const { TEXTGEN_TYPES, TOGETHERAI_KEYS, OLLAMA_KEYS } = require('../../constants');
 const { forwardFetchResponse } = require('../../util');
 const { setAdditionalHeaders } = require('../../additional-headers');
 
 const router = express.Router();
 
+/**
+ * Special boy's steaming routine. Wrap this abomination into proper SSE stream.
+ * @param {import('node-fetch').Response} jsonStream JSON stream
+ * @param {import('express').Request} request Express request
+ * @param {import('express').Response} response Express response
+ * @returns {Promise<any>} Nothing valuable
+ */
+async function parseOllamaStream(jsonStream, request, response) {
+    try {
+        let partialData = '';
+        jsonStream.body.on('data', (data) => {
+            const chunk = data.toString();
+            partialData += chunk;
+            while (true) {
+                let json;
+                try {
+                    json = JSON.parse(partialData);
+                } catch (e) {
+                    break;
+                }
+                const text = json.response || '';
+                const chunk = { choices: [{ text }] };
+                response.write(`data: ${JSON.stringify(chunk)}\n\n`);
+                partialData = '';
+            }
+        });
+
+        request.socket.on('close', function () {
+            if (jsonStream.body instanceof Readable) jsonStream.body.destroy();
+            response.end();
+        });
+
+        jsonStream.body.on('end', () => {
+            console.log('Streaming request finished');
+            response.write('data: [DONE]\n\n');
+            response.end();
+        });
+    } catch (error) {
+        console.log('Error forwarding streaming response:', error);
+        if (!response.headersSent) {
+            return response.status(500).send({ error: true });
+        } else {
+            return response.end();
+        }
+    }
+}
+
 //************** Ooba/OpenAI text completions API
 router.post('/status', jsonParser, async function (request, response) {
    if (!request.body) return response.sendStatus(400);
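For illustration (not part of the commit): Ollama streams newline-delimited JSON objects rather than SSE, and parseOllamaStream() re-wraps each parsed object into an OpenAI-style SSE chunk so the existing client streaming path can consume it unchanged. A minimal sketch of the transformation for one assumed incoming line:

    const line = '{"model":"llama2:latest","response":"Hel","done":false}'; // one JSON line from Ollama
    const evt = `data: ${JSON.stringify({ choices: [{ text: JSON.parse(line).response }] })}\n\n`;
    // -> 'data: {"choices":[{"text":"Hel"}]}' followed by a blank line, i.e. one SSE event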
@@ -51,6 +99,9 @@ router.post('/status', jsonParser, async function (request, response) {
            case TEXTGEN_TYPES.TOGETHERAI:
                url += '/api/models?&info';
                break;
+            case TEXTGEN_TYPES.OLLAMA:
+                url += '/api/tags';
+                break;
        }
    }
 
@@ -73,6 +124,10 @@ router.post('/status', jsonParser, async function (request, response) {
            data = { data: data.map(x => ({ id: x.name, ...x })) };
        }
 
+        if (request.body.api_type === TEXTGEN_TYPES.OLLAMA && Array.isArray(data.models)) {
+            data = { data: data.models.map(x => ({ id: x.name, ...x })) };
+        }
+
        if (!Array.isArray(data.data)) {
            console.log('Models response is not an array.');
            return response.status(400);
@@ -127,8 +182,8 @@ router.post('/status', jsonParser, async function (request, response) {
    }
 });
 
-router.post('/generate', jsonParser, async function (request, response_generate) {
-    if (!request.body) return response_generate.sendStatus(400);
+router.post('/generate', jsonParser, async function (request, response) {
+    if (!request.body) return response.sendStatus(400);
 
    try {
        if (request.body.api_server.indexOf('localhost') !== -1) {
@@ -164,6 +219,9 @@ router.post('/generate', jsonParser, async function (request, response_generate)
            case TEXTGEN_TYPES.LLAMACPP:
                url += '/completion';
                break;
+            case TEXTGEN_TYPES.OLLAMA:
+                url += '/api/generate';
+                break;
        }
    }
 
@@ -186,10 +244,23 @@ router.post('/generate', jsonParser, async function (request, response_generate)
        args.body = JSON.stringify(request.body);
    }
 
-    if (request.body.stream) {
+    if (request.body.api_type === TEXTGEN_TYPES.OLLAMA) {
+        args.body = JSON.stringify({
+            model: request.body.model,
+            prompt: request.body.prompt,
+            stream: request.body.stream ?? false,
+            raw: true,
+            options: _.pickBy(request.body, (_, key) => OLLAMA_KEYS.includes(key)),
+        });
+    }
+
+    if (request.body.api_type === TEXTGEN_TYPES.OLLAMA && request.body.stream) {
+        const stream = await fetch(url, args);
+        parseOllamaStream(stream, request, response);
+    } else if (request.body.stream) {
        const completionsStream = await fetch(url, args);
        // Pipe remote SSE stream to Express response
-        forwardFetchResponse(completionsStream, response_generate);
+        forwardFetchResponse(completionsStream, response);
    }
    else {
        const completionsReply = await fetch(url, args);
@@ -204,28 +275,60 @@ router.post('/generate', jsonParser, async function (request, response_generate)
                data['choices'] = [{ text }];
            }
 
-            return response_generate.send(data);
+            return response.send(data);
        } else {
            const text = await completionsReply.text();
            const errorBody = { error: true, status: completionsReply.status, response: text };
 
-            if (!response_generate.headersSent) {
-                return response_generate.send(errorBody);
+            if (!response.headersSent) {
+                return response.send(errorBody);
            }
 
-            return response_generate.end();
+            return response.end();
        }
    }
    } catch (error) {
        let value = { error: true, status: error?.status, response: error?.statusText };
        console.log('Endpoint error:', error);
 
-        if (!response_generate.headersSent) {
-            return response_generate.send(value);
+        if (!response.headersSent) {
+            return response.send(value);
        }
 
-        return response_generate.end();
+        return response.end();
    }
 });
 
+const ollama = express.Router();
+
+ollama.post('/download', jsonParser, async function (request, response) {
+    try {
+        if (!request.body.name || !request.body.api_server) return response.sendStatus(400);
+
+        const name = request.body.name;
+        const url = String(request.body.api_server).replace(/\/$/, '');
+
+        const fetchResponse = await fetch(`${url}/api/pull`, {
+            method: 'POST',
+            body: JSON.stringify({
+                name: name,
+                stream: false,
+            }),
+            headers: { 'Content-Type': 'application/json' },
+        });
+
+        if (!fetchResponse.ok) {
+            console.log('Download error:', fetchResponse.status, fetchResponse.statusText);
+            return response.status(fetchResponse.status).send({ error: true });
+        }
+
+        return response.send({ ok: true });
+    } catch (error) {
+        console.error(error);
+        return response.status(500);
+    }
+});
+
+router.use('/ollama', ollama);
+
 module.exports = { router };
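For illustration (not part of the commit), the /download route above simply proxies Ollama's documented /api/pull endpoint; the equivalent direct call, assuming a local Ollama server, looks like this:

    await fetch('http://127.0.0.1:11434/api/pull', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ name: 'llama2:latest', stream: false }),
    });
    // Resolves once the model has been pulled into the local Ollama library.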