#1328 New API schema for ooba / mancer / aphrodite

Cohee 2023-11-08 00:17:13 +02:00
parent 2d2ff5230c
commit 2c7b954a8d
7 changed files with 293 additions and 341 deletions


@@ -49,7 +49,6 @@
"ban_eos_token": false,
"skip_special_tokens": true,
"streaming": false,
-"streaming_url": "ws://127.0.0.1:5005/api/v1/stream",
"mirostat_mode": 0,
"mirostat_tau": 5,
"mirostat_eta": 0.1,


@@ -1793,9 +1793,6 @@
<div class="flex1">
<h4>Mancer Model</h4>
<select id="mancer_model"></select>
-<h4 data-i18n="Mancer API url">Mancer API URL</h4>
-<small data-i18n="Example: https://neuro.mancer.tech/webui/MODEL/api">Example: https://neuro.mancer.tech/webui/MODEL/api</small>
-<input id="mancer_api_url_text" name="mancer_api_url" class="text_pole wide100p" maxlength="500" value="" autocomplete="off">
</div>
</div>
<div data-tg-type="ooba" class="flex-container flexFlowColumn">
@@ -1808,15 +1805,10 @@
</span>
</div>
<div class="flex1">
-<h4 data-i18n="Blocking API url">Blocking API URL</h4>
-<small data-i18n="Example: http://127.0.0.1:5000/api">Example: http://127.0.0.1:5000/api</small>
+<h4 data-i18n="Server url">Server URL</h4>
+<small data-i18n="Example: http://127.0.0.1:5000">Example: http://127.0.0.1:5000</small>
<input id="textgenerationwebui_api_url_text" name="textgenerationwebui_api_url" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" data-server-history="ooba_blocking">
</div>
-<div class="flex1">
-<h4 data-i18n="Streaming API url">Streaming API URL</h4>
-<small data-i18n="Example: ws://127.0.0.1:5005/api/v1/stream">Example: ws://127.0.0.1:5005/api/v1/stream</small>
-<input id="streaming_url_textgenerationwebui" type="text" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" data-server-history="ooba_streaming">
-</div>
</div>
<div data-tg-type="aphrodite">
<div class="flex-container flexFlowColumn">
@@ -1835,11 +1827,11 @@
</div>
<div class="flex1">
<h4 data-i18n="API url">API URL</h4>
-<small data-i18n="Example: http://127.0.0.1:5000/api">Example: http://127.0.0.1:5000/api</small>
+<small data-i18n="Example: http://127.0.0.1:5000">Example: http://127.0.0.1:5000</small>
<input id="aphrodite_api_url_text" class="text_pole wide100p" maxlength="500" value="" autocomplete="off" data-server-history="aphrodite">
</div>
</div>
-<div id="api_button_textgenerationwebui" class="menu_button" type="submit" data-i18n="Connect" data-server-connect="ooba_blocking,ooba_streaming,aphrodite">Connect</div>
+<div id="api_button_textgenerationwebui" class="menu_button" type="submit" data-i18n="Connect" data-server-connect="ooba_blocking,aphrodite">Connect</div>
<div id="api_loading_textgenerationwebui" class="api-load-icon fa-solid fa-hourglass fa-spin"></div>
</form>
<div class="online_status4">


@@ -21,6 +21,8 @@ import {
isAphrodite,
textgen_types,
textgenerationwebui_banned_in_macros,
+isOoba,
+MANCER_SERVER,
} from "./scripts/textgen-settings.js";

import {
@@ -187,7 +189,7 @@ import { getFriendlyTokenizerName, getTokenCount, getTokenizerModel, initTokeniz
import { initPersonas, selectCurrentPersona, setPersonaDescription } from "./scripts/personas.js";
import { getBackgrounds, initBackgrounds } from "./scripts/backgrounds.js";
import { hideLoader, showLoader } from "./scripts/loader.js";
-import {CharacterContextMenu, BulkEditOverlay} from "./scripts/BulkEditOverlay.js";
+import { CharacterContextMenu, BulkEditOverlay } from "./scripts/BulkEditOverlay.js";

//exporting functions and vars for mods
export {
@@ -884,14 +886,27 @@ async function getStatus() {
return;
}

+const url = main_api == "textgenerationwebui" ? '/api/textgenerationwebui/status' : '/getstatus';
+
+let endpoint = api_server;
+
+if (main_api == "textgenerationwebui") {
+endpoint = api_server_textgenerationwebui;
+}
+
+if (main_api == "textgenerationwebui" && isMancer()) {
+endpoint = MANCER_SERVER
+}
+
jQuery.ajax({
type: "POST", //
-url: "/getstatus", //
+url: url, //
data: JSON.stringify({
-api_server: main_api == "kobold" ? api_server : api_server_textgenerationwebui,
main_api: main_api,
+api_server: endpoint,
use_mancer: main_api == "textgenerationwebui" ? isMancer() : false,
use_aphrodite: main_api == "textgenerationwebui" ? isAphrodite() : false,
+use_ooba: main_api == "textgenerationwebui" ? isOoba() : false,
}),
beforeSend: function () { },
cache: false,
@@ -900,8 +915,13 @@ async function getStatus() {
contentType: "application/json",
//processData: false,
success: function (data) {
-online_status = data.result;
-if (online_status == undefined) {
+if (main_api == "textgenerationwebui" && isMancer()) {
+online_status = textgenerationwebui_settings.mancer_model;
+} else {
+online_status = data.result;
+}
+
+if (!online_status) {
online_status = "no_connection";
}
@@ -914,11 +934,10 @@ async function getStatus() {
}

// We didn't get a 200 status code, but the endpoint has an explanation. Which means it DID connect, but I digress.
-if (online_status == "no_connection" && data.response) {
+if (online_status === "no_connection" && data.response) {
toastr.error(data.response, "API Error", { timeOut: 5000, preventDuplicates: true })
}

-//console.log(online_status);
resultCheckStatus();
},
error: function (jqXHR, exception) {
@@ -3510,11 +3529,8 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
//console.log('runGenerate calling showSwipeBtns');
showSwipeButtons();

-if (main_api == 'textgenerationwebui' && isMancer()) {
-const errorText = `<h3>Inferencer endpoint is unhappy!</h3>
-Returned status <tt>${data.status}</tt> with the reason:<br/>
-${data.response}`;
-callPopup(errorText, 'text');
+if (data?.response) {
+toastr.error(data.response, 'API Error');
}
}
console.debug('/savechat called by /Generate');
@@ -4025,7 +4041,7 @@ function getGenerateUrl(api) {
if (api == 'kobold') {
generate_url = '/generate';
} else if (api == 'textgenerationwebui') {
-generate_url = '/generate_textgenerationwebui';
+generate_url = '/api/textgenerationwebui/generate';
} else if (api == 'novel') {
generate_url = '/api/novelai/generate';
}
@@ -4054,7 +4070,7 @@ function extractMessageFromData(data) {
case 'koboldhorde':
return data.text;
case 'textgenerationwebui':
-return data.results[0].text;
+return data.choices[0].text;
case 'novel':
return data.output;
case 'openai':
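For context, the switch from data.results[0].text to data.choices[0].text reflects the move from the legacy blocking-API response shape to an OpenAI-style text-completion payload. Roughly, the two shapes compare like this (illustrative, abbreviated values):

// Legacy /api/v1/generate response (old):
// { "results": [ { "text": " generated text" } ] }
// OpenAI-compatible /v1/completions response (new):
// { "object": "text_completion", "choices": [ { "index": 0, "text": " generated text", "finish_reason": "stop" } ] }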
@@ -5265,7 +5281,6 @@ async function getSettings(type) {
api_server_textgenerationwebui = settings.api_server_textgenerationwebui;
$("#textgenerationwebui_api_url_text").val(api_server_textgenerationwebui);
-$("#mancer_api_url_text").val(api_server_textgenerationwebui);
$("#aphrodite_api_url_text").val(api_server_textgenerationwebui);

selected_button = settings.selected_button;
@@ -7835,36 +7850,37 @@ jQuery(async function () {
});

$("#api_button_textgenerationwebui").on('click', async function (e) {
-const mancerKey = String($("#api_key_mancer").val()).trim();
-if (mancerKey.length) {
-await writeSecret(SECRET_KEYS.MANCER, mancerKey);
-}
-
-const aphroditeKey = String($("#api_key_aphrodite").val()).trim();
-if (aphroditeKey.length) {
-await writeSecret(SECRET_KEYS.APHRODITE, aphroditeKey);
-}
-
const urlSourceId = getTextGenUrlSourceId();

-if ($(urlSourceId).val() != "") {
-let value = formatTextGenURL(String($(urlSourceId).val()).trim(), isMancer());
+if (urlSourceId && $(urlSourceId).val() !== "") {
+let value = formatTextGenURL(String($(urlSourceId).val()).trim());
if (!value) {
-callPopup("Please enter a valid URL.<br/>WebUI URLs should end with <tt>/api</tt><br/>Enable 'Relaxed API URLs' to allow other paths.", 'text');
+callPopup("Please enter a valid URL.", 'text');
return;
}

+const mancerKey = String($("#api_key_mancer").val()).trim();
+if (mancerKey.length) {
+await writeSecret(SECRET_KEYS.MANCER, mancerKey);
+}
+
+const aphroditeKey = String($("#api_key_aphrodite").val()).trim();
+if (aphroditeKey.length) {
+await writeSecret(SECRET_KEYS.APHRODITE, aphroditeKey);
+}
+
$(urlSourceId).val(value);
-$("#api_loading_textgenerationwebui").css("display", "inline-block");
-$("#api_button_textgenerationwebui").css("display", "none");
api_server_textgenerationwebui = value;
-main_api = "textgenerationwebui";
-saveSettingsDebounced();
-is_get_status = true;
-is_api_button_press = true;
-getStatus();
}

+$("#api_loading_textgenerationwebui").css("display", "inline-block");
+$("#api_button_textgenerationwebui").css("display", "none");
+main_api = "textgenerationwebui";
+saveSettingsDebounced();
+is_get_status = true;
+is_api_button_press = true;
+getStatus();
});

var button = $('#options_button');


@@ -1,15 +1,9 @@
-import { api_server_textgenerationwebui, getRequestHeaders, setGenerationParamsFromPreset } from "../script.js";
+import { getRequestHeaders, setGenerationParamsFromPreset } from "../script.js";
import { getDeviceInfo } from "./RossAscends-mods.js";
+import { textgenerationwebui_settings } from "./textgen-settings.js";

let models = [];

-/**
- * @param {string} modelId
- */
-export function getMancerModelURL(modelId) {
-return `https://neuro.mancer.tech/webui/${modelId}/api`;
-}
-
export async function loadMancerModels() {
try {
const response = await fetch('/api/mancer/models', {
@@ -29,7 +23,7 @@ export async function loadMancerModels() {
const option = document.createElement('option');
option.value = model.id;
option.text = model.name;
-option.selected = api_server_textgenerationwebui === getMancerModelURL(model.id);
+option.selected = model.id === textgenerationwebui_settings.mancer_model;
$('#mancer_model').append(option);
}
@@ -40,12 +34,11 @@ export async function loadMancerModels() {
function onMancerModelSelect() {
const modelId = String($('#mancer_model').val());
-const url = getMancerModelURL(modelId);
-$('#mancer_api_url_text').val(url);
+textgenerationwebui_settings.mancer_model = modelId;
$('#api_button_textgenerationwebui').trigger('click');

-const context = models.find(x => x.id === modelId)?.context;
-setGenerationParamsFromPreset({ max_length: context });
+const limits = models.find(x => x.id === modelId)?.limits;
+setGenerationParamsFromPreset({ max_length: limits.context, genamt: limits.completion });
}

function getMancerModelTemplate(option) {
@@ -57,8 +50,7 @@ function getMancerModelTemplate(option) {
return $((`
<div class="flex-container flexFlowColumn">
-<div><strong>${DOMPurify.sanitize(model.name)}</strong> | <span>${model.context} ctx</span></div>
-<small>${DOMPurify.sanitize(model.description)}</small>
+<div><strong>${DOMPurify.sanitize(model.name)}</strong> | <span>${model.limits?.context} ctx</span></div>
</div>
`));
}
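The model picker now reads context and completion limits straight off the entries returned by /api/mancer/models. A single entry, as consumed by onMancerModelSelect and getMancerModelTemplate above, would look roughly like this (illustrative values):

// { "id": "mytholite", "name": "MythoLite", "limits": { "context": 8192, "completion": 400 } }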


@@ -263,6 +263,7 @@ class PresetManager {
'streaming_kobold',
"enabled",
'seed',
+'mancer_model',
];

const settings = Object.assign({}, getSettingsByApiId(this.apiId));


@@ -3,6 +3,7 @@ import {
getRequestHeaders,
getStoppingStrings,
max_context,
+online_status,
saveSettingsDebounced,
setGenerationParamsFromPreset,
} from "../script.js";
@@ -12,7 +13,7 @@ import {
power_user,
} from "./power-user.js";
import { getTextTokens, tokenizers } from "./tokenizers.js";
-import { delay, onlyUnique } from "./utils.js";
+import { onlyUnique } from "./utils.js";

export {
textgenerationwebui_settings,
@@ -27,6 +28,9 @@ export const textgen_types = {
APHRODITE: 'aphrodite',
};

+// Maybe let it be configurable in the future?
+export const MANCER_SERVER = 'https://neuro.mancer.tech';
+
const textgenerationwebui_settings = {
temp: 0.7,
temperature_last: true,
@@ -58,7 +62,6 @@ const textgenerationwebui_settings = {
ban_eos_token: false,
skip_special_tokens: true,
streaming: false,
-streaming_url: 'ws://127.0.0.1:5005/api/v1/stream',
mirostat_mode: 0,
mirostat_tau: 5,
mirostat_eta: 0.1,
@@ -74,6 +77,7 @@ const textgenerationwebui_settings = {
//log_probs_aphrodite: 0,
//prompt_log_probs_aphrodite: 0,
type: textgen_types.OOBA,
+mancer_model: 'mytholite',
};

export let textgenerationwebui_banned_in_macros = [];
@@ -109,7 +113,6 @@ const setting_names = [
"ban_eos_token",
"skip_special_tokens",
"streaming",
-"streaming_url",
"mirostat_mode",
"mirostat_tau",
"mirostat_eta",
@@ -142,17 +145,12 @@ async function selectPreset(name) {
saveSettingsDebounced();
}

-function formatTextGenURL(value, use_mancer) {
+function formatTextGenURL(value) {
try {
const url = new URL(value);
-if (!power_user.relaxed_api_urls) {
-if (use_mancer) { // If Mancer is in use, only require the URL to *end* with `/api`.
-if (!url.pathname.endsWith('/api')) {
-return null;
-}
-} else {
-url.pathname = '/api';
-}
+if (url.pathname === '/api') {
+url.pathname = '/';
+toastr.info('Legacy API URL detected, please make sure you updated ooba-webui to the latest version.');
}
return url.toString();
} catch { } // Just using URL as a validation check
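In effect, formatTextGenURL now normalizes away the legacy /api suffix instead of requiring it. A quick sketch of the behaviour implied by the code above (return values shown as comments):

// formatTextGenURL('http://127.0.0.1:5000/api') // -> 'http://127.0.0.1:5000/', plus an info toast about the legacy URL
// formatTextGenURL('http://127.0.0.1:5000')     // -> 'http://127.0.0.1:5000/' (already the new schema, passed through)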
@@ -255,8 +253,6 @@ export function isOoba() {
export function getTextGenUrlSourceId() {
switch (textgenerationwebui_settings.type) {
-case textgen_types.MANCER:
-return "#mancer_api_url_text";
case textgen_types.OOBA:
return "#textgenerationwebui_api_url_text";
case textgen_types.APHRODITE:
@@ -371,33 +367,11 @@ function setSettingByName(i, value, trigger) {
}

async function generateTextGenWithStreaming(generate_data, signal) {
-let streamingUrl = textgenerationwebui_settings.streaming_url;
-
-if (isMancer()) {
-streamingUrl = api_server_textgenerationwebui.replace("http", "ws") + "/v1/stream";
-}
-
-if (isAphrodite()) {
-streamingUrl = api_server_textgenerationwebui;
-}
-
-if (isMancer() || isOoba()) {
-try {
-const parsedUrl = new URL(streamingUrl);
-
-if (parsedUrl.protocol !== 'ws:' && parsedUrl.protocol !== 'wss:') {
-throw new Error('Invalid protocol');
-}
-} catch {
-toastr.error('Invalid URL for streaming. Make sure it starts with ws:// or wss://');
-return async function* () { throw new Error('Invalid URL for streaming.'); }
-}
-}
-
-const response = await fetch('/generate_textgenerationwebui', {
+generate_data.stream = true;
+
+const response = await fetch('/api/textgenerationwebui/generate', {
headers: {
...getRequestHeaders(),
-'X-Response-Streaming': String(true),
-'X-Streaming-URL': streamingUrl,
},
body: JSON.stringify(generate_data),
method: 'POST',
@@ -408,54 +382,93 @@ async function generateTextGenWithStreaming(generate_data, signal) {
const decoder = new TextDecoder();
const reader = response.body.getReader();
let getMessage = '';
+let messageBuffer = "";

while (true) {
const { done, value } = await reader.read();
-let response = decoder.decode(value);
+// We don't want carriage returns in our messages
+let response = decoder.decode(value).replace(/\r/g, "");

-if (isAphrodite()) {
-const events = response.split('\n\n');
-for (const event of events) {
-if (event.length == 0) {
-continue;
-}
-try {
-const { results } = JSON.parse(event);
-if (Array.isArray(results) && results.length > 0) {
-getMessage = results[0].text;
-yield getMessage;
-// unhang UI thread
-await delay(1);
-}
-} catch {
-// Ignore
-}
-}
-if (done) {
-return;
-}
-} else {
-getMessage += response;
-if (done) {
-return;
-}
-yield getMessage;
-}
+tryParseStreamingError(response);
+
+let eventList = [];
+
+messageBuffer += response;
+eventList = messageBuffer.split("\n\n");
+// Last element will be an empty string or a leftover partial message
+messageBuffer = eventList.pop();
+
+for (let event of eventList) {
+if (event.startsWith('event: completion')) {
+event = event.split("\n")[1];
+}
+
+if (typeof event !== 'string' || !event.length)
+continue;
+
+if (!event.startsWith("data"))
+continue;
+
+if (event == "data: [DONE]") {
+return;
+}
+
+let data = JSON.parse(event.substring(6));
+// the first and last messages are undefined, protect against that
+getMessage += data?.choices[0]?.text || '';
+yield getMessage;
+}
+
+if (done) {
+return;
+}
}
}

+/**
+ * Parses errors in streaming responses and displays them in toastr.
+ * @param {string} response - Response from the server.
+ * @returns {void} Nothing.
+ */
+function tryParseStreamingError(response) {
+let data = {};
+
+try {
+data = JSON.parse(response);
+} catch {
+// No JSON. Do nothing.
+}
+
+if (data?.error?.message) {
+toastr.error(data.error.message, 'API Error');
+throw new Error(data.error.message);
+}
+}
+
+function toIntArray(string) {
+if (!string) {
+return [];
+}
+
+return string.split(',').map(x => parseInt(x)).filter(x => !isNaN(x));
+}
+
+function getModel() {
+if (isMancer()) {
+return textgenerationwebui_settings.mancer_model;
+}
+
+if (isAphrodite()) {
+return online_status;
+}
+
+return undefined;
+}
+
export function getTextGenGenerationData(finalPrompt, this_amount_gen, isImpersonate, cfgValues) {
return {
'prompt': finalPrompt,
+'model': getModel(),
'max_new_tokens': this_amount_gen,
+'max_tokens': this_amount_gen,
'do_sample': textgenerationwebui_settings.do_sample,
'temperature': textgenerationwebui_settings.temp,
'temperature_last': textgenerationwebui_settings.temperature_last,
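The rewritten parser above consumes the OpenAI-style SSE stream that the server now pipes through unchanged: chunks are split on blank lines, an optional "event: completion" line is stripped, "data: [DONE]" ends the stream, and each remaining "data:" line is JSON carrying a choices[0].text delta. An illustrative sequence of chunks (payload values are made up):

// event: completion
// data: {"choices":[{"index":0,"text":" Hel"}]}
//
// data: {"choices":[{"index":0,"text":"lo"}]}
//
// data: [DONE]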
@@ -469,6 +482,7 @@ export function getTextGenGenerationData(finalPrompt, this_amount_gen, isImperso
'presence_penalty': textgenerationwebui_settings.presence_pen,
'top_k': textgenerationwebui_settings.top_k,
'min_length': textgenerationwebui_settings.min_length,
+'min_tokens': textgenerationwebui_settings.min_length,
'no_repeat_ngram_size': textgenerationwebui_settings.no_repeat_ngram_size,
'num_beams': textgenerationwebui_settings.num_beams,
'penalty_alpha': textgenerationwebui_settings.penalty_alpha,
@@ -479,6 +493,7 @@ export function getTextGenGenerationData(finalPrompt, this_amount_gen, isImperso
'seed': textgenerationwebui_settings.seed,
'add_bos_token': textgenerationwebui_settings.add_bos_token,
'stopping_strings': getStoppingStrings(isImpersonate),
+'stop': getStoppingStrings(isImpersonate),
'truncation_length': max_context,
'ban_eos_token': textgenerationwebui_settings.ban_eos_token,
'skip_special_tokens': textgenerationwebui_settings.skip_special_tokens,
@@ -490,9 +505,11 @@ export function getTextGenGenerationData(finalPrompt, this_amount_gen, isImperso
'mirostat_tau': textgenerationwebui_settings.mirostat_tau,
'mirostat_eta': textgenerationwebui_settings.mirostat_eta,
'grammar_string': textgenerationwebui_settings.grammar_string,
-'custom_token_bans': getCustomTokenBans(),
+'custom_token_bans': isAphrodite() ? toIntArray(getCustomTokenBans()) : getCustomTokenBans(),
'use_mancer': isMancer(),
'use_aphrodite': isAphrodite(),
+'use_ooba': isOoba(),
+'api_server': isMancer() ? MANCER_SERVER : api_server_textgenerationwebui,
//'n': textgenerationwebui_settings.n_aphrodite,
//'best_of': textgenerationwebui_settings.n_aphrodite, //n must always == best_of and vice versa
//'ignore_eos': textgenerationwebui_settings.ignore_eos_token_aphrodite,
@@ -502,3 +519,4 @@ export function getTextGenGenerationData(finalPrompt, this_amount_gen, isImperso
//'prompt_logprobs': textgenerationwebui_settings.prompt_log_probs_aphrodite,
};
}
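Taken together, getTextGenGenerationData now emits both the legacy ooba field names and their OpenAI-style counterparts, plus the routing hints the server needs. An abbreviated, illustrative request body (not the full field list):

// {
//   "prompt": "...",
//   "model": "mytholite",                      // via getModel(), only set for Mancer/Aphrodite
//   "max_new_tokens": 300, "max_tokens": 300,  // legacy + OpenAI-style duplicates
//   "stopping_strings": ["\nUser:"], "stop": ["\nUser:"],
//   "use_mancer": false, "use_aphrodite": false, "use_ooba": true,
//   "api_server": "http://127.0.0.1:5000"
// }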

server.js

@@ -9,7 +9,6 @@ const path = require('path');
const readline = require('readline');
const util = require('util');
const { Readable } = require('stream');
-const { TextDecoder } = require('util');

// cli/fs related library imports
const open = require('open');
@@ -35,7 +34,6 @@ const fetch = require('node-fetch').default;
const ipaddr = require('ipaddr.js');
const ipMatching = require('ip-matching');
const json5 = require('json5');
-const WebSocket = require('ws');

// image processing related library imports
const encode = require('png-chunks-encode');
@@ -57,7 +55,7 @@ const characterCardParser = require('./src/character-card-parser.js');
const contentManager = require('./src/content-manager');
const statsHelpers = require('./statsHelpers.js');
const { readSecret, migrateSecrets, SECRET_KEYS } = require('./src/secrets');
-const { delay, getVersion, deepMerge} = require('./src/util');
+const { delay, getVersion, deepMerge } = require('./src/util');
const { invalidateThumbnail, ensureThumbnailCache } = require('./src/thumbnails');
const { getTokenizerModel, getTiktokenTokenizer, loadTokenizers, TEXT_COMPLETION_MODELS } = require('./src/tokenizers');
const { convertClaudePrompt } = require('./src/chat-completion');
@@ -150,12 +148,20 @@ let color = {
function getMancerHeaders() {
const apiKey = readSecret(SECRET_KEYS.MANCER);

-return apiKey ? { "X-API-KEY": apiKey } : {};
+return apiKey ? ({
+"X-API-KEY": apiKey,
+"Authorization": `Bearer ${apiKey}`,
+}) : {};
}

function getAphroditeHeaders() {
const apiKey = readSecret(SECRET_KEYS.APHRODITE);

-return apiKey ? { "X-API-KEY": apiKey } : {};
+return apiKey ? ({
+"X-API-KEY": apiKey,
+"Authorization": `Bearer ${apiKey}`,
+}) : {};
}

function getOverrideHeaders(urlHost) {
@@ -181,7 +187,7 @@ function setAdditionalHeaders(request, args, server) {
} else if (request.body.use_aphrodite) {
headers = getAphroditeHeaders();
} else {
-headers = server ? getOverrideHeaders((new URL(server))?.host) : '';
+headers = server ? getOverrideHeaders((new URL(server))?.host) : {};
}

args.headers = Object.assign(args.headers, headers);
@@ -208,7 +214,7 @@ const AVATAR_HEIGHT = 600;
const jsonParser = express.json({ limit: '100mb' });
const urlencodedParser = express.urlencoded({ extended: true, limit: '100mb' });
const { DIRECTORIES, UPLOADS_PATH, PALM_SAFETY } = require('./src/constants');
-const {TavernCardValidator} = require("./src/validator/TavernCardValidator");
+const { TavernCardValidator } = require("./src/validator/TavernCardValidator");

// CSRF Protection //
if (cliArguments.disableCsrf === false) {
@@ -479,215 +485,146 @@ app.post("/generate", jsonParser, async function (request, response_generate) {
return response_generate.send({ error: true });
});

-/**
- * @param {string} streamingUrlString Streaming URL
- * @param {import('express').Request} request Express request
- * @param {import('express').Response} response Express response
- * @param {AbortController} controller Abort controller
- * @returns
- */
-async function sendAphroditeStreamingRequest(streamingUrlString, request, response, controller) {
-request.body['stream'] = true;
-const args = {
-method: 'POST',
-body: JSON.stringify(request.body),
-headers: { "Content-Type": "application/json" },
-signal: controller.signal,
-};
-setAdditionalHeaders(request, args, streamingUrlString);
-try {
-const generateResponse = await fetch(streamingUrlString + "/v1/generate", args);
-// Pipe remote SSE stream to Express response
-generateResponse.body.pipe(response);
-request.socket.on('close', function () {
-if (generateResponse.body instanceof Readable) generateResponse.body.destroy(); // Close the remote stream
-response.end(); // End the Express response
-});
-generateResponse.body.on('end', function () {
-console.log("Streaming request finished");
-response.end();
-});
-} catch (error) {
-let value = { error: true, status: error.status, response: error.statusText };
-console.log("Aphrodite endpoint error:", error);
-if (!response.headersSent) {
-return response.send(value);
-} else {
-return response.end();
-}
-}
-}
-
-//************** Text generation web UI
-app.post("/generate_textgenerationwebui", jsonParser, async function (request, response_generate) {
-if (!request.body) return response_generate.sendStatus(400);
-console.log(request.body);
-const controller = new AbortController();
-let isGenerationStopped = false;
-request.socket.removeAllListeners('close');
-request.socket.on('close', function () {
-isGenerationStopped = true;
-controller.abort();
-});
-if (request.header('X-Response-Streaming')) {
-const streamingUrlHeader = request.header('X-Streaming-URL');
-if (streamingUrlHeader === undefined) return response_generate.sendStatus(400);
-const streamingUrlString = streamingUrlHeader.replace("localhost", "127.0.0.1");
-if (request.body.use_aphrodite) {
-return sendAphroditeStreamingRequest(streamingUrlString, request, response_generate, controller);
-}
-response_generate.writeHead(200, {
-'Content-Type': 'text/plain;charset=utf-8',
-'Transfer-Encoding': 'chunked',
-'Cache-Control': 'no-transform',
-});
-async function* readWebsocket() {
-/** @type {WebSocket} */
-let websocket;
-/** @type {URL} */
-let streamingUrl;
-try {
-const streamingUrl = new URL(streamingUrlString);
-websocket = new WebSocket(streamingUrl);
-} catch (error) {
-console.log("[SillyTavern] Socket error", error);
-return;
-}
-websocket.on('open', async function () {
-console.log('WebSocket opened');
-let headers = {};
-if (request.body.use_mancer) {
-headers = getMancerHeaders();
-} else if (request.body.use_aphrodite) {
-headers = getAphroditeHeaders();
-} else {
-headers = getOverrideHeaders(streamingUrl?.host);
-}
-const combined_args = Object.assign(
-{},
-headers,
-request.body
-);
-console.log(combined_args);
-websocket.send(JSON.stringify(combined_args));
-});
-websocket.on('close', (code, buffer) => {
-const reason = new TextDecoder().decode(buffer)
-console.log("WebSocket closed (reason: %o)", reason);
-});
-while (true) {
-if (isGenerationStopped) {
-console.error('Streaming stopped by user. Closing websocket...');
-websocket.close();
-return;
-}
-let rawMessage = null;
-try {
-// This lunacy is because the websocket can fail to connect AFTER we're awaiting 'message'... so 'message' never triggers.
-// So instead we need to look for 'error' at the same time to reject the promise. And then remove the listener if we resolve.
-// This is awful.
-// Welcome to the shenanigan shack.
-rawMessage = await new Promise(function (resolve, reject) {
-websocket.once('error', reject);
-websocket.once('message', (data, isBinary) => {
-websocket.removeListener('error', reject);
-resolve(data);
-});
-});
-} catch (err) {
-console.error("Socket error:", err);
-websocket.close();
-yield "[SillyTavern] Streaming failed:\n" + err;
-return;
-}
-const message = json5.parse(rawMessage);
-switch (message.event) {
-case 'text_stream':
-yield message.text;
-break;
-case 'stream_end':
-if (message.error) {
-yield `\n[API Error] ${message.error}\n`
-}
-websocket.close();
-return;
-}
-}
-}
-let reply = '';
-try {
-for await (const text of readWebsocket()) {
-if (typeof text !== 'string') {
-break;
-}
-let newText = text;
-if (!newText) {
-continue;
-}
-reply += text;
-response_generate.write(newText);
-}
-console.log(reply);
-}
-finally {
-response_generate.end();
-}
-}
-else {
-const args = {
-body: JSON.stringify(request.body),
-headers: { "Content-Type": "application/json" },
-signal: controller.signal,
-};
-setAdditionalHeaders(request, args, api_server);
-try {
-const data = await postAsync(api_server + "/v1/generate", args);
-console.log("Endpoint response:", data);
-return response_generate.send(data);
-} catch (error) {
-let retval = { error: true, status: error.status, response: error.statusText };
-console.log("Endpoint error:", error);
-try {
-retval.response = await error.json();
-retval.response = retval.response.result;
-} catch { }
-return response_generate.send(retval);
-}
-}
-});
+//************** Text generation web UI
+app.post("/api/textgenerationwebui/status", jsonParser, async function (request, response) {
+if (!request.body) return response.sendStatus(400);
+
+try {
+if (request.body.api_server.indexOf('localhost') !== -1) {
+request.body.api_server = request.body.api_server.replace('localhost', '127.0.0.1');
+}
+
+console.log('Trying to connect to API:', request.body);
+const baseUrl = request.body.api_server;
+
+const args = {
+headers: { "Content-Type": "application/json" },
+timeout: 0,
+};
+
+setAdditionalHeaders(request, args, baseUrl);
+
+const url = new URL(baseUrl);
+
+if (request.body.use_ooba) {
+url.pathname = "/v1/models";
+}
+
+if (request.body.use_aphrodite) {
+url.pathname = "/v1/models";
+}
+
+if (request.body.use_mancer) {
+url.pathname = "/oai/v1/models";
+}
+
+const modelsReply = await fetch(url, args);
+
+if (!modelsReply.ok) {
+console.log('Models endpoint is offline.');
+return response.status(modelsReply.status);
+}
+
+const data = await modelsReply.json();
+
+if (!Array.isArray(data.data)) {
+console.log('Models response is not an array.')
+return response.status(503);
+}
+
+const modelIds = data.data.map(x => x.id);
+console.log('Models available:', modelIds);
+
+const result = modelIds[0] ?? 'Valid';
+return response.send({ result });
+} catch (error) {
+console.error(error);
+return response.status(500);
+}
+});
+
+app.post("/api/textgenerationwebui/generate", jsonParser, async function (request, response_generate) {
+if (!request.body) return response_generate.sendStatus(400);
+
+try {
+if (request.body.api_server.indexOf('localhost') !== -1) {
+request.body.api_server = request.body.api_server.replace('localhost', '127.0.0.1');
+}
+
+const baseUrl = request.body.api_server;
+console.log(request.body);
+
+const controller = new AbortController();
+request.socket.removeAllListeners('close');
+request.socket.on('close', function () {
+controller.abort();
+});
+
+const url = new URL(baseUrl);
+
+if (request.body.use_aphrodite || request.body.use_ooba) {
+url.pathname = "/v1/completions";
+}
+
+if (request.body.use_mancer) {
+url.pathname = "/oai/v1/completions";
+}
+
+const args = {
+method: 'POST',
+body: JSON.stringify(request.body),
+headers: { "Content-Type": "application/json" },
+signal: controller.signal,
+timeout: 0,
+};
+
+setAdditionalHeaders(request, args, baseUrl);
+
+if (request.body.stream) {
+const completionsStream = await fetch(url, args);
+// Pipe remote SSE stream to Express response
+completionsStream.body.pipe(response_generate);
+
+request.socket.on('close', function () {
+if (completionsStream.body instanceof Readable) completionsStream.body.destroy(); // Close the remote stream
+response_generate.end(); // End the Express response
+});
+
+completionsStream.body.on('end', function () {
+console.log("Streaming request finished");
+response_generate.end();
+});
+}
+else {
+const completionsReply = await fetch(url, args);
+
+if (completionsReply.ok) {
+const data = await completionsReply.json();
+console.log("Endpoint response:", data);
+return response_generate.send(data);
+} else {
+const text = await completionsReply.text();
+const errorBody = { error: true, status: completionsReply.status, response: text };
+
+if (!response_generate.headersSent) {
+return response_generate.send(errorBody);
+}
+
+return response_generate.end();
+}
+}
+} catch (error) {
+let value = { error: true, status: error?.status, response: error?.statusText };
+console.log("Endpoint error:", error);
+
+if (!response_generate.headersSent) {
+return response_generate.send(value);
+}
+
+return response_generate.end();
+}
+});

app.post("/savechat", jsonParser, function (request, response) {
try {
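For orientation, a minimal client-side sketch of the two new routes added above (run inside an async context; the real calls go through getStatus() and generateTextGenWithStreaming() with getRequestHeaders(), so the headers and values here are illustrative):

const body = { api_server: 'http://127.0.0.1:5000', use_ooba: true, use_mancer: false, use_aphrodite: false };

// Probe the backend and pick up the first advertised model id
const status = await fetch('/api/textgenerationwebui/status', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
}).then(r => r.json()); // -> { result: 'some-model-id' }

// Run a completion; the server rewrites the path to /v1/completions (or /oai/v1/completions for Mancer)
const completion = await fetch('/api/textgenerationwebui/generate', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ ...body, prompt: 'Hello', max_tokens: 8, stream: false }),
}).then(r => r.json()); // -> { choices: [ { text: '...' } ], ... }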
@@ -740,7 +677,7 @@ app.post("/getchat", jsonParser, function (request, response) {
app.post("/api/mancer/models", jsonParser, async function (_req, res) {
try {
-const response = await fetch('https://mancer.tech/internal/api/models');
+const response = await fetch('https://neuro.mancer.tech/oai/v1/models');
const data = await response.json();

if (!response.ok) {
@@ -748,15 +685,12 @@ app.post("/api/mancer/models", jsonParser, async function (_req, res) {
return res.json([]);
}

-if (!Array.isArray(data.models)) {
+if (!Array.isArray(data.data)) {
console.log('Mancer models response is not an array.')
return res.json([]);
}

-const modelIds = data.models.map(x => x.id);
-console.log('Mancer models available:', modelIds);
-
-return res.json(data.models);
+return res.json(data.data);
} catch (error) {
console.error(error);
return res.json([]);
@@ -1184,7 +1118,7 @@ app.post("/v2/editcharacterattribute", jsonParser, async function (request, resp
const avatarPath = path.join(charactersPath, update.avatar);
try {
let character = JSON.parse(await charaRead(avatarPath));
character = deepMerge(character, update);

const validator = new TavernCardValidator(character);
@@ -1200,10 +1134,10 @@ app.post("/v2/editcharacterattribute", jsonParser, async function (request, resp
);
} else {
console.log(validator.lastValidationError)
-response.status(400).send({message: `Validation failed for ${character.name}`, error: validator.lastValidationError});
+response.status(400).send({ message: `Validation failed for ${character.name}`, error: validator.lastValidationError });
}
} catch (exception) {
-response.status(500).send({message: 'Unexpected error while saving character.', error: exception.toString()});
+response.status(500).send({ message: 'Unexpected error while saving character.', error: exception.toString() });
}
});