Mirror of https://github.com/SillyTavern/SillyTavern.git

Merge remote-tracking branch 'upstream/staging' into staging
@@ -121,7 +121,7 @@
}

/* Add the custom checkbox */
.select2-results__option:before {
.select2-results__option::before {
content: '';
display: inline-block;
position: absolute;
@@ -141,7 +141,7 @@
}

/* Add the custom checkbox checkmark */
.select2-results__option--selected.select2-results__option:before {
.select2-results__option--selected.select2-results__option::before {
content: '\2713';
font-weight: bold;
color: var(--SmartThemeBodyColor);
@@ -149,3 +149,11 @@
text-align: center;
line-height: 14px;
}

.select2-results__option.select2-results__message {
background-color: inherit;
}

.select2-results__option.select2-results__message::before {
display: none;
}

@@ -3847,7 +3847,7 @@
</div>
<div id="rm_character_import" class="right_menu" style="display: none;">
<form id="form_import" action="javascript:void(null);" method="post" enctype="multipart/form-data">
<input multiple type="file" id="character_import_file" accept=".json, image/png" name="avatar">
<input multiple type="file" id="character_import_file" accept=".json, image/png, .yaml, .yml" name="avatar">
<input id="character_import_file_type" name="file_type" class="text_pole" maxlength="999" size="2" value="" autocomplete="off">
</form>
</div>
public/script.js (290 changed lines)
@@ -145,7 +145,7 @@ import {
Stopwatch,
} from './scripts/utils.js';

import { ModuleWorkerWrapper, doDailyExtensionUpdatesCheck, extension_settings, getContext, loadExtensionSettings, processExtensionHelpers, registerExtensionHelper, renderExtensionTemplate, runGenerationInterceptors, saveMetadataDebounced } from './scripts/extensions.js';
import { ModuleWorkerWrapper, doDailyExtensionUpdatesCheck, extension_settings, getContext, loadExtensionSettings, renderExtensionTemplate, runGenerationInterceptors, saveMetadataDebounced } from './scripts/extensions.js';
import { COMMENT_NAME_DEFAULT, executeSlashCommands, getSlashCommandsHelp, processChatSlashCommands, registerSlashCommand } from './scripts/slash-commands.js';
import {
tag_map,
@@ -338,8 +338,6 @@ export const event_types = {

export const eventSource = new EventEmitter();

eventSource.on(event_types.MESSAGE_RECEIVED, processExtensionHelpers);
eventSource.on(event_types.MESSAGE_SENT, processExtensionHelpers);
eventSource.on(event_types.CHAT_CHANGED, processChatSlashCommands);

const characterGroupOverlay = new BulkEditOverlay();
@@ -701,8 +699,6 @@ export let user_avatar = 'you.png';
export var amount_gen = 80; //default max length of AI generated responses
var max_context = 2048;

var message_already_generated = '';

var swipes = true;
let extension_prompts = {};

@@ -1478,13 +1474,11 @@ export async function reloadCurrentChat() {
}

function messageFormatting(mes, ch_name, isSystem, isUser) {
if (mes) {
mesForShowdownParse = mes;
if (!mes) {
return '';
}

if (!mes) {
mes = '';
}
mesForShowdownParse = mes;

// Force isSystem = false on comment messages so they get formatted properly
if (ch_name === COMMENT_NAME_DEFAULT && isSystem && !isUser) {
@@ -1544,7 +1538,6 @@ function messageFormatting(mes, ch_name, isSystem, isUser) {
mes = mes.replaceAll('\\begin{align*}', '$$');
mes = mes.replaceAll('\\end{align*}', '$$');
mes = converter.makeHtml(mes);
mes = replaceBiasMarkup(mes);

mes = mes.replace(/<code(.*)>[\s\S]*?<\/code>/g, function (match) {
// Firefox creates extra newlines from <br>s in code blocks, so we replace them before converting newlines to <br>s.
@@ -2600,6 +2593,21 @@ function hideStopButton() {
}

class StreamingProcessor {
constructor(type, force_name2, timeStarted, messageAlreadyGenerated) {
this.result = '';
this.messageId = -1;
this.type = type;
this.force_name2 = force_name2;
this.isStopped = false;
this.isFinished = false;
this.generator = this.nullStreamingGeneration;
this.abortController = new AbortController();
this.firstMessageText = '...';
this.timeStarted = timeStarted;
this.messageAlreadyGenerated = messageAlreadyGenerated;
this.swipes = [];
}

showMessageButtons(messageId) {
if (messageId == -1) {
return;
@@ -2635,32 +2643,16 @@ class StreamingProcessor {
return messageId;
}

removePrefix(text) {
const name1Marker = `${name1}: `;
const name2Marker = `${name2}: `;

if (text) {
if (text.startsWith(name1Marker)) {
text = text.replace(name1Marker, '');
}
if (text.startsWith(name2Marker)) {
text = text.replace(name2Marker, '');
}
}
return text;
}

onProgressStreaming(messageId, text, isFinal) {
const isImpersonate = this.type == 'impersonate';
const isContinue = this.type == 'continue';

if (!isImpersonate && !isContinue && Array.isArray(this.swipes) && this.swipes.length > 0) {
for (let i = 0; i < this.swipes.length; i++) {
this.swipes[i] = cleanUpMessage(this.removePrefix(this.swipes[i]), false, false, true, this.stoppingStrings);
this.swipes[i] = cleanUpMessage(this.swipes[i], false, false, true, this.stoppingStrings);
}
}

text = this.removePrefix(text);
let processedText = cleanUpMessage(text, isImpersonate, isContinue, !isFinal, this.stoppingStrings);

// Predict unbalanced asterisks / quotes during streaming
@@ -2786,6 +2778,9 @@ class StreamingProcessor {
}

onErrorStreaming() {
this.abortController.abort();
this.isStopped = true;

this.hideMessageButtons(this.messageId);
$('#send_textarea').removeAttr('disabled');
is_send_press = false;
@@ -2811,20 +2806,6 @@ class StreamingProcessor {
throw new Error('Generation function for streaming is not hooked up');
}

constructor(type, force_name2, timeStarted) {
this.result = '';
this.messageId = -1;
this.type = type;
this.force_name2 = force_name2;
this.isStopped = false;
this.isFinished = false;
this.generator = this.nullStreamingGeneration;
this.abortController = new AbortController();
this.firstMessageText = '...';
this.timeStarted = timeStarted;
this.swipes = [];
}

async generate() {
if (this.messageId == -1) {
this.messageId = await this.onStartStreaming(this.firstMessageText);
@@ -2844,13 +2825,12 @@ class StreamingProcessor {
for await (const { text, swipes } of this.generator()) {
timestamps.push(Date.now());
if (this.isStopped) {
this.onStopStreaming();
return;
}

this.result = text;
this.swipes = swipes;
await sw.tick(() => this.onProgressStreaming(this.messageId, message_already_generated + text));
await sw.tick(() => this.onProgressStreaming(this.messageId, this.messageAlreadyGenerated + text));
}
const seconds = (timestamps[timestamps.length - 1] - timestamps[0]) / 1000;
console.warn(`Stream stats: ${timestamps.length} tokens, ${seconds.toFixed(2)} seconds, rate: ${Number(timestamps.length / seconds).toFixed(2)} TPS`);
@@ -2858,7 +2838,6 @@ class StreamingProcessor {
catch (err) {
console.error(err);
this.onErrorStreaming();
this.isStopped = true;
return;
}
@@ -2965,7 +2944,7 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
const isInstruct = power_user.instruct.enabled && main_api !== 'openai';
const isImpersonate = type == 'impersonate';

message_already_generated = isImpersonate ? `${name1}: ` : `${name2}: `;
let message_already_generated = isImpersonate ? `${name1}: ` : `${name2}: `;

const interruptedByCommand = await processCommands($('#send_textarea').val(), type, dryRun);

@@ -3088,7 +3067,7 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
//for normal messages sent from user..
if ((textareaText != '' || hasPendingFileAttachment()) && !automatic_trigger && type !== 'quiet' && !dryRun) {
// If user message contains no text other than bias - send as a system message
if (messageBias && replaceBiasMarkup(textareaText).trim().length === 0) {
if (messageBias && !removeMacros(textareaText)) {
sendSystemMessage(system_message_types.GENERIC, ' ', { bias: messageBias });
}
else {
@@ -3378,10 +3357,6 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
let mesSend = [];
console.debug('calling runGenerate');

if (!dryRun) {
streamingProcessor = isStreamingEnabled() && type !== 'quiet' ? new StreamingProcessor(type, force_name2, generation_started) : false;
}

if (isContinue) {
// Coping mechanism for OAI spacing
const isForceInstruct = isOpenRouterWithInstruct();
@@ -3389,21 +3364,16 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
cyclePrompt += ' ';
continue_mag += ' ';
}

// Save reply does add cycle text to the prompt, so it's not needed here
streamingProcessor && (streamingProcessor.firstMessageText = '');
message_already_generated = continue_mag;
}

const originalType = type;
return runGenerate(cyclePrompt);

async function runGenerate(cycleGenerationPrompt = '') {
if (!dryRun) {
is_send_press = true;
}

generatedPromptCache += cycleGenerationPrompt;
generatedPromptCache += cyclePrompt;
if (generatedPromptCache.length == 0 || type === 'continue') {
console.debug('generating prompt');
chatString = '';
@@ -3771,14 +3741,13 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
}
}

return new Promise(async (resolve, reject) => {
if (true === dryRun) return onSuccess({ error: 'dryRun' });
async function finishGenerating() {
if (dryRun) return { error: 'dryRun' };

if (power_user.console_log_prompts) {
console.log(generate_data.prompt);
}

let generate_url = getGenerateUrl(main_api);
console.debug('rungenerate calling API');

showStopButton();
@@ -3825,55 +3794,16 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
}

console.debug(`pushed prompt bits to itemizedPrompts array. Length is now: ${itemizedPrompts.length}`);
/** @type {Promise<any>} */
let streamingGeneratorPromise = Promise.resolve();

if (main_api == 'openai') {
if (isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = sendOpenAIRequest(type, generate_data.prompt, streamingProcessor.abortController.signal);
}
else {
sendOpenAIRequest(type, generate_data.prompt, abortController.signal).then(onSuccess).catch(onError);
}
}
else if (main_api == 'koboldhorde') {
generateHorde(finalPrompt, generate_data, abortController.signal, true).then(onSuccess).catch(onError);
}
else if (main_api == 'textgenerationwebui' && isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = generateTextGenWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else if (main_api == 'novel' && isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = generateNovelWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else if (main_api == 'kobold' && isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = generateKoboldWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else {
try {
const response = await fetch(generate_url, {
method: 'POST',
headers: getRequestHeaders(),
cache: 'no-cache',
body: JSON.stringify(generate_data),
signal: abortController.signal,
});

if (!response.ok) {
const error = await response.json();
throw error;
}

const data = await response.json();
onSuccess(data);
} catch (error) {
onError(error);
}
}

if (isStreamingEnabled() && type !== 'quiet') {
try {
const streamingGenerator = await streamingGeneratorPromise;
streamingProcessor.generator = streamingGenerator;
streamingProcessor = new StreamingProcessor(type, force_name2, generation_started, message_already_generated);
if (isContinue) {
// Save reply does add cycle text to the prompt, so it's not needed here
streamingProcessor.firstMessageText = '';
}

streamingProcessor.generator = await sendStreamingRequest(type, generate_data);

hideSwipeButtons();
let getMessage = await streamingProcessor.generate();
let messageChunk = cleanUpMessage(getMessage, isImpersonate, isContinue, false);
@@ -3887,19 +3817,19 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
streamingProcessor = null;
triggerAutoContinue(messageChunk, isImpersonate);
}
resolve();
} catch (err) {
onError(err);
} else {
return await sendGenerationRequest(type, generate_data);
}
}

}
return finishGenerating().then(onSuccess, onError);

async function onSuccess(data) {
if (!data) return;
let messageChunk = '';

if (data.error == 'dryRun') {
generatedPromptCache = '';
resolve();
return;
}
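
Note on the hunks above: the `new Promise(async (resolve, reject) => ...)` executor is replaced by a plain async helper whose result and errors are routed through the existing onSuccess/onError pair. A toy, self-contained sketch of that control-flow change (doWork and the sample value are stand-ins, not code from this commit):

// Before (simplified): values escaped only via resolve()/reject() calls scattered through callbacks.
// return new Promise(async (resolve, reject) => { ... resolve(result) ... reject(err) ... });

// After (simplified): values flow out via return, errors via throw,
// and both are routed through the same handlers with .then(onSuccess, onError).
const doWork = async () => ({ text: 'ok' });  // stand-in for the API call
const onSuccess = async (data) => data.text;  // stand-in for the real success handler
const onError = (err) => { throw err; };      // the real onError also rethrows now

async function finishGenerating() {
    return await doWork();
}

finishGenerating().then(onSuccess, onError).then(console.log); // logs 'ok'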

@@ -3928,7 +3858,8 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
await eventSource.emit(event_types.IMPERSONATE_READY, getMessage);
}
else if (type == 'quiet') {
resolve(getMessage);
unblockGeneration();
return getMessage;
}
else {
// Without streaming we'll be having a full message on continuation. Treat it as a last chunk.
@@ -3948,21 +3879,18 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
maxLoops ??= MAX_GENERATION_LOOPS;

if (maxLoops === 0) {
reject(new Error('Generate circuit breaker interruption'));
if (type !== 'quiet') {
throwCircuitBreakerError();
}
return;
throw new Error('Generate circuit breaker interruption');
}

// regenerate with character speech reenforced
// to make sure we leave on swipe type while also adding the name2 appendage
delay(1000).then(async () => {
await delay(1000);
// The first await is for waiting for the generate to start. The second one is waiting for it to finish
const result = await await Generate(type, { automatic_trigger, force_name2: true, quiet_prompt, skipWIAN, force_chid, maxLoops: maxLoops - 1 });
resolve(result);
});
return;
return result;
}

if (power_user.auto_swipe) {
@@ -3989,7 +3917,6 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
is_send_press = false;
swipe_right();
// TODO: do we want to resolve after an auto-swipe?
resolve();
return;
}
}
@@ -3999,7 +3926,7 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
if (data?.response) {
toastr.error(data.response, 'API Error');
}
reject(data.response);
throw data?.response;
}

console.debug('/api/chats/save called by /Generate');
@@ -4010,7 +3937,6 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
if (type !== 'quiet') {
triggerAutoContinue(messageChunk, isImpersonate);
}
resolve();
}

function onError(exception) {
@@ -4018,23 +3944,18 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
toastr.error(exception.error.message, 'Error', { timeOut: 10000, extendedTimeOut: 20000 });
}

reject(exception);
unblockGeneration();
console.log(exception);
streamingProcessor = null;
throw exception;
}
});

} //rungenerate ends
} else { //generate's primary loop ends, after this is error handling for no-connection or safety-id
if (this_chid === undefined || this_chid === 'invalid-safety-id') {
toastr.warning('Сharacter is not selected');
}
is_send_press = false;
}

//console.log('generate ending');
} //generate ends
}

function flushWIDepthInjections() {
//prevent custom depth WI entries (which have unique random key names) from duplicating
@@ -4165,13 +4086,16 @@ function formatMessageHistoryItem(chatItem, isInstruct, forceOutputSequence) {
textResult = formatInstructModeChat(itemName, chatItem.mes, chatItem.is_user, isNarratorType, chatItem.force_avatar, name1, name2, forceOutputSequence);
}

textResult = replaceBiasMarkup(textResult);

return textResult;
}

export function replaceBiasMarkup(str) {
return (str ?? '').replace(/\{\{[\s\S]*?\}\}/gm, '');
/**
* Removes all {{macros}} from a string.
* @param {string} str String to remove macros from.
* @returns {string} String with macros removed.
*/
export function removeMacros(str) {
return (str ?? '').replace(/\{\{[\s\S]*?\}\}/gm, '').trim();
}

/**
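
removeMacros() differs from the old replaceBiasMarkup() only in the trailing .trim(), which is what lets callers treat an all-macro message as empty. Illustrative inputs (the macro contents are made-up examples, not values from this commit):

removeMacros('Hello, {{user}}!');   // 'Hello, !' -> truthy, kept as a regular message
removeMacros('  {{some macro}}  '); // ''         -> falsy, so checks such as
                                    //               `messageBias && !removeMacros(text)`
                                    //               flag the message as bias-only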

@@ -4481,22 +4405,82 @@ function setInContextMessages(lastmsg, type) {
}
}

function getGenerateUrl(api) {
let generate_url = '';
if (api == 'kobold') {
generate_url = '/api/backends/kobold/generate';
} else if (api == 'textgenerationwebui') {
generate_url = '/api/backends/text-completions/generate';
} else if (api == 'novel') {
generate_url = '/api/novelai/generate';
/**
* Sends a non-streaming request to the API.
* @param {string} type Generation type
* @param {object} data Generation data
* @returns {Promise<object>} Response data from the API
*/
async function sendGenerationRequest(type, data) {
if (main_api === 'openai') {
return await sendOpenAIRequest(type, data.prompt, abortController.signal);
}

if (main_api === 'koboldhorde') {
return await generateHorde(data.prompt, data, abortController.signal, true);
}

const response = await fetch(getGenerateUrl(main_api), {
method: 'POST',
headers: getRequestHeaders(),
cache: 'no-cache',
body: JSON.stringify(data),
signal: abortController.signal,
});

if (!response.ok) {
const error = await response.json();
throw error;
}

const responseData = await response.json();
return responseData;
}

/**
* Sends a streaming request to the API.
* @param {string} type Generation type
* @param {object} data Generation data
* @returns {Promise<any>} Streaming generator
*/
async function sendStreamingRequest(type, data) {
switch (main_api) {
case 'openai':
return await sendOpenAIRequest(type, data.prompt, streamingProcessor.abortController.signal);
case 'textgenerationwebui':
return await generateTextGenWithStreaming(data, streamingProcessor.abortController.signal);
case 'novel':
return await generateNovelWithStreaming(data, streamingProcessor.abortController.signal);
case 'kobold':
return await generateKoboldWithStreaming(data, streamingProcessor.abortController.signal);
default:
throw new Error('Streaming is enabled, but the current API does not support streaming.');
}
}

/**
* Gets the generation endpoint URL for the specified API.
* @param {string} api API name
* @returns {string} Generation URL
*/
function getGenerateUrl(api) {
switch (api) {
case 'kobold':
return '/api/backends/kobold/generate';
case 'koboldhorde':
return '/api/backends/koboldhorde/generate';
case 'textgenerationwebui':
return '/api/backends/text-completions/generate';
case 'novel':
return '/api/novelai/generate';
default:
throw new Error(`Unknown API: ${api}`);
}
return generate_url;
}

function throwCircuitBreakerError() {
callPopup(`Could not extract reply in ${MAX_GENERATION_LOOPS} attempts. Try generating again`, 'text');
unblockGeneration();
throw new Error('Generate circuit breaker interruption');
}

function extractTitleFromData(data) {
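
Illustrative lookups against the new switch-based getGenerateUrl() above (the function is module-private to script.js, so this assumes calls from within the same file; the unknown-API string is an arbitrary example):

getGenerateUrl('kobold');              // '/api/backends/kobold/generate'
getGenerateUrl('textgenerationwebui'); // '/api/backends/text-completions/generate'
getGenerateUrl('novel');               // '/api/novelai/generate'
getGenerateUrl('some-unknown-api');    // throws Error: Unknown API: some-unknown-api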

@@ -7178,7 +7162,10 @@ window['SillyTavern'].getContext = function () {
saveReply,
registerSlashCommand: registerSlashCommand,
executeSlashCommands: executeSlashCommands,
registerHelper: registerExtensionHelper,
/**
* @deprecated Handlebars for extensions are no longer supported.
*/
registerHelper: () => { },
registedDebugFunction: registerDebugFunction,
renderExtensionTemplate: renderExtensionTemplate,
callPopup: callPopup,
@@ -7198,7 +7185,7 @@ function swipe_left() { // when we swipe left..but no generation.
}

if (isStreamingEnabled() && streamingProcessor) {
streamingProcessor.isStopped = true;
streamingProcessor.onStopStreaming();
}

const swipe_duration = 120;
@@ -7662,6 +7649,10 @@ export async function processDroppedFiles(files) {
const allowedMimeTypes = [
'application/json',
'image/png',
'application/yaml',
'application/x-yaml',
'text/yaml',
'text/x-yaml',
];

for (const file of files) {
@@ -7675,10 +7666,7 @@ export async function processDroppedFiles(files) {

async function importCharacter(file) {
const ext = file.name.match(/\.(\w+)$/);
if (
!ext ||
(ext[1].toLowerCase() != 'json' && ext[1].toLowerCase() != 'png')
) {
if (!ext || !(['json', 'png', 'yaml', 'yml'].includes(ext[1].toLowerCase()))) {
return;
}

@@ -9327,8 +9315,6 @@ jQuery(async function () {

$(document).on('click', '.mes_stop', function () {
if (streamingProcessor) {
streamingProcessor.abortController.abort();
streamingProcessor.isStopped = true;
streamingProcessor.onStopStreaming();
streamingProcessor = null;
}
@@ -9583,7 +9569,7 @@ jQuery(async function () {
cancelTtsPlay();
if (streamingProcessor) {
console.log('Page reloaded. Aborting streaming...');
streamingProcessor.abortController.abort();
streamingProcessor.onStopStreaming();
}
});

@@ -47,8 +47,6 @@ export function saveMetadataDebounced() {
}, 1000);
}

export const extensionsHandlebars = Handlebars.create();

/**
* Provides an ability for extensions to render HTML templates.
* Templates sanitation and localization is forced.
@@ -61,40 +59,6 @@ export function renderExtensionTemplate(extensionName, templateId, templateData
return renderTemplate(`scripts/extensions/${extensionName}/${templateId}.html`, templateData, sanitize, localize, true);
}

/**
* Registers a Handlebars helper for use in extensions.
* @param {string} name Handlebars helper name
* @param {function} helper Handlebars helper function
*/
export function registerExtensionHelper(name, helper) {
extensionsHandlebars.registerHelper(name, helper);
}

/**
* Applies handlebars extension helpers to a message.
* @param {number} messageId Message index in the chat.
*/
export function processExtensionHelpers(messageId) {
const context = getContext();
const message = context.chat[messageId];

if (!message?.mes || typeof message.mes !== 'string') {
return;
}

// Don't waste time if there are no mustaches
if (!substituteParams(message.mes).includes('{{')) {
return;
}

try {
const template = extensionsHandlebars.compile(substituteParams(message.mes), { noEscape: true });
message.mes = template({});
} catch {
// Ignore
}
}

// Disables parallel updates
class ModuleWorkerWrapper {
constructor(callback) {

@@ -12,6 +12,7 @@ import {
} from '../../../script.js';
import { extension_settings, getContext } from '../../extensions.js';
import { secret_state, writeSecret } from '../../secrets.js';
import { splitRecursive } from '../../utils.js';

export const autoModeOptions = {
NONE: 'none',
@@ -315,6 +316,28 @@ async function translateProviderBing(text, lang) {
throw new Error(response.statusText);
}

/**
* Splits text into chunks and translates each chunk separately
* @param {string} text Text to translate
* @param {string} lang Target language code
* @param {(text: string, lang: string) => Promise<string>} translateFn Function to translate a single chunk (must return a Promise)
* @param {number} chunkSize Maximum chunk size
* @returns {Promise<string>} Translated text
*/
async function chunkedTranslate(text, lang, translateFn, chunkSize = 5000) {
if (text.length <= chunkSize) {
return await translateFn(text, lang);
}

const chunks = splitRecursive(text, chunkSize);

let result = '';
for (const chunk of chunks) {
result += await translateFn(chunk, lang);
}
return result;
}
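
A minimal usage sketch for chunkedTranslate() with a stub provider (the stub and sample strings are illustrative; the real callers below pass translateProviderGoogle, translateProviderDeepLX, and translateProviderBing with limits of 5000, 1500, and 1000 characters):

const stubProvider = async (chunk, lang) => `[${lang}] ${chunk}`;

async function demo() {
    // Short input: a single provider call.
    const short = await chunkedTranslate('Bonjour le monde', 'en', stubProvider, 100);
    // Long input: split with splitRecursive() into chunks no larger than the limit,
    // translated one by one, then concatenated in order.
    const long = await chunkedTranslate('lorem ipsum '.repeat(500), 'en', stubProvider, 1000);
    return { short, long };
}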

/**
* Translates text using the selected translation provider
* @param {string} text Text to translate
@@ -331,15 +354,15 @@ async function translate(text, lang) {
case 'libre':
return await translateProviderLibre(text, lang);
case 'google':
return await translateProviderGoogle(text, lang);
return await chunkedTranslate(text, lang, translateProviderGoogle, 5000);
case 'deepl':
return await translateProviderDeepl(text, lang);
case 'deeplx':
return await translateProviderDeepLX(text, lang);
return await chunkedTranslate(text, lang, translateProviderDeepLX, 1500);
case 'oneringtranslator':
return await translateProviderOneRing(text, lang);
case 'bing':
return await translateProviderBing(text, lang);
return await chunkedTranslate(text, lang, translateProviderBing, 1000);
default:
console.error('Unknown translation provider', extension_settings.translate.provider);
return text;

@@ -21,7 +21,6 @@ import {
MAX_INJECTION_DEPTH,
name1,
name2,
replaceBiasMarkup,
replaceItemizedPromptText,
resultCheckStatus,
saveSettingsDebounced,
@@ -443,8 +442,6 @@ function setOpenAIMessages(chat) {
content = `${chat[j].name}: ${content}`;
}
}
content = replaceBiasMarkup(content);

// remove caret return (waste of tokens)
content = content.replace(/\r/gm, '');

@@ -20,7 +20,7 @@ import {
main_api,
name1,
reloadCurrentChat,
replaceBiasMarkup,
removeMacros,
saveChatConditional,
sendMessageAsUser,
sendSystemMessage,
@@ -1260,7 +1260,7 @@ export async function sendMessageAs(args, text) {

// Messages that do nothing but set bias will be hidden from the context
const bias = extractMessageBias(mesText);
const isSystem = replaceBiasMarkup(mesText).trim().length === 0;
const isSystem = bias && !removeMacros(mesText).length;

const character = characters.find(x => x.name === name);
let force_avatar, original_avatar;
@@ -1313,7 +1313,7 @@ export async function sendNarratorMessage(args, text) {
const name = chat_metadata[NARRATOR_NAME_KEY] || NARRATOR_NAME_DEFAULT;
// Messages that do nothing but set bias will be hidden from the context
const bias = extractMessageBias(text);
const isSystem = replaceBiasMarkup(text).trim().length === 0;
const isSystem = bias && !removeMacros(text).length;

const message = {
name: name,

@@ -637,6 +637,9 @@ hr {
order: 2;
padding-right: 2px;
place-self: center;
cursor: pointer;
transition: 0.3s;
opacity: 0.7;
}

#options_button {

@@ -5,7 +5,7 @@ const Readable = require('stream').Readable;

const { jsonParser } = require('../../express-common');
const { TEXTGEN_TYPES, TOGETHERAI_KEYS, OLLAMA_KEYS } = require('../../constants');
const { forwardFetchResponse } = require('../../util');
const { forwardFetchResponse, trimV1 } = require('../../util');
const { setAdditionalHeaders } = require('../../additional-headers');

const router = express.Router();
@@ -57,6 +57,26 @@ async function parseOllamaStream(jsonStream, request, response) {
}
}

/**
* Abort KoboldCpp generation request.
* @param {string} url Server base URL
* @returns {Promise<void>} Promise resolving when we are done
*/
async function abortKoboldCppRequest(url) {
try {
console.log('Aborting Kobold generation...');
const abortResponse = await fetch(`${url}/api/extra/abort`, {
method: 'POST',
});

if (!abortResponse.ok) {
console.log('Error sending abort request to Kobold:', abortResponse.status, abortResponse.statusText);
}
} catch (error) {
console.log(error);
}
}
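
A hypothetical standalone call to the new helper (the server URL is an example; in this commit it is invoked from the /generate request's 'close' handler further down when a KoboldCpp request is cancelled client-side):

// Fire-and-forget: POSTs to <url>/api/extra/abort; failures are logged, never thrown.
abortKoboldCppRequest('http://127.0.0.1:5001');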

//************** Ooba/OpenAI text completions API
router.post('/status', jsonParser, async function (request, response) {
if (!request.body) return response.sendStatus(400);
@@ -67,9 +87,7 @@ router.post('/status', jsonParser, async function (request, response) {
}

console.log('Trying to connect to API:', request.body);

// Convert to string + remove trailing slash + /v1 suffix
const baseUrl = String(request.body.api_server).replace(/\/$/, '').replace(/\/v1$/, '');
const baseUrl = trimV1(request.body.api_server);

const args = {
headers: { 'Content-Type': 'application/json' },
@@ -195,12 +213,15 @@ router.post('/generate', jsonParser, async function (request, response) {

const controller = new AbortController();
request.socket.removeAllListeners('close');
request.socket.on('close', function () {
request.socket.on('close', async function () {
if (request.body.api_type === TEXTGEN_TYPES.KOBOLDCPP && !response.writableEnded) {
await abortKoboldCppRequest(trimV1(baseUrl));
}

controller.abort();
});

// Convert to string + remove trailing slash + /v1 suffix
let url = String(baseUrl).replace(/\/$/, '').replace(/\/v1$/, '');
let url = trimV1(baseUrl);

if (request.body.legacy_api) {
url += '/v1/generate';
@@ -337,8 +358,7 @@ ollama.post('/caption-image', jsonParser, async function (request, response) {
}

console.log('Ollama caption request:', request.body);
// Convert to string + remove trailing slash + /v1 suffix
const baseUrl = String(request.body.server_url).replace(/\/$/, '').replace(/\/v1$/, '');
const baseUrl = trimV1(request.body.server_url);

const fetchResponse = await fetch(`${baseUrl}/api/generate`, {
method: 'POST',
@@ -383,8 +403,7 @@ llamacpp.post('/caption-image', jsonParser, async function (request, response) {
}

console.log('LlamaCpp caption request:', request.body);
// Convert to string + remove trailing slash + /v1 suffix
const baseUrl = String(request.body.server_url).replace(/\/$/, '').replace(/\/v1$/, '');
const baseUrl = trimV1(request.body.server_url);

const fetchResponse = await fetch(`${baseUrl}/completion`, {
method: 'POST',

@@ -4,6 +4,7 @@ const readline = require('readline');
const express = require('express');
const sanitize = require('sanitize-filename');
const writeFileAtomicSync = require('write-file-atomic').sync;
const yaml = require('yaml');
const _ = require('lodash');

const encode = require('png-chunks-encode');
@@ -19,6 +20,7 @@ const characterCardParser = require('../character-card-parser.js');
const { readWorldInfoFile } = require('./worldinfo');
const { invalidateThumbnail } = require('./thumbnails');
const { importRisuSprites } = require('./sprites');
const defaultAvatarPath = './public/img/ai4.png';

let characters = {};

@@ -394,6 +396,36 @@ function convertWorldInfoToCharacterBook(name, entries) {
return result;
}

/**
* Import a character from a YAML file.
* @param {string} uploadPath Path to the uploaded file
* @param {import('express').Response} response Express response object
*/
function importFromYaml(uploadPath, response) {
const fileText = fs.readFileSync(uploadPath, 'utf8');
fs.rmSync(uploadPath);
const yamlData = yaml.parse(fileText);
console.log('importing from yaml');
yamlData.name = sanitize(yamlData.name);
const fileName = getPngName(yamlData.name);
let char = convertToV2({
'name': yamlData.name,
'description': yamlData.context ?? '',
'first_mes': yamlData.greeting ?? '',
'create_date': humanizedISO8601DateTime(),
'chat': `${yamlData.name} - ${humanizedISO8601DateTime()}`,
'personality': '',
'creatorcomment': '',
'avatar': 'none',
'mes_example': '',
'scenario': '',
'talkativeness': 0.5,
'creator': '',
'tags': '',
});
charaWrite(defaultAvatarPath, JSON.stringify(char), fileName, response, { file_name: fileName });
}
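
A hypothetical minimal card that importFromYaml() can consume, parsed with the same yaml package; the field names come from the mapping above (context becomes the V2 description, greeting becomes first_mes), while the example character is made up:

const exampleCard = yaml.parse([
    'name: Example Character',
    'context: Persona text that ends up in the card description.',
    'greeting: First message text that ends up in first_mes.',
].join('\n'));
// exampleCard.name, exampleCard.context, and exampleCard.greeting are the only
// fields the importer reads; the rest of the V2 card is filled with defaults.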

const router = express.Router();

router.post('/create', urlencodedParser, async function (request, response) {
@@ -760,17 +792,21 @@ function getPngName(file) {
}

router.post('/import', urlencodedParser, async function (request, response) {

if (!request.body || request.file === undefined) return response.sendStatus(400);
if (!request.body || !request.file) return response.sendStatus(400);

let png_name = '';
let filedata = request.file;
let uploadPath = path.join(UPLOADS_PATH, filedata.filename);
var format = request.body.file_type;
const defaultAvatarPath = './public/img/ai4.png';
//console.log(format);
if (filedata) {
if (format == 'json') {
let format = request.body.file_type;

if (format == 'yaml' || format == 'yml') {
try {
importFromYaml(uploadPath, response);
} catch (err) {
console.log(err);
response.send({ error: true });
}
} else if (format == 'json') {
fs.readFile(uploadPath, 'utf8', async (err, data) => {
fs.unlinkSync(uploadPath);

@@ -899,7 +935,6 @@ router.post('/import', urlencodedParser, async function (request, response) {
response.send({ error: true });
}
}
}
});

router.post('/duplicate', jsonParser, async function (request, response) {

@@ -36,20 +36,12 @@ function sanitizeHordeImagePrompt(prompt) {
prompt = prompt.replace(/\b(boy)\b/gmi, 'man');
prompt = prompt.replace(/\b(girls)\b/gmi, 'women');
prompt = prompt.replace(/\b(boys)\b/gmi, 'men');

//always remove these high risk words from prompt, as they add little value to image gen while increasing the risk the prompt gets flagged
prompt = prompt.replace(/\b(under.age|under.aged|underage|underaged|loli|pedo|pedophile|(\w+).year.old|(\w+).years.old|minor|prepubescent|minors|shota)\b/gmi, '');

//if nsfw is detected, do not remove it but apply additional precautions
let isNsfw = prompt.match(/\b(cock|ahegao|hentai|uncensored|lewd|cocks|deepthroat|deepthroating|dick|dicks|cumshot|lesbian|fuck|fucked|fucking|sperm|naked|nipples|tits|boobs|breasts|boob|breast|topless|ass|butt|fingering|masturbate|masturbating|bitch|blowjob|pussy|piss|asshole|dildo|dildos|vibrator|erection|foreskin|handjob|nude|penis|porn|vibrator|virgin|vagina|vulva|threesome|orgy|bdsm|hickey|condom|testicles|anal|bareback|bukkake|creampie|stripper|strap-on|missionary|clitoris|clit|clitty|cowgirl|fleshlight|sex|buttplug|milf|oral|sucking|bondage|orgasm|scissoring|railed|slut|sluts|slutty|cumming|cunt|faggot|sissy|anal|anus|cum|semen|scat|nsfw|xxx|explicit|erotic|horny|aroused|jizz|moan|rape|raped|raping|throbbing|humping)\b/gmi);

if (isNsfw) {
//replace risky subject nouns with person
prompt = prompt.replace(/\b(youngster|infant|baby|toddler|child|teen|kid|kiddie|kiddo|teenager|student|preteen|pre.teen)\b/gmi, 'person');

//remove risky adjectives and related words
prompt = prompt.replace(/\b(young|younger|youthful|youth|small|smaller|smallest|girly|boyish|lil|tiny|teenaged|lit[tl]le|school.aged|school|highschool|kindergarten|teens|children|kids)\b/gmi, '');
}

return prompt;
}
src/util.js (10 changed lines)
@@ -458,6 +458,15 @@ function excludeKeysByYaml(obj, yamlString) {
}
}

/**
* Removes trailing slash and /v1 from a string.
* @param {string} str Input string
* @returns {string} Trimmed string
*/
function trimV1(str) {
return String(str ?? '').replace(/\/$/, '').replace(/\/v1$/, '');
}
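
Illustrative inputs and outputs for trimV1() (the URLs are examples, not values from the commit):

trimV1('http://127.0.0.1:5000/v1');  // 'http://127.0.0.1:5000'
trimV1('http://127.0.0.1:5000/v1/'); // 'http://127.0.0.1:5000'
trimV1('http://127.0.0.1:5000/');    // 'http://127.0.0.1:5000'
trimV1(undefined);                   // ''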

module.exports = {
getConfig,
getConfigValue,
@@ -481,4 +490,5 @@ module.exports = {
getHexString,
mergeObjectWithYaml,
excludeKeysByYaml,
trimV1,
};