Merge remote-tracking branch 'upstream/staging' into staging

This commit is contained in:
DonMoralez
2023-12-22 17:12:59 +02:00
12 changed files with 911 additions and 874 deletions

View File

@ -121,7 +121,7 @@
} }
/* Add the custom checkbox */ /* Add the custom checkbox */
.select2-results__option:before { .select2-results__option::before {
content: ''; content: '';
display: inline-block; display: inline-block;
position: absolute; position: absolute;
@ -141,7 +141,7 @@
} }
/* Add the custom checkbox checkmark */ /* Add the custom checkbox checkmark */
.select2-results__option--selected.select2-results__option:before { .select2-results__option--selected.select2-results__option::before {
content: '\2713'; content: '\2713';
font-weight: bold; font-weight: bold;
color: var(--SmartThemeBodyColor); color: var(--SmartThemeBodyColor);
@ -149,3 +149,11 @@
text-align: center; text-align: center;
line-height: 14px; line-height: 14px;
} }
.select2-results__option.select2-results__message {
background-color: inherit;
}
.select2-results__option.select2-results__message::before {
display: none;
}

View File

@ -3847,7 +3847,7 @@
</div> </div>
<div id="rm_character_import" class="right_menu" style="display: none;"> <div id="rm_character_import" class="right_menu" style="display: none;">
<form id="form_import" action="javascript:void(null);" method="post" enctype="multipart/form-data"> <form id="form_import" action="javascript:void(null);" method="post" enctype="multipart/form-data">
<input multiple type="file" id="character_import_file" accept=".json, image/png" name="avatar"> <input multiple type="file" id="character_import_file" accept=".json, image/png, .yaml, .yml" name="avatar">
<input id="character_import_file_type" name="file_type" class="text_pole" maxlength="999" size="2" value="" autocomplete="off"> <input id="character_import_file_type" name="file_type" class="text_pole" maxlength="999" size="2" value="" autocomplete="off">
</form> </form>
</div> </div>

View File

@ -145,7 +145,7 @@ import {
Stopwatch, Stopwatch,
} from './scripts/utils.js'; } from './scripts/utils.js';
import { ModuleWorkerWrapper, doDailyExtensionUpdatesCheck, extension_settings, getContext, loadExtensionSettings, processExtensionHelpers, registerExtensionHelper, renderExtensionTemplate, runGenerationInterceptors, saveMetadataDebounced } from './scripts/extensions.js'; import { ModuleWorkerWrapper, doDailyExtensionUpdatesCheck, extension_settings, getContext, loadExtensionSettings, renderExtensionTemplate, runGenerationInterceptors, saveMetadataDebounced } from './scripts/extensions.js';
import { COMMENT_NAME_DEFAULT, executeSlashCommands, getSlashCommandsHelp, processChatSlashCommands, registerSlashCommand } from './scripts/slash-commands.js'; import { COMMENT_NAME_DEFAULT, executeSlashCommands, getSlashCommandsHelp, processChatSlashCommands, registerSlashCommand } from './scripts/slash-commands.js';
import { import {
tag_map, tag_map,
@ -338,8 +338,6 @@ export const event_types = {
export const eventSource = new EventEmitter(); export const eventSource = new EventEmitter();
eventSource.on(event_types.MESSAGE_RECEIVED, processExtensionHelpers);
eventSource.on(event_types.MESSAGE_SENT, processExtensionHelpers);
eventSource.on(event_types.CHAT_CHANGED, processChatSlashCommands); eventSource.on(event_types.CHAT_CHANGED, processChatSlashCommands);
const characterGroupOverlay = new BulkEditOverlay(); const characterGroupOverlay = new BulkEditOverlay();
@ -701,8 +699,6 @@ export let user_avatar = 'you.png';
export var amount_gen = 80; //default max length of AI generated responses export var amount_gen = 80; //default max length of AI generated responses
var max_context = 2048; var max_context = 2048;
var message_already_generated = '';
var swipes = true; var swipes = true;
let extension_prompts = {}; let extension_prompts = {};
@ -1478,13 +1474,11 @@ export async function reloadCurrentChat() {
} }
function messageFormatting(mes, ch_name, isSystem, isUser) { function messageFormatting(mes, ch_name, isSystem, isUser) {
if (mes) { if (!mes) {
mesForShowdownParse = mes; return '';
} }
if (!mes) { mesForShowdownParse = mes;
mes = '';
}
// Force isSystem = false on comment messages so they get formatted properly // Force isSystem = false on comment messages so they get formatted properly
if (ch_name === COMMENT_NAME_DEFAULT && isSystem && !isUser) { if (ch_name === COMMENT_NAME_DEFAULT && isSystem && !isUser) {
@ -1544,7 +1538,6 @@ function messageFormatting(mes, ch_name, isSystem, isUser) {
mes = mes.replaceAll('\\begin{align*}', '$$'); mes = mes.replaceAll('\\begin{align*}', '$$');
mes = mes.replaceAll('\\end{align*}', '$$'); mes = mes.replaceAll('\\end{align*}', '$$');
mes = converter.makeHtml(mes); mes = converter.makeHtml(mes);
mes = replaceBiasMarkup(mes);
mes = mes.replace(/<code(.*)>[\s\S]*?<\/code>/g, function (match) { mes = mes.replace(/<code(.*)>[\s\S]*?<\/code>/g, function (match) {
// Firefox creates extra newlines from <br>s in code blocks, so we replace them before converting newlines to <br>s. // Firefox creates extra newlines from <br>s in code blocks, so we replace them before converting newlines to <br>s.
@ -2600,6 +2593,21 @@ function hideStopButton() {
} }
class StreamingProcessor { class StreamingProcessor {
constructor(type, force_name2, timeStarted, messageAlreadyGenerated) {
this.result = '';
this.messageId = -1;
this.type = type;
this.force_name2 = force_name2;
this.isStopped = false;
this.isFinished = false;
this.generator = this.nullStreamingGeneration;
this.abortController = new AbortController();
this.firstMessageText = '...';
this.timeStarted = timeStarted;
this.messageAlreadyGenerated = messageAlreadyGenerated;
this.swipes = [];
}
showMessageButtons(messageId) { showMessageButtons(messageId) {
if (messageId == -1) { if (messageId == -1) {
return; return;
@ -2635,32 +2643,16 @@ class StreamingProcessor {
return messageId; return messageId;
} }
removePrefix(text) {
const name1Marker = `${name1}: `;
const name2Marker = `${name2}: `;
if (text) {
if (text.startsWith(name1Marker)) {
text = text.replace(name1Marker, '');
}
if (text.startsWith(name2Marker)) {
text = text.replace(name2Marker, '');
}
}
return text;
}
onProgressStreaming(messageId, text, isFinal) { onProgressStreaming(messageId, text, isFinal) {
const isImpersonate = this.type == 'impersonate'; const isImpersonate = this.type == 'impersonate';
const isContinue = this.type == 'continue'; const isContinue = this.type == 'continue';
if (!isImpersonate && !isContinue && Array.isArray(this.swipes) && this.swipes.length > 0) { if (!isImpersonate && !isContinue && Array.isArray(this.swipes) && this.swipes.length > 0) {
for (let i = 0; i < this.swipes.length; i++) { for (let i = 0; i < this.swipes.length; i++) {
this.swipes[i] = cleanUpMessage(this.removePrefix(this.swipes[i]), false, false, true, this.stoppingStrings); this.swipes[i] = cleanUpMessage(this.swipes[i], false, false, true, this.stoppingStrings);
} }
} }
text = this.removePrefix(text);
let processedText = cleanUpMessage(text, isImpersonate, isContinue, !isFinal, this.stoppingStrings); let processedText = cleanUpMessage(text, isImpersonate, isContinue, !isFinal, this.stoppingStrings);
// Predict unbalanced asterisks / quotes during streaming // Predict unbalanced asterisks / quotes during streaming
@ -2786,6 +2778,9 @@ class StreamingProcessor {
} }
onErrorStreaming() { onErrorStreaming() {
this.abortController.abort();
this.isStopped = true;
this.hideMessageButtons(this.messageId); this.hideMessageButtons(this.messageId);
$('#send_textarea').removeAttr('disabled'); $('#send_textarea').removeAttr('disabled');
is_send_press = false; is_send_press = false;
@ -2811,20 +2806,6 @@ class StreamingProcessor {
throw new Error('Generation function for streaming is not hooked up'); throw new Error('Generation function for streaming is not hooked up');
} }
constructor(type, force_name2, timeStarted) {
this.result = '';
this.messageId = -1;
this.type = type;
this.force_name2 = force_name2;
this.isStopped = false;
this.isFinished = false;
this.generator = this.nullStreamingGeneration;
this.abortController = new AbortController();
this.firstMessageText = '...';
this.timeStarted = timeStarted;
this.swipes = [];
}
async generate() { async generate() {
if (this.messageId == -1) { if (this.messageId == -1) {
this.messageId = await this.onStartStreaming(this.firstMessageText); this.messageId = await this.onStartStreaming(this.firstMessageText);
@ -2844,13 +2825,12 @@ class StreamingProcessor {
for await (const { text, swipes } of this.generator()) { for await (const { text, swipes } of this.generator()) {
timestamps.push(Date.now()); timestamps.push(Date.now());
if (this.isStopped) { if (this.isStopped) {
this.onStopStreaming();
return; return;
} }
this.result = text; this.result = text;
this.swipes = swipes; this.swipes = swipes;
await sw.tick(() => this.onProgressStreaming(this.messageId, message_already_generated + text)); await sw.tick(() => this.onProgressStreaming(this.messageId, this.messageAlreadyGenerated + text));
} }
const seconds = (timestamps[timestamps.length - 1] - timestamps[0]) / 1000; const seconds = (timestamps[timestamps.length - 1] - timestamps[0]) / 1000;
console.warn(`Stream stats: ${timestamps.length} tokens, ${seconds.toFixed(2)} seconds, rate: ${Number(timestamps.length / seconds).toFixed(2)} TPS`); console.warn(`Stream stats: ${timestamps.length} tokens, ${seconds.toFixed(2)} seconds, rate: ${Number(timestamps.length / seconds).toFixed(2)} TPS`);
@ -2858,7 +2838,6 @@ class StreamingProcessor {
catch (err) { catch (err) {
console.error(err); console.error(err);
this.onErrorStreaming(); this.onErrorStreaming();
this.isStopped = true;
return; return;
} }
@ -2965,7 +2944,7 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
const isInstruct = power_user.instruct.enabled && main_api !== 'openai'; const isInstruct = power_user.instruct.enabled && main_api !== 'openai';
const isImpersonate = type == 'impersonate'; const isImpersonate = type == 'impersonate';
message_already_generated = isImpersonate ? `${name1}: ` : `${name2}: `; let message_already_generated = isImpersonate ? `${name1}: ` : `${name2}: `;
const interruptedByCommand = await processCommands($('#send_textarea').val(), type, dryRun); const interruptedByCommand = await processCommands($('#send_textarea').val(), type, dryRun);
@ -3088,7 +3067,7 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
//for normal messages sent from user.. //for normal messages sent from user..
if ((textareaText != '' || hasPendingFileAttachment()) && !automatic_trigger && type !== 'quiet' && !dryRun) { if ((textareaText != '' || hasPendingFileAttachment()) && !automatic_trigger && type !== 'quiet' && !dryRun) {
// If user message contains no text other than bias - send as a system message // If user message contains no text other than bias - send as a system message
if (messageBias && replaceBiasMarkup(textareaText).trim().length === 0) { if (messageBias && !removeMacros(textareaText)) {
sendSystemMessage(system_message_types.GENERIC, ' ', { bias: messageBias }); sendSystemMessage(system_message_types.GENERIC, ' ', { bias: messageBias });
} }
else { else {
@ -3378,10 +3357,6 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
let mesSend = []; let mesSend = [];
console.debug('calling runGenerate'); console.debug('calling runGenerate');
if (!dryRun) {
streamingProcessor = isStreamingEnabled() && type !== 'quiet' ? new StreamingProcessor(type, force_name2, generation_started) : false;
}
if (isContinue) { if (isContinue) {
// Coping mechanism for OAI spacing // Coping mechanism for OAI spacing
const isForceInstruct = isOpenRouterWithInstruct(); const isForceInstruct = isOpenRouterWithInstruct();
@ -3389,21 +3364,16 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
cyclePrompt += ' '; cyclePrompt += ' ';
continue_mag += ' '; continue_mag += ' ';
} }
// Save reply does add cycle text to the prompt, so it's not needed here
streamingProcessor && (streamingProcessor.firstMessageText = '');
message_already_generated = continue_mag; message_already_generated = continue_mag;
} }
const originalType = type; const originalType = type;
return runGenerate(cyclePrompt);
async function runGenerate(cycleGenerationPrompt = '') {
if (!dryRun) { if (!dryRun) {
is_send_press = true; is_send_press = true;
} }
generatedPromptCache += cycleGenerationPrompt; generatedPromptCache += cyclePrompt;
if (generatedPromptCache.length == 0 || type === 'continue') { if (generatedPromptCache.length == 0 || type === 'continue') {
console.debug('generating prompt'); console.debug('generating prompt');
chatString = ''; chatString = '';
@ -3771,14 +3741,13 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
} }
} }
return new Promise(async (resolve, reject) => { async function finishGenerating() {
if (true === dryRun) return onSuccess({ error: 'dryRun' }); if (dryRun) return { error: 'dryRun' };
if (power_user.console_log_prompts) { if (power_user.console_log_prompts) {
console.log(generate_data.prompt); console.log(generate_data.prompt);
} }
let generate_url = getGenerateUrl(main_api);
console.debug('rungenerate calling API'); console.debug('rungenerate calling API');
showStopButton(); showStopButton();
@ -3825,55 +3794,16 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
} }
console.debug(`pushed prompt bits to itemizedPrompts array. Length is now: ${itemizedPrompts.length}`); console.debug(`pushed prompt bits to itemizedPrompts array. Length is now: ${itemizedPrompts.length}`);
/** @type {Promise<any>} */
let streamingGeneratorPromise = Promise.resolve();
if (main_api == 'openai') {
if (isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = sendOpenAIRequest(type, generate_data.prompt, streamingProcessor.abortController.signal);
}
else {
sendOpenAIRequest(type, generate_data.prompt, abortController.signal).then(onSuccess).catch(onError);
}
}
else if (main_api == 'koboldhorde') {
generateHorde(finalPrompt, generate_data, abortController.signal, true).then(onSuccess).catch(onError);
}
else if (main_api == 'textgenerationwebui' && isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = generateTextGenWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else if (main_api == 'novel' && isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = generateNovelWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else if (main_api == 'kobold' && isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = generateKoboldWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else {
try {
const response = await fetch(generate_url, {
method: 'POST',
headers: getRequestHeaders(),
cache: 'no-cache',
body: JSON.stringify(generate_data),
signal: abortController.signal,
});
if (!response.ok) {
const error = await response.json();
throw error;
}
const data = await response.json();
onSuccess(data);
} catch (error) {
onError(error);
}
}
if (isStreamingEnabled() && type !== 'quiet') { if (isStreamingEnabled() && type !== 'quiet') {
try { streamingProcessor = new StreamingProcessor(type, force_name2, generation_started, message_already_generated);
const streamingGenerator = await streamingGeneratorPromise; if (isContinue) {
streamingProcessor.generator = streamingGenerator; // Save reply does add cycle text to the prompt, so it's not needed here
streamingProcessor.firstMessageText = '';
}
streamingProcessor.generator = await sendStreamingRequest(type, generate_data);
hideSwipeButtons(); hideSwipeButtons();
let getMessage = await streamingProcessor.generate(); let getMessage = await streamingProcessor.generate();
let messageChunk = cleanUpMessage(getMessage, isImpersonate, isContinue, false); let messageChunk = cleanUpMessage(getMessage, isImpersonate, isContinue, false);
@ -3887,19 +3817,19 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
streamingProcessor = null; streamingProcessor = null;
triggerAutoContinue(messageChunk, isImpersonate); triggerAutoContinue(messageChunk, isImpersonate);
} }
resolve(); } else {
} catch (err) { return await sendGenerationRequest(type, generate_data);
onError(err); }
} }
} return finishGenerating().then(onSuccess, onError);
async function onSuccess(data) { async function onSuccess(data) {
if (!data) return;
let messageChunk = ''; let messageChunk = '';
if (data.error == 'dryRun') { if (data.error == 'dryRun') {
generatedPromptCache = ''; generatedPromptCache = '';
resolve();
return; return;
} }
@ -3928,7 +3858,8 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
await eventSource.emit(event_types.IMPERSONATE_READY, getMessage); await eventSource.emit(event_types.IMPERSONATE_READY, getMessage);
} }
else if (type == 'quiet') { else if (type == 'quiet') {
resolve(getMessage); unblockGeneration();
return getMessage;
} }
else { else {
// Without streaming we'll be having a full message on continuation. Treat it as a last chunk. // Without streaming we'll be having a full message on continuation. Treat it as a last chunk.
@ -3948,21 +3879,18 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
maxLoops ??= MAX_GENERATION_LOOPS; maxLoops ??= MAX_GENERATION_LOOPS;
if (maxLoops === 0) { if (maxLoops === 0) {
reject(new Error('Generate circuit breaker interruption'));
if (type !== 'quiet') { if (type !== 'quiet') {
throwCircuitBreakerError(); throwCircuitBreakerError();
} }
return; throw new Error('Generate circuit breaker interruption');
} }
// regenerate with character speech reenforced // regenerate with character speech reenforced
// to make sure we leave on swipe type while also adding the name2 appendage // to make sure we leave on swipe type while also adding the name2 appendage
delay(1000).then(async () => { await delay(1000);
// The first await is for waiting for the generate to start. The second one is waiting for it to finish // The first await is for waiting for the generate to start. The second one is waiting for it to finish
const result = await await Generate(type, { automatic_trigger, force_name2: true, quiet_prompt, skipWIAN, force_chid, maxLoops: maxLoops - 1 }); const result = await await Generate(type, { automatic_trigger, force_name2: true, quiet_prompt, skipWIAN, force_chid, maxLoops: maxLoops - 1 });
resolve(result); return result;
});
return;
} }
if (power_user.auto_swipe) { if (power_user.auto_swipe) {
@ -3989,7 +3917,6 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
is_send_press = false; is_send_press = false;
swipe_right(); swipe_right();
// TODO: do we want to resolve after an auto-swipe? // TODO: do we want to resolve after an auto-swipe?
resolve();
return; return;
} }
} }
@ -3999,7 +3926,7 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
if (data?.response) { if (data?.response) {
toastr.error(data.response, 'API Error'); toastr.error(data.response, 'API Error');
} }
reject(data.response); throw data?.response;
} }
console.debug('/api/chats/save called by /Generate'); console.debug('/api/chats/save called by /Generate');
@ -4010,7 +3937,6 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
if (type !== 'quiet') { if (type !== 'quiet') {
triggerAutoContinue(messageChunk, isImpersonate); triggerAutoContinue(messageChunk, isImpersonate);
} }
resolve();
} }
function onError(exception) { function onError(exception) {
@ -4018,23 +3944,18 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
toastr.error(exception.error.message, 'Error', { timeOut: 10000, extendedTimeOut: 20000 }); toastr.error(exception.error.message, 'Error', { timeOut: 10000, extendedTimeOut: 20000 });
} }
reject(exception);
unblockGeneration(); unblockGeneration();
console.log(exception); console.log(exception);
streamingProcessor = null; streamingProcessor = null;
throw exception;
} }
});
} //rungenerate ends
} else { //generate's primary loop ends, after this is error handling for no-connection or safety-id } else { //generate's primary loop ends, after this is error handling for no-connection or safety-id
if (this_chid === undefined || this_chid === 'invalid-safety-id') { if (this_chid === undefined || this_chid === 'invalid-safety-id') {
toastr.warning('Сharacter is not selected'); toastr.warning('Сharacter is not selected');
} }
is_send_press = false; is_send_press = false;
} }
}
//console.log('generate ending');
} //generate ends
function flushWIDepthInjections() { function flushWIDepthInjections() {
//prevent custom depth WI entries (which have unique random key names) from duplicating //prevent custom depth WI entries (which have unique random key names) from duplicating
@ -4165,13 +4086,16 @@ function formatMessageHistoryItem(chatItem, isInstruct, forceOutputSequence) {
textResult = formatInstructModeChat(itemName, chatItem.mes, chatItem.is_user, isNarratorType, chatItem.force_avatar, name1, name2, forceOutputSequence); textResult = formatInstructModeChat(itemName, chatItem.mes, chatItem.is_user, isNarratorType, chatItem.force_avatar, name1, name2, forceOutputSequence);
} }
textResult = replaceBiasMarkup(textResult);
return textResult; return textResult;
} }
export function replaceBiasMarkup(str) { /**
return (str ?? '').replace(/\{\{[\s\S]*?\}\}/gm, ''); * Removes all {{macros}} from a string.
* @param {string} str String to remove macros from.
* @returns {string} String with macros removed.
*/
export function removeMacros(str) {
return (str ?? '').replace(/\{\{[\s\S]*?\}\}/gm, '').trim();
} }
/** /**
@ -4481,22 +4405,82 @@ function setInContextMessages(lastmsg, type) {
} }
} }
function getGenerateUrl(api) { /**
let generate_url = ''; * Sends a non-streaming request to the API.
if (api == 'kobold') { * @param {string} type Generation type
generate_url = '/api/backends/kobold/generate'; * @param {object} data Generation data
} else if (api == 'textgenerationwebui') { * @returns {Promise<object>} Response data from the API
generate_url = '/api/backends/text-completions/generate'; */
} else if (api == 'novel') { async function sendGenerationRequest(type, data) {
generate_url = '/api/novelai/generate'; if (main_api === 'openai') {
return await sendOpenAIRequest(type, data.prompt, abortController.signal);
}
if (main_api === 'koboldhorde') {
return await generateHorde(data.prompt, data, abortController.signal, true);
}
const response = await fetch(getGenerateUrl(main_api), {
method: 'POST',
headers: getRequestHeaders(),
cache: 'no-cache',
body: JSON.stringify(data),
signal: abortController.signal,
});
if (!response.ok) {
const error = await response.json();
throw error;
}
const responseData = await response.json();
return responseData;
}
/**
* Sends a streaming request to the API.
* @param {string} type Generation type
* @param {object} data Generation data
* @returns {Promise<any>} Streaming generator
*/
async function sendStreamingRequest(type, data) {
switch (main_api) {
case 'openai':
return await sendOpenAIRequest(type, data.prompt, streamingProcessor.abortController.signal);
case 'textgenerationwebui':
return await generateTextGenWithStreaming(data, streamingProcessor.abortController.signal);
case 'novel':
return await generateNovelWithStreaming(data, streamingProcessor.abortController.signal);
case 'kobold':
return await generateKoboldWithStreaming(data, streamingProcessor.abortController.signal);
default:
throw new Error('Streaming is enabled, but the current API does not support streaming.');
}
}
/**
* Gets the generation endpoint URL for the specified API.
* @param {string} api API name
* @returns {string} Generation URL
*/
function getGenerateUrl(api) {
switch (api) {
case 'kobold':
return '/api/backends/kobold/generate';
case 'koboldhorde':
return '/api/backends/koboldhorde/generate';
case 'textgenerationwebui':
return '/api/backends/text-completions/generate';
case 'novel':
return '/api/novelai/generate';
default:
throw new Error(`Unknown API: ${api}`);
} }
return generate_url;
} }
function throwCircuitBreakerError() { function throwCircuitBreakerError() {
callPopup(`Could not extract reply in ${MAX_GENERATION_LOOPS} attempts. Try generating again`, 'text'); callPopup(`Could not extract reply in ${MAX_GENERATION_LOOPS} attempts. Try generating again`, 'text');
unblockGeneration(); unblockGeneration();
throw new Error('Generate circuit breaker interruption');
} }
function extractTitleFromData(data) { function extractTitleFromData(data) {
@ -7178,7 +7162,10 @@ window['SillyTavern'].getContext = function () {
saveReply, saveReply,
registerSlashCommand: registerSlashCommand, registerSlashCommand: registerSlashCommand,
executeSlashCommands: executeSlashCommands, executeSlashCommands: executeSlashCommands,
registerHelper: registerExtensionHelper, /**
* @deprecated Handlebars for extensions are no longer supported.
*/
registerHelper: () => { },
registedDebugFunction: registerDebugFunction, registedDebugFunction: registerDebugFunction,
renderExtensionTemplate: renderExtensionTemplate, renderExtensionTemplate: renderExtensionTemplate,
callPopup: callPopup, callPopup: callPopup,
@ -7198,7 +7185,7 @@ function swipe_left() { // when we swipe left..but no generation.
} }
if (isStreamingEnabled() && streamingProcessor) { if (isStreamingEnabled() && streamingProcessor) {
streamingProcessor.isStopped = true; streamingProcessor.onStopStreaming();
} }
const swipe_duration = 120; const swipe_duration = 120;
@ -7662,6 +7649,10 @@ export async function processDroppedFiles(files) {
const allowedMimeTypes = [ const allowedMimeTypes = [
'application/json', 'application/json',
'image/png', 'image/png',
'application/yaml',
'application/x-yaml',
'text/yaml',
'text/x-yaml',
]; ];
for (const file of files) { for (const file of files) {
@ -7675,10 +7666,7 @@ export async function processDroppedFiles(files) {
async function importCharacter(file) { async function importCharacter(file) {
const ext = file.name.match(/\.(\w+)$/); const ext = file.name.match(/\.(\w+)$/);
if ( if (!ext || !(['json', 'png', 'yaml', 'yml'].includes(ext[1].toLowerCase()))) {
!ext ||
(ext[1].toLowerCase() != 'json' && ext[1].toLowerCase() != 'png')
) {
return; return;
} }
@ -9327,8 +9315,6 @@ jQuery(async function () {
$(document).on('click', '.mes_stop', function () { $(document).on('click', '.mes_stop', function () {
if (streamingProcessor) { if (streamingProcessor) {
streamingProcessor.abortController.abort();
streamingProcessor.isStopped = true;
streamingProcessor.onStopStreaming(); streamingProcessor.onStopStreaming();
streamingProcessor = null; streamingProcessor = null;
} }
@ -9583,7 +9569,7 @@ jQuery(async function () {
cancelTtsPlay(); cancelTtsPlay();
if (streamingProcessor) { if (streamingProcessor) {
console.log('Page reloaded. Aborting streaming...'); console.log('Page reloaded. Aborting streaming...');
streamingProcessor.abortController.abort(); streamingProcessor.onStopStreaming();
} }
}); });

View File

@ -47,8 +47,6 @@ export function saveMetadataDebounced() {
}, 1000); }, 1000);
} }
export const extensionsHandlebars = Handlebars.create();
/** /**
* Provides an ability for extensions to render HTML templates. * Provides an ability for extensions to render HTML templates.
* Templates sanitation and localization is forced. * Templates sanitation and localization is forced.
@ -61,40 +59,6 @@ export function renderExtensionTemplate(extensionName, templateId, templateData
return renderTemplate(`scripts/extensions/${extensionName}/${templateId}.html`, templateData, sanitize, localize, true); return renderTemplate(`scripts/extensions/${extensionName}/${templateId}.html`, templateData, sanitize, localize, true);
} }
/**
* Registers a Handlebars helper for use in extensions.
* @param {string} name Handlebars helper name
* @param {function} helper Handlebars helper function
*/
export function registerExtensionHelper(name, helper) {
extensionsHandlebars.registerHelper(name, helper);
}
/**
* Applies handlebars extension helpers to a message.
* @param {number} messageId Message index in the chat.
*/
export function processExtensionHelpers(messageId) {
const context = getContext();
const message = context.chat[messageId];
if (!message?.mes || typeof message.mes !== 'string') {
return;
}
// Don't waste time if there are no mustaches
if (!substituteParams(message.mes).includes('{{')) {
return;
}
try {
const template = extensionsHandlebars.compile(substituteParams(message.mes), { noEscape: true });
message.mes = template({});
} catch {
// Ignore
}
}
// Disables parallel updates // Disables parallel updates
class ModuleWorkerWrapper { class ModuleWorkerWrapper {
constructor(callback) { constructor(callback) {

View File

@ -12,6 +12,7 @@ import {
} from '../../../script.js'; } from '../../../script.js';
import { extension_settings, getContext } from '../../extensions.js'; import { extension_settings, getContext } from '../../extensions.js';
import { secret_state, writeSecret } from '../../secrets.js'; import { secret_state, writeSecret } from '../../secrets.js';
import { splitRecursive } from '../../utils.js';
export const autoModeOptions = { export const autoModeOptions = {
NONE: 'none', NONE: 'none',
@ -315,6 +316,28 @@ async function translateProviderBing(text, lang) {
throw new Error(response.statusText); throw new Error(response.statusText);
} }
/**
* Splits text into chunks and translates each chunk separately
* @param {string} text Text to translate
* @param {string} lang Target language code
* @param {(text: string, lang: string) => Promise<string>} translateFn Function to translate a single chunk (must return a Promise)
* @param {number} chunkSize Maximum chunk size
* @returns {Promise<string>} Translated text
*/
async function chunkedTranslate(text, lang, translateFn, chunkSize = 5000) {
if (text.length <= chunkSize) {
return await translateFn(text, lang);
}
const chunks = splitRecursive(text, chunkSize);
let result = '';
for (const chunk of chunks) {
result += await translateFn(chunk, lang);
}
return result;
}
/** /**
* Translates text using the selected translation provider * Translates text using the selected translation provider
* @param {string} text Text to translate * @param {string} text Text to translate
@ -331,15 +354,15 @@ async function translate(text, lang) {
case 'libre': case 'libre':
return await translateProviderLibre(text, lang); return await translateProviderLibre(text, lang);
case 'google': case 'google':
return await translateProviderGoogle(text, lang); return await chunkedTranslate(text, lang, translateProviderGoogle, 5000);
case 'deepl': case 'deepl':
return await translateProviderDeepl(text, lang); return await translateProviderDeepl(text, lang);
case 'deeplx': case 'deeplx':
return await translateProviderDeepLX(text, lang); return await chunkedTranslate(text, lang, translateProviderDeepLX, 1500);
case 'oneringtranslator': case 'oneringtranslator':
return await translateProviderOneRing(text, lang); return await translateProviderOneRing(text, lang);
case 'bing': case 'bing':
return await translateProviderBing(text, lang); return await chunkedTranslate(text, lang, translateProviderBing, 1000);
default: default:
console.error('Unknown translation provider', extension_settings.translate.provider); console.error('Unknown translation provider', extension_settings.translate.provider);
return text; return text;

View File

@ -21,7 +21,6 @@ import {
MAX_INJECTION_DEPTH, MAX_INJECTION_DEPTH,
name1, name1,
name2, name2,
replaceBiasMarkup,
replaceItemizedPromptText, replaceItemizedPromptText,
resultCheckStatus, resultCheckStatus,
saveSettingsDebounced, saveSettingsDebounced,
@ -443,8 +442,6 @@ function setOpenAIMessages(chat) {
content = `${chat[j].name}: ${content}`; content = `${chat[j].name}: ${content}`;
} }
} }
content = replaceBiasMarkup(content);
// remove caret return (waste of tokens) // remove caret return (waste of tokens)
content = content.replace(/\r/gm, ''); content = content.replace(/\r/gm, '');

View File

@ -20,7 +20,7 @@ import {
main_api, main_api,
name1, name1,
reloadCurrentChat, reloadCurrentChat,
replaceBiasMarkup, removeMacros,
saveChatConditional, saveChatConditional,
sendMessageAsUser, sendMessageAsUser,
sendSystemMessage, sendSystemMessage,
@ -1260,7 +1260,7 @@ export async function sendMessageAs(args, text) {
// Messages that do nothing but set bias will be hidden from the context // Messages that do nothing but set bias will be hidden from the context
const bias = extractMessageBias(mesText); const bias = extractMessageBias(mesText);
const isSystem = replaceBiasMarkup(mesText).trim().length === 0; const isSystem = bias && !removeMacros(mesText).length;
const character = characters.find(x => x.name === name); const character = characters.find(x => x.name === name);
let force_avatar, original_avatar; let force_avatar, original_avatar;
@ -1313,7 +1313,7 @@ export async function sendNarratorMessage(args, text) {
const name = chat_metadata[NARRATOR_NAME_KEY] || NARRATOR_NAME_DEFAULT; const name = chat_metadata[NARRATOR_NAME_KEY] || NARRATOR_NAME_DEFAULT;
// Messages that do nothing but set bias will be hidden from the context // Messages that do nothing but set bias will be hidden from the context
const bias = extractMessageBias(text); const bias = extractMessageBias(text);
const isSystem = replaceBiasMarkup(text).trim().length === 0; const isSystem = bias && !removeMacros(text).length;
const message = { const message = {
name: name, name: name,

View File

@ -637,6 +637,9 @@ hr {
order: 2; order: 2;
padding-right: 2px; padding-right: 2px;
place-self: center; place-self: center;
cursor: pointer;
transition: 0.3s;
opacity: 0.7;
} }
#options_button { #options_button {

View File

@ -5,7 +5,7 @@ const Readable = require('stream').Readable;
const { jsonParser } = require('../../express-common'); const { jsonParser } = require('../../express-common');
const { TEXTGEN_TYPES, TOGETHERAI_KEYS, OLLAMA_KEYS } = require('../../constants'); const { TEXTGEN_TYPES, TOGETHERAI_KEYS, OLLAMA_KEYS } = require('../../constants');
const { forwardFetchResponse } = require('../../util'); const { forwardFetchResponse, trimV1 } = require('../../util');
const { setAdditionalHeaders } = require('../../additional-headers'); const { setAdditionalHeaders } = require('../../additional-headers');
const router = express.Router(); const router = express.Router();
@ -57,6 +57,26 @@ async function parseOllamaStream(jsonStream, request, response) {
} }
} }
/**
 * Sends a best-effort abort request to a KoboldCpp server.
 * Network failures and non-OK responses are only logged, never thrown,
 * so an unreachable server cannot break the caller's cleanup path.
 * @param {string} url Server base URL
 * @returns {Promise<void>} Promise resolving when we are done
 */
async function abortKoboldCppRequest(url) {
    console.log('Aborting Kobold generation...');
    try {
        const result = await fetch(`${url}/api/extra/abort`, { method: 'POST' });
        if (!result.ok) {
            console.log('Error sending abort request to Kobold:', result.status, result.statusText);
        }
    } catch (error) {
        // Best effort only: swallow the error after logging it.
        console.log(error);
    }
}
//************** Ooba/OpenAI text completions API //************** Ooba/OpenAI text completions API
router.post('/status', jsonParser, async function (request, response) { router.post('/status', jsonParser, async function (request, response) {
if (!request.body) return response.sendStatus(400); if (!request.body) return response.sendStatus(400);
@ -67,9 +87,7 @@ router.post('/status', jsonParser, async function (request, response) {
} }
console.log('Trying to connect to API:', request.body); console.log('Trying to connect to API:', request.body);
const baseUrl = trimV1(request.body.api_server);
// Convert to string + remove trailing slash + /v1 suffix
const baseUrl = String(request.body.api_server).replace(/\/$/, '').replace(/\/v1$/, '');
const args = { const args = {
headers: { 'Content-Type': 'application/json' }, headers: { 'Content-Type': 'application/json' },
@ -195,12 +213,15 @@ router.post('/generate', jsonParser, async function (request, response) {
const controller = new AbortController(); const controller = new AbortController();
request.socket.removeAllListeners('close'); request.socket.removeAllListeners('close');
request.socket.on('close', function () { request.socket.on('close', async function () {
if (request.body.api_type === TEXTGEN_TYPES.KOBOLDCPP && !response.writableEnded) {
await abortKoboldCppRequest(trimV1(baseUrl));
}
controller.abort(); controller.abort();
}); });
// Convert to string + remove trailing slash + /v1 suffix let url = trimV1(baseUrl);
let url = String(baseUrl).replace(/\/$/, '').replace(/\/v1$/, '');
if (request.body.legacy_api) { if (request.body.legacy_api) {
url += '/v1/generate'; url += '/v1/generate';
@ -337,8 +358,7 @@ ollama.post('/caption-image', jsonParser, async function (request, response) {
} }
console.log('Ollama caption request:', request.body); console.log('Ollama caption request:', request.body);
// Convert to string + remove trailing slash + /v1 suffix const baseUrl = trimV1(request.body.server_url);
const baseUrl = String(request.body.server_url).replace(/\/$/, '').replace(/\/v1$/, '');
const fetchResponse = await fetch(`${baseUrl}/api/generate`, { const fetchResponse = await fetch(`${baseUrl}/api/generate`, {
method: 'POST', method: 'POST',
@ -383,8 +403,7 @@ llamacpp.post('/caption-image', jsonParser, async function (request, response) {
} }
console.log('LlamaCpp caption request:', request.body); console.log('LlamaCpp caption request:', request.body);
// Convert to string + remove trailing slash + /v1 suffix const baseUrl = trimV1(request.body.server_url);
const baseUrl = String(request.body.server_url).replace(/\/$/, '').replace(/\/v1$/, '');
const fetchResponse = await fetch(`${baseUrl}/completion`, { const fetchResponse = await fetch(`${baseUrl}/completion`, {
method: 'POST', method: 'POST',

View File

@ -4,6 +4,7 @@ const readline = require('readline');
const express = require('express'); const express = require('express');
const sanitize = require('sanitize-filename'); const sanitize = require('sanitize-filename');
const writeFileAtomicSync = require('write-file-atomic').sync; const writeFileAtomicSync = require('write-file-atomic').sync;
const yaml = require('yaml');
const _ = require('lodash'); const _ = require('lodash');
const encode = require('png-chunks-encode'); const encode = require('png-chunks-encode');
@ -19,6 +20,7 @@ const characterCardParser = require('../character-card-parser.js');
const { readWorldInfoFile } = require('./worldinfo'); const { readWorldInfoFile } = require('./worldinfo');
const { invalidateThumbnail } = require('./thumbnails'); const { invalidateThumbnail } = require('./thumbnails');
const { importRisuSprites } = require('./sprites'); const { importRisuSprites } = require('./sprites');
const defaultAvatarPath = './public/img/ai4.png';
let characters = {}; let characters = {};
@ -394,6 +396,36 @@ function convertWorldInfoToCharacterBook(name, entries) {
return result; return result;
} }
/**
 * Import a character from a YAML file.
 * Reads and deletes the uploaded file, maps the YAML fields onto a V2
 * character card (only name/context/greeting carry over; the rest gets
 * defaults), and writes the card out using the default avatar image.
 * @param {string} uploadPath Path to the uploaded file
 * @param {import('express').Response} response Express response object
 */
function importFromYaml(uploadPath, response) {
    const rawText = fs.readFileSync(uploadPath, 'utf8');
    // The upload is consumed regardless of whether parsing succeeds.
    fs.rmSync(uploadPath);

    const parsed = yaml.parse(rawText);
    console.log('importing from yaml');
    parsed.name = sanitize(parsed.name);
    const fileName = getPngName(parsed.name);

    const character = convertToV2({
        'name': parsed.name,
        'description': parsed.context ?? '',
        'first_mes': parsed.greeting ?? '',
        'create_date': humanizedISO8601DateTime(),
        'chat': `${parsed.name} - ${humanizedISO8601DateTime()}`,
        'personality': '',
        'creatorcomment': '',
        'avatar': 'none',
        'mes_example': '',
        'scenario': '',
        'talkativeness': 0.5,
        'creator': '',
        'tags': '',
    });

    charaWrite(defaultAvatarPath, JSON.stringify(character), fileName, response, { file_name: fileName });
}
const router = express.Router(); const router = express.Router();
router.post('/create', urlencodedParser, async function (request, response) { router.post('/create', urlencodedParser, async function (request, response) {
@ -760,17 +792,21 @@ function getPngName(file) {
} }
router.post('/import', urlencodedParser, async function (request, response) { router.post('/import', urlencodedParser, async function (request, response) {
if (!request.body || !request.file) return response.sendStatus(400);
if (!request.body || request.file === undefined) return response.sendStatus(400);
let png_name = ''; let png_name = '';
let filedata = request.file; let filedata = request.file;
let uploadPath = path.join(UPLOADS_PATH, filedata.filename); let uploadPath = path.join(UPLOADS_PATH, filedata.filename);
var format = request.body.file_type; let format = request.body.file_type;
const defaultAvatarPath = './public/img/ai4.png';
//console.log(format); if (format == 'yaml' || format == 'yml') {
if (filedata) { try {
if (format == 'json') { importFromYaml(uploadPath, response);
} catch (err) {
console.log(err);
response.send({ error: true });
}
} else if (format == 'json') {
fs.readFile(uploadPath, 'utf8', async (err, data) => { fs.readFile(uploadPath, 'utf8', async (err, data) => {
fs.unlinkSync(uploadPath); fs.unlinkSync(uploadPath);
@ -899,7 +935,6 @@ router.post('/import', urlencodedParser, async function (request, response) {
response.send({ error: true }); response.send({ error: true });
} }
} }
}
}); });
router.post('/duplicate', jsonParser, async function (request, response) { router.post('/duplicate', jsonParser, async function (request, response) {

View File

@ -36,20 +36,12 @@ function sanitizeHordeImagePrompt(prompt) {
prompt = prompt.replace(/\b(boy)\b/gmi, 'man'); prompt = prompt.replace(/\b(boy)\b/gmi, 'man');
prompt = prompt.replace(/\b(girls)\b/gmi, 'women'); prompt = prompt.replace(/\b(girls)\b/gmi, 'women');
prompt = prompt.replace(/\b(boys)\b/gmi, 'men'); prompt = prompt.replace(/\b(boys)\b/gmi, 'men');
//always remove these high risk words from prompt, as they add little value to image gen while increasing the risk the prompt gets flagged //always remove these high risk words from prompt, as they add little value to image gen while increasing the risk the prompt gets flagged
prompt = prompt.replace(/\b(under.age|under.aged|underage|underaged|loli|pedo|pedophile|(\w+).year.old|(\w+).years.old|minor|prepubescent|minors|shota)\b/gmi, ''); prompt = prompt.replace(/\b(under.age|under.aged|underage|underaged|loli|pedo|pedophile|(\w+).year.old|(\w+).years.old|minor|prepubescent|minors|shota)\b/gmi, '');
//if nsfw is detected, do not remove it but apply additional precautions
let isNsfw = prompt.match(/\b(cock|ahegao|hentai|uncensored|lewd|cocks|deepthroat|deepthroating|dick|dicks|cumshot|lesbian|fuck|fucked|fucking|sperm|naked|nipples|tits|boobs|breasts|boob|breast|topless|ass|butt|fingering|masturbate|masturbating|bitch|blowjob|pussy|piss|asshole|dildo|dildos|vibrator|erection|foreskin|handjob|nude|penis|porn|vibrator|virgin|vagina|vulva|threesome|orgy|bdsm|hickey|condom|testicles|anal|bareback|bukkake|creampie|stripper|strap-on|missionary|clitoris|clit|clitty|cowgirl|fleshlight|sex|buttplug|milf|oral|sucking|bondage|orgasm|scissoring|railed|slut|sluts|slutty|cumming|cunt|faggot|sissy|anal|anus|cum|semen|scat|nsfw|xxx|explicit|erotic|horny|aroused|jizz|moan|rape|raped|raping|throbbing|humping)\b/gmi);
if (isNsfw) {
//replace risky subject nouns with person //replace risky subject nouns with person
prompt = prompt.replace(/\b(youngster|infant|baby|toddler|child|teen|kid|kiddie|kiddo|teenager|student|preteen|pre.teen)\b/gmi, 'person'); prompt = prompt.replace(/\b(youngster|infant|baby|toddler|child|teen|kid|kiddie|kiddo|teenager|student|preteen|pre.teen)\b/gmi, 'person');
//remove risky adjectives and related words //remove risky adjectives and related words
prompt = prompt.replace(/\b(young|younger|youthful|youth|small|smaller|smallest|girly|boyish|lil|tiny|teenaged|lit[tl]le|school.aged|school|highschool|kindergarten|teens|children|kids)\b/gmi, ''); prompt = prompt.replace(/\b(young|younger|youthful|youth|small|smaller|smallest|girly|boyish|lil|tiny|teenaged|lit[tl]le|school.aged|school|highschool|kindergarten|teens|children|kids)\b/gmi, '');
}
return prompt; return prompt;
} }

View File

@ -458,6 +458,15 @@ function excludeKeysByYaml(obj, yamlString) {
} }
} }
/**
 * Removes trailing slash and /v1 from a string.
 * Nullish input is treated as an empty string.
 * @param {string} str Input string
 * @returns {string} Trimmed string
 */
function trimV1(str) {
    let result = String(str ?? '');
    result = result.replace(/\/$/, '');
    result = result.replace(/\/v1$/, '');
    return result;
}
module.exports = { module.exports = {
getConfig, getConfig,
getConfigValue, getConfigValue,
@ -481,4 +490,5 @@ module.exports = {
getHexString, getHexString,
mergeObjectWithYaml, mergeObjectWithYaml,
excludeKeysByYaml, excludeKeysByYaml,
trimV1,
}; };