Merge pull request #1493 from valadaptive/generate-cleanups
Clean up Generate(), part 1
commit 7482a75bbd

514	public/script.js
@@ -694,7 +694,6 @@ let abortController;
//css
var css_mes_bg = $('<div class="mes"></div>').css('background');
var css_send_form_display = $('<div id=send_form></div>').css('display');
let generate_loop_counter = 0;
const MAX_GENERATION_LOOPS = 5;

var kobold_horde_model = '';
@@ -2316,26 +2315,8 @@ function getStoppingStrings(isImpersonate, isContinue) {
*/
export async function generateQuietPrompt(quiet_prompt, quietToLoud, skipWIAN, quietImage = null) {
console.log('got into genQuietPrompt');
return await new Promise(
async function promptPromise(resolve, reject) {
if (quietToLoud === true) {
try {
await Generate('quiet', { resolve, reject, quiet_prompt, quietToLoud: true, skipWIAN: skipWIAN, force_name2: true, quietImage: quietImage });
}
catch {
reject();
}
}
else {
try {
console.log('going to generate non-QuietToLoud');
await Generate('quiet', { resolve, reject, quiet_prompt, quietToLoud: false, skipWIAN: skipWIAN, force_name2: true, quietImage: quietImage });
}
catch {
reject();
}
}
});
const generateFinished = await Generate('quiet', { quiet_prompt, quietToLoud, skipWIAN: skipWIAN, force_name2: true, quietImage: quietImage });
return generateFinished;
}

async function processCommands(message, type, dryRun) {
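Note on the hunk above: generateQuietPrompt no longer builds its own Promise and threads resolve/reject into Generate(); it simply awaits the value Generate() now returns. A minimal sketch of that callback-to-promise conversion, using hypothetical names rather than the project's actual signatures:

```js
// Before: the caller wraps a callback-style API in a hand-rolled Promise.
function quietPromptOld(generate, prompt) {
    return new Promise((resolve, reject) => {
        // generate() is expected to invoke resolve()/reject() itself.
        generate('quiet', { prompt, resolve, reject });
    });
}

// After: generate() returns a promise, so the caller just awaits and returns it.
async function quietPromptNew(generate, prompt) {
    return await generate('quiet', { prompt });
}
```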
@@ -2915,7 +2896,8 @@ export async function generateRaw(prompt, api, instructOverride) {
return message;
}

async function Generate(type, { automatic_trigger, force_name2, resolve, reject, quiet_prompt, quietToLoud, skipWIAN, force_chid, signal, quietImage } = {}, dryRun = false) {
// Returns a promise that resolves when the text is done generating.
async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, quietToLoud, skipWIAN, force_chid, signal, quietImage, maxLoops } = {}, dryRun = false) {
console.log('Generate entered');
setGenerationProgress(0);
generation_started = new Date();
@@ -2936,13 +2918,13 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
if (interruptedByCommand) {
//$("#send_textarea").val('').trigger('input');
unblockGeneration();
return;
return Promise.resolve();
}

if (main_api == 'kobold' && kai_settings.streaming_kobold && !kai_flags.can_use_streaming) {
toastr.error('Streaming is enabled, but the version of Kobold used does not support token streaming.', undefined, { timeOut: 10000, preventDuplicates: true });
unblockGeneration();
return;
return Promise.resolve();
}

if (main_api === 'textgenerationwebui' &&
@@ -2951,12 +2933,12 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
textgen_settings.type !== MANCER) {
toastr.error('Streaming is not supported for the Legacy API. Update Ooba and use --extensions openai to enable streaming.', undefined, { timeOut: 10000, preventDuplicates: true });
unblockGeneration();
return;
return Promise.resolve();
}

if (isHordeGenerationNotAllowed()) {
unblockGeneration();
return;
return Promise.resolve();
}

// Hide swipes if not in a dry run.
@@ -2964,17 +2946,9 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
hideSwipeButtons();
}

// Set empty promise resolution functions
if (typeof resolve !== 'function') {
resolve = () => { };
}
if (typeof reject !== 'function') {
reject = () => { };
}

if (selected_group && !is_group_generating && !dryRun) {
generateGroupWrapper(false, type, { resolve, reject, quiet_prompt, force_chid, signal: abortController.signal, quietImage });
return;
// Returns the promise that generateGroupWrapper returns; resolves when generation is done
return generateGroupWrapper(false, type, { quiet_prompt, force_chid, signal: abortController.signal, quietImage, maxLoops });
} else if (selected_group && !is_group_generating && dryRun) {
const characterIndexMap = new Map(characters.map((char, index) => [char.avatar, index]));
const group = groups.find((x) => x.id === selected_group);
@@ -2996,7 +2970,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
} else {
console.log('No enabled members found');
unblockGeneration();
return;
return Promise.resolve();
}
}

@@ -3160,7 +3134,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
if (aborted) {
console.debug('Generation aborted by extension interceptors');
unblockGeneration();
return;
return Promise.resolve();
}
} else {
console.debug('Skipping extension interceptors for dry run');
@@ -3215,7 +3189,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
}
catch {
unblockGeneration();
return;
return Promise.resolve();
}
if (horde_settings.auto_adjust_context_length) {
this_max_context = (adjustedParams.maxContextLength - adjustedParams.maxLength);
@@ -3375,7 +3349,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
}

const originalType = type;
runGenerate(cyclePrompt);
return runGenerate(cyclePrompt);

async function runGenerate(cycleGenerationPrompt = '') {
if (!dryRun) {
@@ -3723,256 +3697,267 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
}
}

if (true === dryRun) return onSuccess({ error: 'dryRun' });
return new Promise(async (resolve, reject) => {
if (true === dryRun) return onSuccess({ error: 'dryRun' });

if (power_user.console_log_prompts) {
console.log(generate_data.prompt);
}
if (power_user.console_log_prompts) {
console.log(generate_data.prompt);
}

let generate_url = getGenerateUrl(main_api);
console.debug('rungenerate calling API');
let generate_url = getGenerateUrl(main_api);
console.debug('rungenerate calling API');

showStopButton();
showStopButton();

//set array object for prompt token itemization of this message
let currentArrayEntry = Number(thisPromptBits.length - 1);
let additionalPromptStuff = {
...thisPromptBits[currentArrayEntry],
rawPrompt: generate_data.prompt || generate_data.input,
mesId: getNextMessageId(type),
allAnchors: allAnchors,
summarizeString: (extension_prompts['1_memory']?.value || ''),
authorsNoteString: (extension_prompts['2_floating_prompt']?.value || ''),
smartContextString: (extension_prompts['chromadb']?.value || ''),
worldInfoString: worldInfoString,
storyString: storyString,
beforeScenarioAnchor: beforeScenarioAnchor,
afterScenarioAnchor: afterScenarioAnchor,
examplesString: examplesString,
mesSendString: mesSendString,
generatedPromptCache: generatedPromptCache,
promptBias: promptBias,
finalPrompt: finalPrompt,
charDescription: description,
charPersonality: personality,
scenarioText: scenario,
this_max_context: this_max_context,
padding: power_user.token_padding,
main_api: main_api,
instruction: isInstruct ? substituteParams(power_user.prefer_character_prompt && system ? system : power_user.instruct.system_prompt) : '',
userPersona: (power_user.persona_description || ''),
};
//set array object for prompt token itemization of this message
let currentArrayEntry = Number(thisPromptBits.length - 1);
let additionalPromptStuff = {
...thisPromptBits[currentArrayEntry],
rawPrompt: generate_data.prompt || generate_data.input,
mesId: getNextMessageId(type),
allAnchors: allAnchors,
summarizeString: (extension_prompts['1_memory']?.value || ''),
authorsNoteString: (extension_prompts['2_floating_prompt']?.value || ''),
smartContextString: (extension_prompts['chromadb']?.value || ''),
worldInfoString: worldInfoString,
storyString: storyString,
beforeScenarioAnchor: beforeScenarioAnchor,
afterScenarioAnchor: afterScenarioAnchor,
examplesString: examplesString,
mesSendString: mesSendString,
generatedPromptCache: generatedPromptCache,
promptBias: promptBias,
finalPrompt: finalPrompt,
charDescription: description,
charPersonality: personality,
scenarioText: scenario,
this_max_context: this_max_context,
padding: power_user.token_padding,
main_api: main_api,
instruction: isInstruct ? substituteParams(power_user.prefer_character_prompt && system ? system : power_user.instruct.system_prompt) : '',
userPersona: (power_user.persona_description || ''),
};

thisPromptBits = additionalPromptStuff;
thisPromptBits = additionalPromptStuff;

//console.log(thisPromptBits);
const itemizedIndex = itemizedPrompts.findIndex((item) => item.mesId === thisPromptBits['mesId']);
//console.log(thisPromptBits);
const itemizedIndex = itemizedPrompts.findIndex((item) => item.mesId === thisPromptBits['mesId']);

if (itemizedIndex !== -1) {
itemizedPrompts[itemizedIndex] = thisPromptBits;
}
else {
itemizedPrompts.push(thisPromptBits);
}

console.debug(`pushed prompt bits to itemizedPrompts array. Length is now: ${itemizedPrompts.length}`);
/** @type {Promise<any>} */
let streamingGeneratorPromise = Promise.resolve();

if (main_api == 'openai') {
if (isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = sendOpenAIRequest(type, generate_data.prompt, streamingProcessor.abortController.signal);
if (itemizedIndex !== -1) {
itemizedPrompts[itemizedIndex] = thisPromptBits;
}
else {
sendOpenAIRequest(type, generate_data.prompt, abortController.signal).then(onSuccess).catch(onError);
itemizedPrompts.push(thisPromptBits);
}
}
else if (main_api == 'koboldhorde') {
generateHorde(finalPrompt, generate_data, abortController.signal, true).then(onSuccess).catch(onError);
}
else if (main_api == 'textgenerationwebui' && isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = generateTextGenWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else if (main_api == 'novel' && isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = generateNovelWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else if (main_api == 'kobold' && isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = generateKoboldWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else {
try {
const response = await fetch(generate_url, {
method: 'POST',
headers: getRequestHeaders(),
cache: 'no-cache',
body: JSON.stringify(generate_data),
signal: abortController.signal,
});

if (!response.ok) {
const error = await response.json();
throw error;
console.debug(`pushed prompt bits to itemizedPrompts array. Length is now: ${itemizedPrompts.length}`);
/** @type {Promise<any>} */
let streamingGeneratorPromise = Promise.resolve();

if (main_api == 'openai') {
if (isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = sendOpenAIRequest(type, generate_data.prompt, streamingProcessor.abortController.signal);
}
else {
sendOpenAIRequest(type, generate_data.prompt, abortController.signal).then(onSuccess).catch(onError);
}
}
else if (main_api == 'koboldhorde') {
generateHorde(finalPrompt, generate_data, abortController.signal, true).then(onSuccess).catch(onError);
}
else if (main_api == 'textgenerationwebui' && isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = generateTextGenWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else if (main_api == 'novel' && isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = generateNovelWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else if (main_api == 'kobold' && isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = generateKoboldWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else {
try {
const response = await fetch(generate_url, {
method: 'POST',
headers: getRequestHeaders(),
cache: 'no-cache',
body: JSON.stringify(generate_data),
signal: abortController.signal,
});

if (!response.ok) {
const error = await response.json();
throw error;
}

const data = await response.json();
onSuccess(data);
} catch (error) {
onError(error);
}
}

if (isStreamingEnabled() && type !== 'quiet') {
try {
const streamingGenerator = await streamingGeneratorPromise;
streamingProcessor.generator = streamingGenerator;
hideSwipeButtons();
let getMessage = await streamingProcessor.generate();
let messageChunk = cleanUpMessage(getMessage, isImpersonate, isContinue, false);

if (isContinue) {
getMessage = continue_mag + getMessage;
}

if (streamingProcessor && !streamingProcessor.isStopped && streamingProcessor.isFinished) {
await streamingProcessor.onFinishStreaming(streamingProcessor.messageId, getMessage);
streamingProcessor = null;
triggerAutoContinue(messageChunk, isImpersonate);
}
resolve();
} catch (err) {
onError(err);
}

const data = await response.json();
onSuccess(data);
} catch (error) {
onError(error);
}
}

if (isStreamingEnabled() && type !== 'quiet') {
try {
const streamingGenerator = await streamingGeneratorPromise;
streamingProcessor.generator = streamingGenerator;
hideSwipeButtons();
let getMessage = await streamingProcessor.generate();
let messageChunk = cleanUpMessage(getMessage, isImpersonate, isContinue, false);
async function onSuccess(data) {
let messageChunk = '';

if (isContinue) {
getMessage = continue_mag + getMessage;
if (data.error == 'dryRun') {
generatedPromptCache = '';
resolve();
return;
}

if (streamingProcessor && !streamingProcessor.isStopped && streamingProcessor.isFinished) {
await streamingProcessor.onFinishStreaming(streamingProcessor.messageId, getMessage);
streamingProcessor = null;
if (!data.error) {
//const getData = await response.json();
let getMessage = extractMessageFromData(data);
let title = extractTitleFromData(data);
kobold_horde_model = title;

const swipes = extractMultiSwipes(data, type);

messageChunk = cleanUpMessage(getMessage, isImpersonate, isContinue, false);

if (isContinue) {
getMessage = continue_mag + getMessage;
}

//Formating
const displayIncomplete = type === 'quiet' && !quietToLoud;
getMessage = cleanUpMessage(getMessage, isImpersonate, isContinue, displayIncomplete);

if (getMessage.length > 0) {
if (isImpersonate) {
$('#send_textarea').val(getMessage).trigger('input');
generatedPromptCache = '';
await eventSource.emit(event_types.IMPERSONATE_READY, getMessage);
}
else if (type == 'quiet') {
resolve(getMessage);
}
else {
// Without streaming we'll be having a full message on continuation. Treat it as a last chunk.
if (originalType !== 'continue') {
({ type, getMessage } = await saveReply(type, getMessage, false, title, swipes));
}
else {
({ type, getMessage } = await saveReply('appendFinal', getMessage, false, title, swipes));
}
}
activateSendButtons();

if (type !== 'quiet') {
playMessageSound();
}
} else {
// If maxLoops is not passed in (e.g. first time generating), set it to MAX_GENERATION_LOOPS
maxLoops ??= MAX_GENERATION_LOOPS;

if (maxLoops === 0) {
reject(new Error('Generate circuit breaker interruption'));
if (type !== 'quiet') {
throwCircuitBreakerError();
}
return;
}

// regenerate with character speech reenforced
// to make sure we leave on swipe type while also adding the name2 appendage
delay(1000).then(async () => {
// The first await is for waiting for the generate to start. The second one is waiting for it to finish
const result = await await Generate(type, { automatic_trigger, force_name2: true, quiet_prompt, skipWIAN, force_chid, maxLoops: maxLoops - 1 });
resolve(result);
});
return;
}

if (power_user.auto_swipe) {
console.debug('checking for autoswipeblacklist on non-streaming message');
function containsBlacklistedWords(getMessage, blacklist, threshold) {
console.debug('checking blacklisted words');
const regex = new RegExp(`\\b(${blacklist.join('|')})\\b`, 'gi');
const matches = getMessage.match(regex) || [];
return matches.length >= threshold;
}

const generatedTextFiltered = (getMessage) => {
if (power_user.auto_swipe_blacklist_threshold) {
if (containsBlacklistedWords(getMessage, power_user.auto_swipe_blacklist, power_user.auto_swipe_blacklist_threshold)) {
console.debug('Generated text has blacklisted words');
return true;
}
}

return false;
};
if (generatedTextFiltered(getMessage)) {
console.debug('swiping right automatically');
is_send_press = false;
swipe_right();
// TODO: do we want to resolve after an auto-swipe?
resolve();
return;
}
}
} else {
generatedPromptCache = '';
activateSendButtons();
//console.log('runGenerate calling showSwipeBtns');
showSwipeButtons();

if (data?.response) {
toastr.error(data.response, 'API Error');
}
reject(data.response);
}
console.debug('/api/chats/save called by /Generate');

await saveChatConditional();
is_send_press = false;
hideStopButton();
activateSendButtons();
showSwipeButtons();
setGenerationProgress(0);
streamingProcessor = null;

if (type !== 'quiet') {
triggerAutoContinue(messageChunk, isImpersonate);
}
resolve();
} catch (err) {
onError(err);
}

}

async function onSuccess(data) {
let messageChunk = '';

if (data.error == 'dryRun') {
generatedPromptCache = '';
resolve();
return;
}

if (!data.error) {
//const getData = await response.json();
let getMessage = extractMessageFromData(data);
let title = extractTitleFromData(data);
kobold_horde_model = title;

const swipes = extractMultiSwipes(data, type);

messageChunk = cleanUpMessage(getMessage, isImpersonate, isContinue, false);

if (isContinue) {
getMessage = continue_mag + getMessage;
function onError(exception) {
if (typeof exception?.error?.message === 'string') {
toastr.error(exception.error.message, 'Error', { timeOut: 10000, extendedTimeOut: 20000 });
}

//Formating
const displayIncomplete = type === 'quiet' && !quietToLoud;
getMessage = cleanUpMessage(getMessage, isImpersonate, isContinue, displayIncomplete);

if (getMessage.length > 0) {
if (isImpersonate) {
$('#send_textarea').val(getMessage).trigger('input');
generatedPromptCache = '';
await eventSource.emit(event_types.IMPERSONATE_READY, getMessage);
}
else if (type == 'quiet') {
resolve(getMessage);
}
else {
// Without streaming we'll be having a full message on continuation. Treat it as a last chunk.
if (originalType !== 'continue') {
({ type, getMessage } = await saveReply(type, getMessage, false, title, swipes));
}
else {
({ type, getMessage } = await saveReply('appendFinal', getMessage, false, title, swipes));
}
}
activateSendButtons();

if (type !== 'quiet') {
playMessageSound();
}

generate_loop_counter = 0;
} else {
++generate_loop_counter;

if (generate_loop_counter > MAX_GENERATION_LOOPS) {
throwCircuitBreakerError();
}

// regenerate with character speech reenforced
// to make sure we leave on swipe type while also adding the name2 appendage
setTimeout(() => {
Generate(type, { automatic_trigger, force_name2: true, resolve, reject, quiet_prompt, skipWIAN, force_chid });
}, generate_loop_counter * 1000);
}

if (power_user.auto_swipe) {
console.debug('checking for autoswipeblacklist on non-streaming message');
function containsBlacklistedWords(getMessage, blacklist, threshold) {
console.debug('checking blacklisted words');
const regex = new RegExp(`\\b(${blacklist.join('|')})\\b`, 'gi');
const matches = getMessage.match(regex) || [];
return matches.length >= threshold;
}

const generatedTextFiltered = (getMessage) => {
if (power_user.auto_swipe_blacklist_threshold) {
if (containsBlacklistedWords(getMessage, power_user.auto_swipe_blacklist, power_user.auto_swipe_blacklist_threshold)) {
console.debug('Generated text has blacklisted words');
return true;
}
}

return false;
};
if (generatedTextFiltered(getMessage)) {
console.debug('swiping right automatically');
is_send_press = false;
swipe_right();
return;
}
}
} else {
generatedPromptCache = '';
activateSendButtons();
//console.log('runGenerate calling showSwipeBtns');
showSwipeButtons();

if (data?.response) {
toastr.error(data.response, 'API Error');
}
reject(exception);
unblockGeneration();
console.log(exception);
streamingProcessor = null;
}
console.debug('/api/chats/save called by /Generate');

await saveChatConditional();
is_send_press = false;
hideStopButton();
activateSendButtons();
showSwipeButtons();
setGenerationProgress(0);
streamingProcessor = null;

if (type !== 'quiet') {
triggerAutoContinue(messageChunk, isImpersonate);
resolve();
}
}

function onError(exception) {
if (typeof exception?.error?.message === 'string') {
toastr.error(exception.error.message, 'Error', { timeOut: 10000, extendedTimeOut: 20000 });
}

reject(exception);
unblockGeneration();
console.log(exception);
streamingProcessor = null;
}
});

} //rungenerate ends
} else { //generate's primary loop ends, after this is error handling for no-connection or safety-id
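The retry branch above replaces the module-level generate_loop_counter with a maxLoops countdown threaded into the recursive Generate() call, so the circuit-breaker state travels with the call chain instead of living in a global. A rough, self-contained sketch of that pattern (hypothetical helper names, not the real Generate() signature):

```js
const MAX_GENERATION_LOOPS = 5;

// Retry an async generation that may come back empty, giving up after
// maxLoops attempts instead of tracking attempts in a shared global.
async function generateWithRetries(runOnce, maxLoops = MAX_GENERATION_LOOPS) {
    if (maxLoops === 0) {
        throw new Error('Generate circuit breaker interruption');
    }

    const result = await runOnce();
    if (result && result.length > 0) {
        return result;
    }

    // Empty reply: wait a moment, then retry with one fewer attempt left.
    await new Promise(resolve => setTimeout(resolve, 1000));
    return generateWithRetries(runOnce, maxLoops - 1);
}
```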
@@ -4440,7 +4425,6 @@ function getGenerateUrl(api) {

function throwCircuitBreakerError() {
callPopup(`Could not extract reply in ${MAX_GENERATION_LOOPS} attempts. Try generating again`, 'text');
generate_loop_counter = 0;
unblockGeneration();
throw new Error('Generate circuit breaker interruption');
}

@@ -1781,7 +1781,14 @@ async function generateMultimodalPrompt(generationType, quietPrompt) {
*/
async function generatePrompt(quietPrompt) {
const reply = await generateQuietPrompt(quietPrompt, false, false);
return processReply(reply);
const processedReply = processReply(reply);

if (!processedReply) {
toastr.error('Prompt generation produced no text. Make sure you\'re using a valid instruct template and try again', 'Image Generation');
throw new Error('Prompt generation failed.');
}

return processedReply;
}

async function sendGenerationRequest(generationType, prompt, characterName = null, callback) {

@@ -8,7 +8,6 @@ import {
extractAllWords,
saveBase64AsFile,
PAGINATION_TEMPLATE,
waitUntilCondition,
getBase64Async,
} from './utils.js';
import { RA_CountCharTokens, humanizedDateTime, dragElement, favsToHotswap, getMessageTimeStamp } from './RossAscends-mods.js';
@@ -46,7 +45,6 @@ import {
updateChatMetadata,
isStreamingEnabled,
getThumbnailUrl,
streamingProcessor,
getRequestHeaders,
setMenuType,
menu_type,
@@ -612,14 +610,20 @@ function getGroupChatNames(groupId) {
}

async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
function throwIfAborted() {
if (params.signal instanceof AbortSignal && params.signal.aborted) {
throw new Error('AbortSignal was fired. Group generation stopped');
}
}

if (online_status === 'no_connection') {
is_group_generating = false;
setSendButtonState(false);
return;
return Promise.resolve();
}

if (is_group_generating) {
return false;
return Promise.resolve();
}

// Auto-navigate back to group menu
@@ -630,13 +634,15 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {

const group = groups.find((x) => x.id === selected_group);
let typingIndicator = $('#chat .typing_indicator');
let textResult = '';

if (!group || !Array.isArray(group.members) || !group.members.length) {
sendSystemMessage(system_message_types.EMPTY, '', { isSmallSys: true });
return;
return Promise.resolve();
}

try {
throwIfAborted();
hideSwipeButtons();
is_group_generating = true;
setCharacterName('');
@@ -654,50 +660,18 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
// id of this specific batch for regeneration purposes
group_generation_id = Date.now();
const lastMessage = chat[chat.length - 1];
let messagesBefore = chat.length;
let lastMessageText = lastMessage?.mes || '';
let activationText = '';
let isUserInput = false;
let isGenerationDone = false;
let isGenerationAborted = false;

if (userInput?.length && !by_auto_mode) {
isUserInput = true;
activationText = userInput;
messagesBefore++;
} else {
if (lastMessage && !lastMessage.is_system) {
activationText = lastMessage.mes;
}
}

const resolveOriginal = params.resolve;
const rejectOriginal = params.reject;

if (params.signal instanceof AbortSignal) {
if (params.signal.aborted) {
throw new Error('Already aborted signal passed. Group generation stopped');
}

params.signal.onabort = () => {
isGenerationAborted = true;
};
}

if (typeof params.resolve === 'function') {
params.resolve = function () {
isGenerationDone = true;
resolveOriginal.apply(this, arguments);
};
}

if (typeof params.reject === 'function') {
params.reject = function () {
isGenerationDone = true;
rejectOriginal.apply(this, arguments);
};
}

const activationStrategy = Number(group.activation_strategy ?? group_activation_strategy.NATURAL);
const enabledMembers = group.members.filter(x => !group.disabled_members.includes(x));
let activatedMembers = [];
@@ -742,14 +716,12 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {

// now the real generation begins: cycle through every activated character
for (const chId of activatedMembers) {
throwIfAborted();
deactivateSendButtons();
isGenerationDone = false;
const generateType = type == 'swipe' || type == 'impersonate' || type == 'quiet' || type == 'continue' ? type : 'group_chat';
setCharacterId(chId);
setCharacterName(characters[chId].name);

await Generate(generateType, { automatic_trigger: by_auto_mode, ...(params || {}) });

if (type !== 'swipe' && type !== 'impersonate' && !isStreamingEnabled()) {
// update indicator and scroll down
typingIndicator
@@ -758,75 +730,9 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
typingIndicator.show();
}

// TODO: This is awful. Refactor this
while (true) {
deactivateSendButtons();
if (isGenerationAborted) {
throw new Error('Group generation aborted');
}

// if not swipe - check if message generated already
if (generateType === 'group_chat' && chat.length == messagesBefore) {
await delay(100);
}
// if swipe - see if message changed
else if (type === 'swipe') {
if (isStreamingEnabled()) {
if (streamingProcessor && !streamingProcessor.isFinished) {
await delay(100);
}
else {
break;
}
}
else {
if (lastMessageText === chat[chat.length - 1].mes) {
await delay(100);
}
else {
break;
}
}
}
else if (type === 'impersonate') {
if (isStreamingEnabled()) {
if (streamingProcessor && !streamingProcessor.isFinished) {
await delay(100);
}
else {
break;
}
}
else {
if (!$('#send_textarea').val() || $('#send_textarea').val() == userInput) {
await delay(100);
}
else {
break;
}
}
}
else if (type === 'quiet') {
if (isGenerationDone) {
break;
} else {
await delay(100);
}
}
else if (isStreamingEnabled()) {
if (streamingProcessor && !streamingProcessor.isFinished) {
await delay(100);
} else {
await waitUntilCondition(() => streamingProcessor == null, 1000, 10);
messagesBefore++;
break;
}
}
else {
messagesBefore++;
break;
}
}
// Wait for generation to finish
const generateFinished = await Generate(generateType, { automatic_trigger: by_auto_mode, ...(params || {}) });
textResult = await generateFinished;
}
} finally {
typingIndicator.hide();
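The deleted block above polled chat state in a while (true) loop to detect when each group member's reply had landed; with Generate() returning a promise, the wrapper can await it directly per activated member. A simplified sketch of that per-member loop, with hypothetical names in place of the real group-chat plumbing:

```js
// Run one generation per activated member in order and keep the last result,
// instead of busy-waiting on chat length or streaming state.
async function generateForMembers(activatedMembers, generateFor) {
    let textResult = '';
    for (const memberId of activatedMembers) {
        // Awaiting here replaces the old delay(100) polling loop.
        textResult = await generateFor(memberId);
    }
    return textResult;
}
```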
@@ -839,6 +745,8 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
activateSendButtons();
showSwipeButtons();
}

return Promise.resolve(textResult);
}

function getLastMessageGenerationId() {

@@ -636,6 +636,7 @@ hr {
display: none;
order: 2;
padding-right: 2px;
place-self: center;
}

#options_button {