Have Generate() return a promise

Generate(), being async, now returns a promise-within-a-promise.
If called with `let p = await Generate(...)`, it'll wait for generation
to *start*. If you then `await p`, you'll wait for generation to
*finish*. This makes it much easier to tell exactly when generation's
done. generateGroupWrapper has been similarly modified.
valadaptive 2023-12-07 17:46:15 -05:00
parent 03884b29ad
commit 33f969f097
2 changed files with 256 additions and 300 deletions
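
As a usage reference, here is a short sketch of the calling pattern this change introduces, modeled on the updated generateQuietPrompt in the diff below. The wrapper function, its name, and the argument values are illustrative only and are not part of the commit.

// Sketch only: `Generate` is the function modified by this commit; everything
// else here (the wrapper name, its parameter, and the option values) is hypothetical.
async function runQuietGeneration(prompt) {
    // Per the commit message, the first await returns once generation has *started*;
    // the value it yields is the promise tied to the generation itself.
    const generateFinished = await Generate('quiet', {
        quiet_prompt: prompt,
        quietToLoud: false,
        force_name2: true,
    });

    // Awaiting that value waits for generation to *finish*.
    await generateFinished;
}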

View File

@@ -2307,26 +2307,8 @@ function getStoppingStrings(isImpersonate, isContinue) {
*/
export async function generateQuietPrompt(quiet_prompt, quietToLoud, skipWIAN, quietImage = null) {
console.log('got into genQuietPrompt');
-return await new Promise(
-async function promptPromise(resolve, reject) {
-if (quietToLoud === true) {
-try {
-await Generate('quiet', { resolve, reject, quiet_prompt, quietToLoud: true, skipWIAN: skipWIAN, force_name2: true, quietImage: quietImage });
-}
-catch {
-reject();
-}
-}
-else {
-try {
-console.log('going to generate non-QuietToLoud');
-await Generate('quiet', { resolve, reject, quiet_prompt, quietToLoud: false, skipWIAN: skipWIAN, force_name2: true, quietImage: quietImage });
-}
-catch {
-reject();
-}
-}
-});
+const generateFinished = await Generate('quiet', { quiet_prompt, quietToLoud, skipWIAN: skipWIAN, force_name2: true, quietImage: quietImage });
+await generateFinished;
}
async function processCommands(message, type, dryRun) {
@@ -2906,7 +2888,8 @@ export async function generateRaw(prompt, api, instructOverride) {
return message;
}
-async function Generate(type, { automatic_trigger, force_name2, resolve, reject, quiet_prompt, quietToLoud, skipWIAN, force_chid, signal, quietImage } = {}, dryRun = false) {
+// Returns a promise that resolves when the text is done generating.
+async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, quietToLoud, skipWIAN, force_chid, signal, quietImage } = {}, dryRun = false) {
console.log('Generate entered');
setGenerationProgress(0);
generation_started = new Date();
@@ -2916,14 +2899,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
abortController = new AbortController();
}
-// Set empty promise resolution functions
-if (typeof resolve !== 'function') {
-resolve = () => { };
-}
-if (typeof reject !== 'function') {
-reject = () => { };
-}
// OpenAI doesn't need instruct mode. Use OAI main prompt instead.
const isInstruct = power_user.instruct.enabled && main_api !== 'openai';
const isImpersonate = type == 'impersonate';
@@ -2935,15 +2910,13 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
if (interruptedByCommand) {
//$("#send_textarea").val('').trigger('input');
unblockGeneration();
-resolve();
-return;
+return Promise.resolve();
}
if (main_api == 'kobold' && kai_settings.streaming_kobold && !kai_flags.can_use_streaming) {
toastr.error('Streaming is enabled, but the version of Kobold used does not support token streaming.', undefined, { timeOut: 10000, preventDuplicates: true });
unblockGeneration();
-resolve();
-return;
+return Promise.resolve();
}
if (main_api === 'textgenerationwebui' &&
@@ -2952,14 +2925,12 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
textgen_settings.type !== MANCER) {
toastr.error('Streaming is not supported for the Legacy API. Update Ooba and use --extensions openai to enable streaming.', undefined, { timeOut: 10000, preventDuplicates: true });
unblockGeneration();
-resolve();
-return;
+return Promise.resolve();
}
if (isHordeGenerationNotAllowed()) {
unblockGeneration();
-resolve();
-return;
+return Promise.resolve();
}
// Hide swipes if not in a dry run.
@@ -2968,8 +2939,8 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
}
if (selected_group && !is_group_generating && !dryRun) {
-generateGroupWrapper(false, type, { resolve, reject, quiet_prompt, force_chid, signal: abortController.signal, quietImage });
-return;
+// TODO: await here!
+return generateGroupWrapper(false, type, { quiet_prompt, force_chid, signal: abortController.signal, quietImage });
} else if (selected_group && !is_group_generating && dryRun) {
const characterIndexMap = new Map(characters.map((char, index) => [char.avatar, index]));
const group = groups.find((x) => x.id === selected_group);
@@ -2991,8 +2962,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
} else {
console.log('No enabled members found');
unblockGeneration();
-resolve();
-return;
+return Promise.resolve();
}
}
@@ -3156,8 +3126,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
if (aborted) {
console.debug('Generation aborted by extension interceptors');
unblockGeneration();
-resolve();
-return;
+return Promise.resolve();
}
} else {
console.debug('Skipping extension interceptors for dry run');
@@ -3212,8 +3181,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
}
catch {
unblockGeneration();
-resolve();
-return;
+return Promise.resolve();
}
if (horde_settings.auto_adjust_context_length) {
this_max_context = (adjustedParams.maxContextLength - adjustedParams.maxLength);
@@ -3373,7 +3341,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
}
const originalType = type;
-runGenerate(cyclePrompt);
+return runGenerate(cyclePrompt);
async function runGenerate(cycleGenerationPrompt = '') {
if (!dryRun) {
@@ -3721,258 +3689,260 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
}
}
if (true === dryRun) return onSuccess({ error: 'dryRun' });
return new Promise(async (resolve, reject) => {
if (true === dryRun) return onSuccess({ error: 'dryRun' });
if (power_user.console_log_prompts) {
console.log(generate_data.prompt);
}
if (power_user.console_log_prompts) {
console.log(generate_data.prompt);
}
let generate_url = getGenerateUrl(main_api);
console.debug('rungenerate calling API');
let generate_url = getGenerateUrl(main_api);
console.debug('rungenerate calling API');
showStopButton();
showStopButton();
//set array object for prompt token itemization of this message
let currentArrayEntry = Number(thisPromptBits.length - 1);
let additionalPromptStuff = {
...thisPromptBits[currentArrayEntry],
rawPrompt: generate_data.prompt || generate_data.input,
mesId: getNextMessageId(type),
allAnchors: allAnchors,
summarizeString: (extension_prompts['1_memory']?.value || ''),
authorsNoteString: (extension_prompts['2_floating_prompt']?.value || ''),
smartContextString: (extension_prompts['chromadb']?.value || ''),
worldInfoString: worldInfoString,
storyString: storyString,
beforeScenarioAnchor: beforeScenarioAnchor,
afterScenarioAnchor: afterScenarioAnchor,
examplesString: examplesString,
mesSendString: mesSendString,
generatedPromptCache: generatedPromptCache,
promptBias: promptBias,
finalPrompt: finalPrompt,
charDescription: description,
charPersonality: personality,
scenarioText: scenario,
this_max_context: this_max_context,
padding: power_user.token_padding,
main_api: main_api,
instruction: isInstruct ? substituteParams(power_user.prefer_character_prompt && system ? system : power_user.instruct.system_prompt) : '',
userPersona: (power_user.persona_description || ''),
};
//set array object for prompt token itemization of this message
let currentArrayEntry = Number(thisPromptBits.length - 1);
let additionalPromptStuff = {
...thisPromptBits[currentArrayEntry],
rawPrompt: generate_data.prompt || generate_data.input,
mesId: getNextMessageId(type),
allAnchors: allAnchors,
summarizeString: (extension_prompts['1_memory']?.value || ''),
authorsNoteString: (extension_prompts['2_floating_prompt']?.value || ''),
smartContextString: (extension_prompts['chromadb']?.value || ''),
worldInfoString: worldInfoString,
storyString: storyString,
beforeScenarioAnchor: beforeScenarioAnchor,
afterScenarioAnchor: afterScenarioAnchor,
examplesString: examplesString,
mesSendString: mesSendString,
generatedPromptCache: generatedPromptCache,
promptBias: promptBias,
finalPrompt: finalPrompt,
charDescription: description,
charPersonality: personality,
scenarioText: scenario,
this_max_context: this_max_context,
padding: power_user.token_padding,
main_api: main_api,
instruction: isInstruct ? substituteParams(power_user.prefer_character_prompt && system ? system : power_user.instruct.system_prompt) : '',
userPersona: (power_user.persona_description || ''),
};
thisPromptBits = additionalPromptStuff;
thisPromptBits = additionalPromptStuff;
//console.log(thisPromptBits);
const itemizedIndex = itemizedPrompts.findIndex((item) => item.mesId === thisPromptBits['mesId']);
//console.log(thisPromptBits);
const itemizedIndex = itemizedPrompts.findIndex((item) => item.mesId === thisPromptBits['mesId']);
if (itemizedIndex !== -1) {
itemizedPrompts[itemizedIndex] = thisPromptBits;
}
else {
itemizedPrompts.push(thisPromptBits);
}
console.debug(`pushed prompt bits to itemizedPrompts array. Length is now: ${itemizedPrompts.length}`);
/** @type {Promise<any>} */
let streamingGeneratorPromise = Promise.resolve();
if (main_api == 'openai') {
if (isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = sendOpenAIRequest(type, generate_data.prompt, streamingProcessor.abortController.signal);
if (itemizedIndex !== -1) {
itemizedPrompts[itemizedIndex] = thisPromptBits;
}
else {
sendOpenAIRequest(type, generate_data.prompt, abortController.signal).then(onSuccess).catch(onError);
itemizedPrompts.push(thisPromptBits);
}
}
else if (main_api == 'koboldhorde') {
generateHorde(finalPrompt, generate_data, abortController.signal, true).then(onSuccess).catch(onError);
}
else if (main_api == 'textgenerationwebui' && isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = generateTextGenWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else if (main_api == 'novel' && isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = generateNovelWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else if (main_api == 'kobold' && isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = generateKoboldWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else {
try {
const response = await fetch(generate_url, {
method: 'POST',
headers: getRequestHeaders(),
cache: 'no-cache',
body: JSON.stringify(generate_data),
signal: abortController.signal,
});
if (!response.ok) {
const error = await response.json();
throw error;
console.debug(`pushed prompt bits to itemizedPrompts array. Length is now: ${itemizedPrompts.length}`);
/** @type {Promise<any>} */
let streamingGeneratorPromise = Promise.resolve();
if (main_api == 'openai') {
if (isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = sendOpenAIRequest(type, generate_data.prompt, streamingProcessor.abortController.signal);
}
else {
sendOpenAIRequest(type, generate_data.prompt, abortController.signal).then(onSuccess).catch(onError);
}
}
else if (main_api == 'koboldhorde') {
generateHorde(finalPrompt, generate_data, abortController.signal, true).then(onSuccess).catch(onError);
}
else if (main_api == 'textgenerationwebui' && isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = generateTextGenWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else if (main_api == 'novel' && isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = generateNovelWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else if (main_api == 'kobold' && isStreamingEnabled() && type !== 'quiet') {
streamingGeneratorPromise = generateKoboldWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else {
try {
const response = await fetch(generate_url, {
method: 'POST',
headers: getRequestHeaders(),
cache: 'no-cache',
body: JSON.stringify(generate_data),
signal: abortController.signal,
});
if (!response.ok) {
const error = await response.json();
throw error;
}
const data = await response.json();
onSuccess(data);
} catch (error) {
onError(error);
}
}
if (isStreamingEnabled() && type !== 'quiet') {
try {
const streamingGenerator = await streamingGeneratorPromise;
streamingProcessor.generator = streamingGenerator;
hideSwipeButtons();
let getMessage = await streamingProcessor.generate();
let messageChunk = cleanUpMessage(getMessage, isImpersonate, isContinue, false);
if (isContinue) {
getMessage = continue_mag + getMessage;
}
if (streamingProcessor && !streamingProcessor.isStopped && streamingProcessor.isFinished) {
await streamingProcessor.onFinishStreaming(streamingProcessor.messageId, getMessage);
streamingProcessor = null;
triggerAutoContinue(messageChunk, isImpersonate);
}
resolve();
} catch (err) {
onError(err);
}
const data = await response.json();
onSuccess(data);
} catch (error) {
onError(error);
}
}
if (isStreamingEnabled() && type !== 'quiet') {
try {
const streamingGenerator = await streamingGeneratorPromise;
streamingProcessor.generator = streamingGenerator;
hideSwipeButtons();
let getMessage = await streamingProcessor.generate();
let messageChunk = cleanUpMessage(getMessage, isImpersonate, isContinue, false);
async function onSuccess(data) {
let messageChunk = '';
if (isContinue) {
getMessage = continue_mag + getMessage;
if (data.error == 'dryRun') {
generatedPromptCache = '';
resolve();
return;
}
if (streamingProcessor && !streamingProcessor.isStopped && streamingProcessor.isFinished) {
await streamingProcessor.onFinishStreaming(streamingProcessor.messageId, getMessage);
streamingProcessor = null;
if (!data.error) {
//const getData = await response.json();
let getMessage = extractMessageFromData(data);
let title = extractTitleFromData(data);
kobold_horde_model = title;
const swipes = extractMultiSwipes(data, type);
messageChunk = cleanUpMessage(getMessage, isImpersonate, isContinue, false);
if (isContinue) {
getMessage = continue_mag + getMessage;
}
//Formating
const displayIncomplete = type === 'quiet' && !quietToLoud;
getMessage = cleanUpMessage(getMessage, isImpersonate, isContinue, displayIncomplete);
if (getMessage.length > 0) {
if (isImpersonate) {
$('#send_textarea').val(getMessage).trigger('input');
generatedPromptCache = '';
await eventSource.emit(event_types.IMPERSONATE_READY, getMessage);
}
else if (type == 'quiet') {
resolve(getMessage);
}
else {
// Without streaming we'll be having a full message on continuation. Treat it as a last chunk.
if (originalType !== 'continue') {
({ type, getMessage } = await saveReply(type, getMessage, false, title, swipes));
}
else {
({ type, getMessage } = await saveReply('appendFinal', getMessage, false, title, swipes));
}
}
activateSendButtons();
if (type !== 'quiet') {
playMessageSound();
}
generate_loop_counter = 0;
} else {
++generate_loop_counter;
if (generate_loop_counter > MAX_GENERATION_LOOPS) {
throwCircuitBreakerError();
}
// regenerate with character speech reenforced
// to make sure we leave on swipe type while also adding the name2 appendage
setTimeout(() => {
Generate(type, { automatic_trigger, force_name2: true, resolve, reject, quiet_prompt, skipWIAN, force_chid });
}, generate_loop_counter * 1000);
}
if (power_user.auto_swipe) {
console.debug('checking for autoswipeblacklist on non-streaming message');
function containsBlacklistedWords(getMessage, blacklist, threshold) {
console.debug('checking blacklisted words');
const regex = new RegExp(`\\b(${blacklist.join('|')})\\b`, 'gi');
const matches = getMessage.match(regex) || [];
return matches.length >= threshold;
}
const generatedTextFiltered = (getMessage) => {
if (power_user.auto_swipe_blacklist_threshold) {
if (containsBlacklistedWords(getMessage, power_user.auto_swipe_blacklist, power_user.auto_swipe_blacklist_threshold)) {
console.debug('Generated text has blacklisted words');
return true;
}
}
return false;
};
if (generatedTextFiltered(getMessage)) {
console.debug('swiping right automatically');
is_send_press = false;
swipe_right();
// TODO: do we want to resolve after an auto-swipe?
resolve();
return;
}
}
} else {
generatedPromptCache = '';
activateSendButtons();
//console.log('runGenerate calling showSwipeBtns');
showSwipeButtons();
if (data?.response) {
toastr.error(data.response, 'API Error');
}
}
console.debug('/api/chats/save called by /Generate');
await saveChatConditional();
is_send_press = false;
hideStopButton();
activateSendButtons();
showSwipeButtons();
setGenerationProgress(0);
streamingProcessor = null;
if (type !== 'quiet') {
triggerAutoContinue(messageChunk, isImpersonate);
}
resolve();
} catch (err) {
onError(err);
}
}
async function onSuccess(data) {
let messageChunk = '';
if (data.error == 'dryRun') {
generatedPromptCache = '';
resolve();
return;
}
if (!data.error) {
//const getData = await response.json();
let getMessage = extractMessageFromData(data);
let title = extractTitleFromData(data);
kobold_horde_model = title;
const swipes = extractMultiSwipes(data, type);
messageChunk = cleanUpMessage(getMessage, isImpersonate, isContinue, false);
if (isContinue) {
getMessage = continue_mag + getMessage;
function onError(exception) {
if (typeof exception?.error?.message === 'string') {
toastr.error(exception.error.message, 'Error', { timeOut: 10000, extendedTimeOut: 20000 });
}
//Formating
const displayIncomplete = type === 'quiet' && !quietToLoud;
getMessage = cleanUpMessage(getMessage, isImpersonate, isContinue, displayIncomplete);
if (getMessage.length > 0) {
if (isImpersonate) {
$('#send_textarea').val(getMessage).trigger('input');
generatedPromptCache = '';
await eventSource.emit(event_types.IMPERSONATE_READY, getMessage);
}
else if (type == 'quiet') {
resolve(getMessage);
}
else {
// Without streaming we'll be having a full message on continuation. Treat it as a last chunk.
if (originalType !== 'continue') {
({ type, getMessage } = await saveReply(type, getMessage, false, title, swipes));
}
else {
({ type, getMessage } = await saveReply('appendFinal', getMessage, false, title, swipes));
}
}
activateSendButtons();
if (type !== 'quiet') {
playMessageSound();
}
generate_loop_counter = 0;
} else {
++generate_loop_counter;
if (generate_loop_counter > MAX_GENERATION_LOOPS) {
throwCircuitBreakerError();
}
// regenerate with character speech reenforced
// to make sure we leave on swipe type while also adding the name2 appendage
setTimeout(() => {
Generate(type, { automatic_trigger, force_name2: true, resolve, reject, quiet_prompt, skipWIAN, force_chid });
}, generate_loop_counter * 1000);
}
if (power_user.auto_swipe) {
console.debug('checking for autoswipeblacklist on non-streaming message');
function containsBlacklistedWords(getMessage, blacklist, threshold) {
console.debug('checking blacklisted words');
const regex = new RegExp(`\\b(${blacklist.join('|')})\\b`, 'gi');
const matches = getMessage.match(regex) || [];
return matches.length >= threshold;
}
const generatedTextFiltered = (getMessage) => {
if (power_user.auto_swipe_blacklist_threshold) {
if (containsBlacklistedWords(getMessage, power_user.auto_swipe_blacklist, power_user.auto_swipe_blacklist_threshold)) {
console.debug('Generated text has blacklisted words');
return true;
}
}
return false;
};
if (generatedTextFiltered(getMessage)) {
console.debug('swiping right automatically');
is_send_press = false;
swipe_right();
// TODO: do we want to resolve after an auto-swipe?
resolve();
return;
}
}
} else {
generatedPromptCache = '';
activateSendButtons();
//console.log('runGenerate calling showSwipeBtns');
showSwipeButtons();
if (data?.response) {
toastr.error(data.response, 'API Error');
}
reject(exception);
unblockGeneration();
console.log(exception);
streamingProcessor = null;
}
console.debug('/api/chats/save called by /Generate');
await saveChatConditional();
is_send_press = false;
hideStopButton();
activateSendButtons();
showSwipeButtons();
setGenerationProgress(0);
streamingProcessor = null;
if (type !== 'quiet') {
triggerAutoContinue(messageChunk, isImpersonate);
}
resolve();
}
function onError(exception) {
if (typeof exception?.error?.message === 'string') {
toastr.error(exception.error.message, 'Error', { timeOut: 10000, extendedTimeOut: 20000 });
}
reject(exception);
unblockGeneration();
console.log(exception);
streamingProcessor = null;
}
});
} //rungenerate ends
} else { //generate's primary loop ends, after this is error handling for no-connection or safety-id

View File

@@ -612,11 +612,11 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
if (online_status === 'no_connection') {
is_group_generating = false;
setSendButtonState(false);
-return;
+return Promise.resolve();
}
if (is_group_generating) {
-return false;
+return Promise.resolve();
}
// Auto-navigate back to group menu
@@ -630,7 +630,7 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
if (!group || !Array.isArray(group.members) || !group.members.length) {
sendSystemMessage(system_message_types.EMPTY, '', { isSmallSys: true });
-return;
+return Promise.resolve();
}
try {
@@ -717,32 +717,16 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
setCharacterName(characters[chId].name);
// Wait for generation to finish
-await new Promise(async (resolve, reject) => {
-await Generate(generateType, {
-automatic_trigger: by_auto_mode,
-...(params || {}),
-resolve: function(...args) {
-if (typeof params.resolve === 'function') {
-params.resolve(...args);
-}
-resolve();
-},
-reject: function(...args) {
-if (typeof params.reject === 'function') {
-params.reject(...args);
-}
-reject();
-},
-});
+const generateFinished = await Generate(generateType, { automatic_trigger: by_auto_mode, ...(params || {}) });
+await generateFinished;
-if (type !== 'swipe' && type !== 'impersonate' && !isStreamingEnabled()) {
-// update indicator and scroll down
-typingIndicator
-.find('.typing_indicator_name')
-.text(characters[chId].name);
-typingIndicator.show();
-}
-});
+if (type !== 'swipe' && type !== 'impersonate' && !isStreamingEnabled()) {
+// update indicator and scroll down
+typingIndicator
+.find('.typing_indicator_name')
+.text(characters[chId].name);
+typingIndicator.show();
+}
}
} finally {
typingIndicator.hide();
@@ -755,6 +739,8 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
activateSendButtons();
showSwipeButtons();
}
+return Promise.resolve();
}
function getLastMessageGenerationId() {