Try to minimize side effects of generate dry running

Cohee committed 2023-08-15 00:01:55 +03:00
parent 238b667262
commit a92333875c
2 changed files with 38 additions and 20 deletions


@@ -2348,7 +2348,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
     }
     // Hide swipes on either multigen or real streaming
-    if (isStreamingEnabled() || isMultigenEnabled()) {
+    if ((isStreamingEnabled() || isMultigenEnabled()) && !dryRun) {
         hideSwipeButtons();
     }
@@ -2411,7 +2411,10 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
     }
     const isContinue = type == 'continue';
-    deactivateSendButtons();
+    if (!dryRun) {
+        deactivateSendButtons();
+    }
     let { messageBias, promptBias, isUserPromptBias } = getBiasStrings(textareaText, type);
@@ -2637,7 +2640,10 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
     let mesSend = [];
     console.debug('calling runGenerate');
-    streamingProcessor = isStreamingEnabled() ? new StreamingProcessor(type, force_name2) : false;
+    if (!dryRun) {
+        streamingProcessor = isStreamingEnabled() ? new StreamingProcessor(type, force_name2) : false;
+    }
     if (isContinue) {
         // Coping mechanism for OAI spacing
@@ -2657,7 +2663,9 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
     runGenerate(cyclePrompt);
     async function runGenerate(cycleGenerationPromt = '') {
-        is_send_press = true;
+        if (!dryRun) {
+            is_send_press = true;
+        }
         generatedPromtCache += cycleGenerationPromt;
         if (generatedPromtCache.length == 0 || type === 'continue') {
@@ -3010,6 +3018,12 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
         }
         async function onSuccess(data) {
+            if (data.error == 'dryRun') {
+                generatedPromtCache = '';
+                resolve();
+                return;
+            }
             hideStopButton();
             is_send_press = false;
             if (!data.error) {
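
Taken together, the hunks above gate every UI side effect of Generate behind the dryRun flag and let a dry run resolve early from onSuccess without clearing is_send_press or re-enabling the buttons. Below is a condensed, hypothetical sketch of that control flow, not the real Generate(); the stubs only stand in for the state and helpers named in the diff, and the prompt/backend functions are invented placeholders.

// Illustrative stubs for the state and helpers referenced in the diff.
let is_send_press = false;
let generatedPromtCache = '';
const hideSwipeButtons = () => console.log('swipes hidden');
const deactivateSendButtons = () => console.log('send buttons disabled');
const buildPrompt = async (type) => `prompt for ${type}`;      // hypothetical prompt assembly
const sendToBackend = async (prompt) => `reply to: ${prompt}`; // hypothetical API call

// Condensed sketch: UI state is only touched on real runs, and a dry run
// bails out once the prompt has been assembled (mirroring the onSuccess branch).
async function generateSketch(type, dryRun = false) {
    if (!dryRun) {
        hideSwipeButtons();
        deactivateSendButtons();
        is_send_press = true;
    }

    const prompt = await buildPrompt(type);

    if (dryRun) {
        generatedPromtCache = '';
        return prompt;
    }

    const reply = await sendToBackend(prompt);
    is_send_press = false;
    return reply;
}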


@@ -1,7 +1,7 @@
-import {callPopup, event_types, eventSource, main_api, saveSettingsDebounced, substituteParams} from "../script.js";
+import {callPopup, event_types, eventSource, is_send_press, main_api, saveSettingsDebounced, substituteParams} from "../script.js";
 import {TokenHandler} from "./openai.js";
 import {power_user} from "./power-user.js";
-import { debounce } from "./utils.js";
+import { debounce, waitUntilCondition } from "./utils.js";
 /**
  * Register migrations for the prompt manager when settings are loaded or an Open AI preset is loaded.
@@ -596,25 +596,29 @@ PromptManagerModule.prototype.render = function (afterTryGenerate = true) {
     if (null === this.activeCharacter) return;
     this.error = null;
-    if (true === afterTryGenerate) {
-        // Executed during dry-run for determining context composition
-        this.profileStart('filling context');
-        this.tryGenerate().then(() => {
-            this.profileEnd('filling context');
+    waitUntilCondition(() => is_send_press == false, 1024 * 1024, 100).then(() => {
+        if (true === afterTryGenerate) {
+            // Executed during dry-run for determining context composition
+            this.profileStart('filling context');
+            this.tryGenerate().then(() => {
+                this.profileEnd('filling context');
+                this.profileStart('render');
+                this.renderPromptManager();
+                this.renderPromptManagerListItems()
+                this.makeDraggable();
+                this.profileEnd('render');
+            });
+        } else {
+            // Executed during live communication
             this.profileStart('render');
             this.renderPromptManager();
             this.renderPromptManagerListItems()
             this.makeDraggable();
             this.profileEnd('render');
-        });
-    } else {
-        // Executed during live communication
-        this.profileStart('render');
-        this.renderPromptManager();
-        this.renderPromptManagerListItems()
-        this.makeDraggable();
-        this.profileEnd('render');
-    }
+        }
+    }).catch(() => {
+        console.log('Timeout while waiting for send press to be false');
+    });
 }
 /**
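
The prompt manager's render() now defers the dry-run context build until no real generation is in flight, by polling the imported is_send_press flag through waitUntilCondition(condition, timeout, interval) and logging a message if the wait times out. The helper itself is not part of this diff; the sketch below is only an assumption about how a polling helper with that call shape could look in utils.js, followed by a usage line mirroring the render() change.

// Assumed shape of waitUntilCondition(condition, timeout, interval); the actual
// utils.js implementation may differ. Resolves once condition() returns true,
// rejects if the timeout elapses first.
function waitUntilCondition(condition, timeout = 1000, interval = 100) {
    return new Promise((resolve, reject) => {
        const timeoutId = setTimeout(() => {
            clearInterval(intervalId);
            reject(new Error('Timed out waiting for condition'));
        }, timeout);

        const intervalId = setInterval(() => {
            if (condition()) {
                clearInterval(intervalId);
                clearTimeout(timeoutId);
                resolve();
            }
        }, interval);
    });
}

// Usage mirroring the render() change: is_send_press stands in for the flag
// imported from script.js; the very large timeout effectively waits until the
// current generation finishes.
let is_send_press = false;
waitUntilCondition(() => is_send_press == false, 1024 * 1024, 100)
    .then(() => console.log('safe to start the dry-run render'))
    .catch(() => console.log('Timeout while waiting for send press to be false'));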