Only generate negative prompt for textgen API

The original comment mentions that we need to get the negative prompt
first since it "has the unmodified mesSend array", but we've cloned the
mesSend array since forever, so I don't think mutation is an issue
anymore.
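For context, a minimal, self-contained sketch of why cloning removes the ordering constraint the old comment described. The data and the simplified getCombinedPrompt below are hypothetical stand-ins, not the real Generate() internals; the point is only that once the builder works on a clone of mesSend, building the final prompt first can no longer mutate what the negative prompt later reads.

// Sketch with made-up data: because getCombinedPrompt clones mesSend,
// any injection done for the main prompt stays local to that call.
const mesSend = [{ message: 'Hello' }, { message: 'World' }];

function getCombinedPrompt(isNegative) {
    const messages = structuredClone(mesSend); // clone: mutations never touch mesSend
    if (!isNegative) {
        messages.push({ message: '[injection applied only to the main prompt]' });
    }
    return messages.map(m => m.message).join('\n');
}

const finalPrompt = getCombinedPrompt(false);   // safe to build first now
const negativePrompt = getCombinedPrompt(true); // still sees the original mesSend
console.log({ finalPrompt, negativePrompt });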
valadaptive 2024-04-25 08:57:09 -04:00
parent 2d0767306e
commit 2a0497ca9e
1 changed file with 4 additions and 6 deletions


@@ -3896,13 +3896,8 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
         return !data.combinedPrompt ? combine() : data.combinedPrompt;
     }
-    // Get the negative prompt first since it has the unmodified mesSend array
-    let negativePrompt = main_api == 'textgenerationwebui' ? getCombinedPrompt(true) : undefined;
     let finalPrompt = getCombinedPrompt(false);
-    // Include the entire guidance scale object
-    const cfgValues = useCfgPrompt ? { guidanceScale: cfgGuidanceScale, negativePrompt: negativePrompt } : null;
     let maxLength = Number(amount_gen); // how many tokens the AI will be requested to generate
     let thisPromptBits = [];
@@ -3930,10 +3925,13 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
                 generate_data = getKoboldGenerationData(finalPrompt, presetSettings, maxLength, maxContext, isHorde, type);
             }
             break;
-        case 'textgenerationwebui':
+        case 'textgenerationwebui': {
+            const cfgValues = useCfgPrompt ? { guidanceScale: cfgGuidanceScale, negativePrompt: getCombinedPrompt(true) } : null;
             generate_data = getTextGenGenerationData(finalPrompt, maxLength, isImpersonate, isContinue, cfgValues, type);
             break;
+        }
         case 'novel': {
+            const cfgValues = useCfgPrompt ? { guidanceScale: cfgGuidanceScale } : null;
             const presetSettings = novelai_settings[novelai_setting_names[nai_settings.preset_settings_novel]];
             generate_data = getNovelGenerationData(finalPrompt, presetSettings, maxLength, isImpersonate, isContinue, cfgValues, type);
             break;