cfg: Adjust max context size based on injected prompt

Close #2424
Author: Cohee
Date: 2024-12-27 23:43:16 +02:00
parent 39cfb35c1a
commit c4f119ebf9
2 changed files with 26 additions and 7 deletions


@@ -3792,6 +3792,23 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
}
}
// Fetches the combined prompt for both negative and positive prompts
const cfgGuidanceScale = getGuidanceScale();
const useCfgPrompt = cfgGuidanceScale && cfgGuidanceScale.value !== 1;
// Adjust max context based on CFG prompt to prevent context overflow
if (useCfgPrompt) {
    const negativePrompt = getCfgPrompt(cfgGuidanceScale, true, true)?.value || '';
    const positivePrompt = getCfgPrompt(cfgGuidanceScale, false, true)?.value || '';
    if (negativePrompt || positivePrompt) {
        const previousMaxContext = this_max_context;
        const [negativePromptTokenCount, positivePromptTokenCount] = await Promise.all([getTokenCountAsync(negativePrompt), getTokenCountAsync(positivePrompt)]);
        const decrement = Math.max(negativePromptTokenCount, positivePromptTokenCount);
        this_max_context -= decrement;
        console.log(`Max context reduced by ${decrement} tokens of CFG prompt (${previousMaxContext} -> ${this_max_context})`);
    }
}
console.log(`Core/all messages: ${coreChat.length}/${chat.length}`);
// kingbri MARK: - Make sure the prompt bias isn't the same as the user bias
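
The decrement reserves room for the longer of the two guidance prompts, since each CFG pass evaluates the context with its own injected prompt and the reduced window has to fit whichever one is larger. A minimal standalone sketch of the same adjustment, assuming a hypothetical countTokens callback in place of getTokenCountAsync:

// Minimal sketch of the adjustment above, not the exact SillyTavern code.
// countTokens is a hypothetical async stand-in for getTokenCountAsync.
async function reserveCfgTokens(maxContext, negativePrompt, positivePrompt, countTokens) {
    if (!negativePrompt && !positivePrompt) {
        return maxContext;
    }
    const [negativeTokens, positiveTokens] = await Promise.all([
        countTokens(negativePrompt),
        countTokens(positivePrompt),
    ]);
    // Reserve space for whichever guidance prompt is longer.
    return maxContext - Math.max(negativeTokens, positiveTokens);
}
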
@@ -4299,10 +4316,6 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
setPromptString();
}
// Fetches the combined prompt for both negative and positive prompts
const cfgGuidanceScale = getGuidanceScale();
const useCfgPrompt = cfgGuidanceScale && cfgGuidanceScale.value !== 1;
// For prompt bit itemization
let mesSendString = '';
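
As a rough usage illustration of the sketch above, with made-up token counts — a 4096-token window and guidance prompts of 120 and 80 tokens — the window shrinks by max(120, 80) = 120:

// Hypothetical numbers for illustration; a real tokenizer-backed counter would be passed in.
const fakeCounter = async (text) => text === 'negative guidance' ? 120 : 80;
reserveCfgTokens(4096, 'negative guidance', 'positive guidance', fakeCounter)
    .then((adjusted) => console.log(adjusted)); // 3976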