Fix double count of chat injects for message fitting logic

Cohee 2024-04-09 19:06:10 +03:00
parent 2411a7480e
commit 4f83782430
1 changed file with 5 additions and 4 deletions
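The underlying bug: extension prompts injected at depth are already spliced into the chat history that the fitting logic measures, while allAnchors (the combined string of all extension prompts) repeated them, so the same inject was fed to the tokenizer twice. A minimal sketch of the double count, using a stand-in tokenizer and strings rather than SillyTavern's actual code:

const countTokens = (s) => s.length;                       // stand-in for getTokenCount
const inject = '[depth inject]';
const chatString = 'User: hi\n' + inject + '\nBot: hello'; // inject already spliced into chat
const allAnchors = inject;                                 // combined extension prompts repeat it
console.log(countTokens(chatString + allAnchors));         // old estimate: inject counted twice
console.log(countTokens(chatString));                      // new estimate: counted once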


@@ -3466,7 +3466,6 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
     // Add persona description to prompt
     addPersonaDescriptionExtensionPrompt();
     // Call combined AN into Generate
-    let allAnchors = getAllExtensionPrompts();
     const beforeScenarioAnchor = getExtensionPrompt(extension_prompt_types.BEFORE_PROMPT).trimStart();
     const afterScenarioAnchor = getExtensionPrompt(extension_prompt_types.IN_PROMPT);
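getAllExtensionPrompts() aggregates every registered extension prompt into one string, regardless of the position each prompt is injected at; roughly (a sketch, not the exact implementation):

function getAllExtensionPrompts() {
    // Concatenate the value of every registered extension prompt.
    return Object.values(extension_prompts)
        .filter((prompt) => prompt.value)
        .map((prompt) => prompt.value.trim())
        .join('\n');
}

With the fitting logic below switched to position-specific strings, Generate no longer uses the aggregate, so the declaration goes away.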
@@ -3513,10 +3512,11 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
     function getMessagesTokenCount() {
         const encodeString = [
+            beforeScenarioAnchor,
             storyString,
+            afterScenarioAnchor,
             examplesString,
             chatString,
-            allAnchors,
             quiet_prompt,
             cyclePrompt,
             userAlignmentMessage,
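Note why two anchor entries are added rather than allAnchors being dropped outright: the aggregate also covered the BEFORE_PROMPT and IN_PROMPT anchors, which are not part of chatString. Stand-in values for illustration:

const beforeScenarioAnchor = '[before-prompt inject]';
const afterScenarioAnchor = '[in-prompt inject]';
// allAnchors bundled both anchors plus the depth injects that chatString
// already contains; listing the anchors individually keeps them in the
// estimate without re-counting the depth injects.
const allAnchors = [beforeScenarioAnchor, afterScenarioAnchor, '[depth inject]'].join('\n');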
@@ -3784,12 +3784,13 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
         console.debug('---checking Prompt size');
         setPromptString();
         const prompt = [
+            beforeScenarioAnchor,
             storyString,
+            afterScenarioAnchor,
             mesExmString,
             mesSend.map((e) => `${e.extensionPrompts.join('')}${e.message}`).join(''),
             '\n',
             generatedPromptCache,
-            allAnchors,
             quiet_prompt,
         ].join('').replace(/\r/gm, '');
         let thisPromptContextSize = getTokenCount(prompt, power_user.token_padding);
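In this path the double count is visible directly: each entry of mesSend already carries its own extensionPrompts, so appending allAnchors repeated them. A self-contained sketch with an assumed message shape:

const mesSend = [
    { extensionPrompts: ['[depth inject]\n'], message: 'User: hi\n' },
    { extensionPrompts: [], message: 'Bot: hello\n' },
];
// Same expression as in the hunk above: injects are already in the chat part.
const chatPart = mesSend.map((e) => `${e.extensionPrompts.join('')}${e.message}`).join('');
console.log(chatPart.includes('[depth inject]')); // true, so adding allAnchors counted it twice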
@@ -4025,7 +4026,7 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
             ...thisPromptBits[currentArrayEntry],
             rawPrompt: generate_data.prompt || generate_data.input,
             mesId: getNextMessageId(type),
-            allAnchors: allAnchors,
+            allAnchors: '',
             summarizeString: (extension_prompts['1_memory']?.value || ''),
             authorsNoteString: (extension_prompts['2_floating_prompt']?.value || ''),
             smartContextString: (extension_prompts['chromadb']?.value || ''),
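The itemization object keeps its allAnchors key so downstream consumers that read these fields keep working; the aggregate is simply reported as empty now. A consumer sketch with stand-in values:

const additionalPromptStuff = {
    allAnchors: '',
    summarizeString: '[memory]', // stand-in value
};
// Destructuring the same keys still works after the change.
const { allAnchors, summarizeString } = additionalPromptStuff;
console.log(allAnchors === ''); // true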