Fix double count of chat injects for message fitting logic

This commit is contained in:
Cohee 2024-04-09 19:06:10 +03:00
parent fc1896dcff
commit 3fd40a33de
1 changed file with 5 additions and 4 deletions

View File

@ -3467,7 +3467,6 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
// Add persona description to prompt
addPersonaDescriptionExtensionPrompt();
// Call combined AN into Generate
let allAnchors = getAllExtensionPrompts();
const beforeScenarioAnchor = getExtensionPrompt(extension_prompt_types.BEFORE_PROMPT).trimStart();
const afterScenarioAnchor = getExtensionPrompt(extension_prompt_types.IN_PROMPT);
@ -3514,10 +3513,11 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
function getMessagesTokenCount() {
const encodeString = [
beforeScenarioAnchor,
storyString,
afterScenarioAnchor,
examplesString,
chatString,
allAnchors,
quiet_prompt,
cyclePrompt,
userAlignmentMessage,
@ -3785,12 +3785,13 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
console.debug('---checking Prompt size');
setPromptString();
const prompt = [
beforeScenarioAnchor,
storyString,
afterScenarioAnchor,
mesExmString,
mesSend.map((e) => `${e.extensionPrompts.join('')}${e.message}`).join(''),
'\n',
generatedPromptCache,
allAnchors,
quiet_prompt,
].join('').replace(/\r/gm, '');
let thisPromptContextSize = getTokenCount(prompt, power_user.token_padding);
@ -4026,7 +4027,7 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
...thisPromptBits[currentArrayEntry],
rawPrompt: generate_data.prompt || generate_data.input,
mesId: getNextMessageId(type),
allAnchors: allAnchors,
allAnchors: '',
summarizeString: (extension_prompts['1_memory']?.value || ''),
authorsNoteString: (extension_prompts['2_floating_prompt']?.value || ''),
smartContextString: (extension_prompts['chromadb']?.value || ''),