Fix last prompt line not counted toward token limit

This commit is contained in:
Cohee
2024-08-30 00:00:51 +03:00
parent b21ba850c0
commit 9010880b64

View File

@@ -3769,10 +3769,10 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
     storyString,
     afterScenarioAnchor,
     examplesString,
-    chatString,
-    quiet_prompt,
-    cyclePrompt,
     userAlignmentMessage,
+    chatString,
+    modifyLastPromptLine(''),
+    cyclePrompt,
 ].join('').replace(/\r/gm, '');
 return getTokenCountAsync(encodeString, power_user.token_padding);
 }
@@ -3803,8 +3803,8 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
 }
 tokenCount += await getTokenCountAsync(item.replace(/\r/gm, ''));
-chatString = item + chatString;
 if (tokenCount < this_max_context) {
+    chatString = chatString + item;
     arrMes[index] = item;
     lastAddedIndex = Math.max(lastAddedIndex, index);
 } else {
@@ -3830,8 +3830,8 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
 }
 tokenCount += await getTokenCountAsync(item.replace(/\r/gm, ''));
-chatString = item + chatString;
 if (tokenCount < this_max_context) {
+    chatString = chatString + item;
     arrMes[i] = item;
     lastAddedIndex = Math.max(lastAddedIndex, i);
 } else {
@@ -4036,8 +4036,8 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
 mesExmString,
 addChatsPreamble(addChatsSeparator(jointMessages)),
 '\n',
-modifyLastPromptLine(''),
 generatedPromptCache,
+quiet_prompt,
 ].join('').replace(/\r/gm, '');
 let thisPromptContextSize = await getTokenCountAsync(prompt, power_user.token_padding);