Fill tokenizer legacy counts where possible.

This commit is contained in:
maver 2023-07-08 18:48:50 +02:00
parent 9784950c54
commit 2ed1cee82e
2 changed files with 20 additions and 1 deletion

View File

@ -894,6 +894,25 @@ PromptManagerModule.prototype.populateTokenHandler = function(messageCollection)
this.tokenUsage = this.tokenHandler.getTotal();
// Update general token counts
const chatHistory = messageCollection.getItemByIdentifier('chatHistory');
const startChat = chatHistory.getCollection()[0]?.getTokens() || 0;
const continueNudge = chatHistory.getCollection().find(message => message.identifier === 'continueNudge')?.getTokens() || 0;
this.tokenHandler.counts = {
...this.tokenHandler.counts,
...{
'start_chat': startChat,
'prompt': 0,
'bias': this.tokenHandler.counts.bias ?? 0,
'nudge': continueNudge,
'jailbreak': this.tokenHandler.counts.jailbreak ?? 0,
'impersonate': 0,
'examples': this.tokenHandler.counts.dialogueExamples ?? 0,
'conversation': this.tokenHandler.counts.chatHistory ?? 0,
}
};
this.log('Updated token cache with ' + this.tokenUsage);
}

View File

@ -703,7 +703,7 @@ function prepareOpenAIMessages({
const chat = chatCompletion.getChat();
openai_messages_count = chat.filter(x => x?.role === "user" || x?.role === "assistant")?.length || 0;
return [chat, false];
return [chat, promptManager.tokenHandler.counts];
}
function getGroupMembers(activeGroup) {