Refactor token counting after completion. Simplify code.
commit 57fd04b974 (parent dfd49813af)
@@ -976,28 +976,52 @@ PromptManagerModule.prototype.getPromptCollection = function () {
 /**
  * Setter for messages property
  *
- * @param messages
+ * @param {MessageCollection} messages
  */
 PromptManagerModule.prototype.setMessages = function (messages) {
     this.messages = messages;
 };

+/**
+ * Set and process a finished chat completion object
+ *
+ * @param {ChatCompletion} chatCompletion
+ */
+PromptManagerModule.prototype.setChatCompletion = function(chatCompletion) {
+    const messages = chatCompletion.getMessages();
+
+    this.setMessages(messages);
+    this.populateTokenCounts(messages);
+    this.populateLegacyTokenCounts(messages);
+}
+
 /**
  * Populates the token handler
  *
- * @param messageCollection
+ * @param {MessageCollection} messages
  */
-PromptManagerModule.prototype.populateTokenHandler = function(messageCollection) {
+PromptManagerModule.prototype.populateTokenCounts = function(messages) {
     this.tokenHandler.resetCounts();
     const counts = this.tokenHandler.getCounts();
-    messageCollection.getCollection().forEach(message => {
+    messages.getCollection().forEach(message => {
         counts[message.identifier] = message.getTokens();
     });

     this.tokenUsage = this.tokenHandler.getTotal();

+    this.log('Updated token usage with ' + this.tokenUsage);
+}
+
+/**
+ * Populates legacy token counts
+ *
+ * @deprecated This might serve no purpose and should be evaluated for removal
+ *
+ * @param {MessageCollection} messages
+ */
+PromptManagerModule.prototype.populateLegacyTokenCounts = function(messages) {
     // Update general token counts
-    const chatHistory = messageCollection.getItemByIdentifier('chatHistory');
+    const chatHistory = messages.getItemByIdentifier('chatHistory');
     const startChat = chatHistory?.getCollection()[0].getTokens() || 0;
     const continueNudge = chatHistory?.getCollection().find(message => message.identifier === 'continueNudge')?.getTokens() || 0;
@@ -1014,8 +1038,6 @@ PromptManagerModule.prototype.populateTokenHandler = function(messageCollection)
             'conversation': this.tokenHandler.counts.chatHistory ?? 0,
         }
     };
-
-    this.log('Updated token cache with ' + this.tokenUsage);
 }

 /**
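For orientation, here is a minimal sketch of what the new populateTokenCounts path does once setChatCompletion hands it a finished completion. The Message, MessageCollection, ChatCompletion and token handler below are simplified stand-ins invented for this example, not the real classes from the codebase, and the legacy counts are left out:

// Simplified stand-ins, for illustration only.
class Message {
    constructor(identifier, tokens) {
        this.identifier = identifier;
        this.tokens = tokens;
    }
    getTokens() { return this.tokens; }
}

class MessageCollection {
    constructor(items) { this.items = items; }
    getCollection() { return this.items; }
}

class ChatCompletion {
    constructor(messages) { this.messages = messages; }
    getMessages() { return this.messages; }
}

const tokenHandler = {
    counts: {},
    resetCounts() { this.counts = {}; },
    getCounts() { return this.counts; },
    getTotal() { return Object.values(this.counts).reduce((sum, n) => sum + n, 0); },
};

// A finished completion holding two counted prompts.
const chatCompletion = new ChatCompletion(new MessageCollection([
    new Message('main', 120),
    new Message('chatHistory', 430),
]));

// Equivalent of setChatCompletion -> populateTokenCounts:
const messages = chatCompletion.getMessages();
tokenHandler.resetCounts();
const counts = tokenHandler.getCounts();
messages.getCollection().forEach(message => {
    counts[message.identifier] = message.getTokens();
});

console.log(counts);                  // { main: 120, chatHistory: 430 }
console.log(tokenHandler.getTotal()); // 550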
@@ -512,7 +512,7 @@ function populateDialogueExamples(prompts, chatCompletion) {
  * @param {string} options.type - The type of the chat, can be 'impersonate'.
  */
 function populateChatCompletion (prompts, chatCompletion, {bias, quietPrompt, type, cyclePrompt} = {}) {
-    //Helper function for the recurring task of preparing a prompt for the chat completion
+    // Helper function for the recurring task of preparing a prompt for the chat completion
     const addToChatCompletion = (source, target = null) => {
         // We need the prompts array to determine a position for the source.
         if (false === prompts.has(source)) return;
@@ -779,9 +779,8 @@ function prepareOpenAIMessages({
             chatCompletion.log(error);
         }
     } finally {
-        const messages = chatCompletion.getMessages();
-        if (null === promptManager.error) promptManager.populateTokenHandler(messages);
-        promptManager.setMessages(messages);
+        // Pass chat completion to prompt manager for inspection
+        promptManager.setChatCompletion(chatCompletion);

         // All information are up-to-date, render.
         if (false === dryRun) promptManager.render(false);
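Read side by side, the call-site change in prepareOpenAIMessages amounts to the following before/after, abbreviated here as a sketch (surrounding error handling and rendering elided):

// Before: the caller extracted the messages and drove three separate steps.
const messages = chatCompletion.getMessages();
if (null === promptManager.error) promptManager.populateTokenHandler(messages);
promptManager.setMessages(messages);

// After: the completion object is handed over whole; the prompt manager
// updates its message list and token counts internally.
promptManager.setChatCompletion(chatCompletion);

Note that the guard on promptManager.error is dropped at the call site; setChatCompletion as added in this commit populates the token counts unconditionally.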