Fix Chat Completions itemization

This commit is contained in:
Cohee 2023-08-22 20:34:20 +03:00
parent 6923044a34
commit 16406f2698
2 changed files with 45 additions and 28 deletions

View File

@@ -3395,17 +3395,25 @@ function getMaxContextSize() {
} }
function parseTokenCounts(counts, thisPromptBits) { function parseTokenCounts(counts, thisPromptBits) {
const total = Object.values(counts).filter(x => !Number.isNaN(x)).reduce((acc, val) => acc + val, 0); /**
* @param {any[]} numbers
*/
function getSum(...numbers) {
return numbers.map(x => Number(x)).filter(x => !Number.isNaN(x)).reduce((acc, val) => acc + val, 0);
}
const total = getSum(Object.values(counts));
thisPromptBits.push({ thisPromptBits.push({
oaiStartTokens: Object.entries(counts)?.[0]?.[1] ?? 0, oaiStartTokens: (counts?.start + counts?.controlPrompts) || 0,
oaiPromptTokens: Object.entries(counts)?.[1]?.[1] ?? 0, oaiPromptTokens: getSum(counts?.prompt, counts?.charDescription, counts?.charPersonality, counts?.scenario) || 0,
oaiBiasTokens: Object.entries(counts)?.[2]?.[1] ?? 0, oaiBiasTokens: counts?.bias || 0,
oaiNudgeTokens: Object.entries(counts)?.[3]?.[1] ?? 0, oaiNudgeTokens: counts?.nudge || 0,
oaiJailbreakTokens: Object.entries(counts)?.[4]?.[1] ?? 0, oaiJailbreakTokens: counts?.jailbreak || 0,
oaiImpersonateTokens: Object.entries(counts)?.[5]?.[1] ?? 0, oaiImpersonateTokens: counts?.impersonate || 0,
oaiExamplesTokens: Object.entries(counts)?.[6]?.[1] ?? 0, oaiExamplesTokens: (counts?.dialogueExamples + counts?.examples) || 0,
oaiConversationTokens: Object.entries(counts)?.[7]?.[1] ?? 0, oaiConversationTokens: (counts?.conversation + counts?.chatHistory) || 0,
oaiNsfwTokens: counts?.nsfw || 0,
oaiMainTokens: counts?.main || 0,
oaiTotalTokens: total, oaiTotalTokens: total,
}); });
} }
@@ -3448,25 +3456,25 @@ function appendZeroDepthAnchor(force_name2, zeroDepthAnchor, finalPromt) {
} }
function getMultigenAmount() { function getMultigenAmount() {
let this_amount_gen = parseInt(amount_gen); let this_amount_gen = Number(amount_gen);
if (tokens_already_generated === 0) { if (tokens_already_generated === 0) {
// if the max gen setting is > 50...( // if the max gen setting is > 50...(
if (parseInt(amount_gen) >= power_user.multigen_first_chunk) { if (Number(amount_gen) >= power_user.multigen_first_chunk) {
// then only try to make 50 this cycle.. // then only try to make 50 this cycle..
this_amount_gen = power_user.multigen_first_chunk; this_amount_gen = power_user.multigen_first_chunk;
} }
else { else {
// otherwise, make as much as the max amount request. // otherwise, make as much as the max amount request.
this_amount_gen = parseInt(amount_gen); this_amount_gen = Number(amount_gen);
} }
} }
// if we already received some generated text... // if we already received some generated text...
else { else {
// if the remaining tokens to be made is less than next potential cycle count // if the remaining tokens to be made is less than next potential cycle count
if (parseInt(amount_gen) - tokens_already_generated < power_user.multigen_next_chunks) { if (Number(amount_gen) - tokens_already_generated < power_user.multigen_next_chunks) {
// subtract already generated amount from the desired max gen amount // subtract already generated amount from the desired max gen amount
this_amount_gen = parseInt(amount_gen) - tokens_already_generated; this_amount_gen = Number(amount_gen) - tokens_already_generated;
} }
else { else {
// otherwise make the standard cycle amount (first 50, and 30 after that) // otherwise make the standard cycle amount (first 50, and 30 after that)
@@ -3549,17 +3557,21 @@ function promptItemize(itemizedPrompts, requestedMesId) {
//console.log('-- Counting OAI Tokens'); //console.log('-- Counting OAI Tokens');
//var finalPromptTokens = itemizedPrompts[thisPromptSet].oaiTotalTokens; //var finalPromptTokens = itemizedPrompts[thisPromptSet].oaiTotalTokens;
var oaiMainTokens = itemizedPrompts[thisPromptSet].oaiMainTokens;
var oaiStartTokens = itemizedPrompts[thisPromptSet].oaiStartTokens; var oaiStartTokens = itemizedPrompts[thisPromptSet].oaiStartTokens;
var oaiPromptTokens = itemizedPrompts[thisPromptSet].oaiPromptTokens - worldInfoStringTokens - afterScenarioAnchorTokens;
var ActualChatHistoryTokens = itemizedPrompts[thisPromptSet].oaiConversationTokens; var ActualChatHistoryTokens = itemizedPrompts[thisPromptSet].oaiConversationTokens;
var examplesStringTokens = itemizedPrompts[thisPromptSet].oaiExamplesTokens; var examplesStringTokens = itemizedPrompts[thisPromptSet].oaiExamplesTokens;
var oaiPromptTokens = itemizedPrompts[thisPromptSet].oaiPromptTokens - worldInfoStringTokens - afterScenarioAnchorTokens + examplesStringTokens;
var oaiBiasTokens = itemizedPrompts[thisPromptSet].oaiBiasTokens; var oaiBiasTokens = itemizedPrompts[thisPromptSet].oaiBiasTokens;
var oaiJailbreakTokens = itemizedPrompts[thisPromptSet].oaiJailbreakTokens; var oaiJailbreakTokens = itemizedPrompts[thisPromptSet].oaiJailbreakTokens;
var oaiNudgeTokens = itemizedPrompts[thisPromptSet].oaiNudgeTokens; var oaiNudgeTokens = itemizedPrompts[thisPromptSet].oaiNudgeTokens;
var oaiImpersonateTokens = itemizedPrompts[thisPromptSet].oaiImpersonateTokens; var oaiImpersonateTokens = itemizedPrompts[thisPromptSet].oaiImpersonateTokens;
var oaiNsfwTokens = itemizedPrompts[thisPromptSet].oaiNsfwTokens;
var finalPromptTokens = var finalPromptTokens =
oaiStartTokens + oaiStartTokens +
oaiPromptTokens + oaiPromptTokens +
oaiMainTokens +
oaiNsfwTokens +
oaiBiasTokens + oaiBiasTokens +
oaiImpersonateTokens + oaiImpersonateTokens +
oaiJailbreakTokens + oaiJailbreakTokens +
@@ -3569,8 +3581,7 @@ function promptItemize(itemizedPrompts, requestedMesId) {
//charPersonalityTokens + //charPersonalityTokens +
//allAnchorsTokens + //allAnchorsTokens +
worldInfoStringTokens + worldInfoStringTokens +
afterScenarioAnchorTokens + afterScenarioAnchorTokens;
examplesStringTokens;
// OAI doesn't use padding // OAI doesn't use padding
thisPrompt_padding = 0; thisPrompt_padding = 0;
// Max context size - max completion tokens // Max context size - max completion tokens
@@ -3601,13 +3612,13 @@ function promptItemize(itemizedPrompts, requestedMesId) {
if (this_main_api == 'openai') { if (this_main_api == 'openai') {
//console.log('-- applying % on OAI tokens'); //console.log('-- applying % on OAI tokens');
var oaiStartTokensPercentage = ((oaiStartTokens / (finalPromptTokens)) * 100).toFixed(2); var oaiStartTokensPercentage = ((oaiStartTokens / (finalPromptTokens)) * 100).toFixed(2);
var storyStringTokensPercentage = (((examplesStringTokens + afterScenarioAnchorTokens + oaiPromptTokens) / (finalPromptTokens)) * 100).toFixed(2); var storyStringTokensPercentage = (((afterScenarioAnchorTokens + oaiPromptTokens) / (finalPromptTokens)) * 100).toFixed(2);
var ActualChatHistoryTokensPercentage = ((ActualChatHistoryTokens / (finalPromptTokens)) * 100).toFixed(2); var ActualChatHistoryTokensPercentage = ((ActualChatHistoryTokens / (finalPromptTokens)) * 100).toFixed(2);
var promptBiasTokensPercentage = ((oaiBiasTokens / (finalPromptTokens)) * 100).toFixed(2); var promptBiasTokensPercentage = ((oaiBiasTokens / (finalPromptTokens)) * 100).toFixed(2);
var worldInfoStringTokensPercentage = ((worldInfoStringTokens / (finalPromptTokens)) * 100).toFixed(2); var worldInfoStringTokensPercentage = ((worldInfoStringTokens / (finalPromptTokens)) * 100).toFixed(2);
var allAnchorsTokensPercentage = ((allAnchorsTokens / (finalPromptTokens)) * 100).toFixed(2); var allAnchorsTokensPercentage = ((allAnchorsTokens / (finalPromptTokens)) * 100).toFixed(2);
var selectedTokenizer = `tiktoken (${getTokenizerModel()})`; var selectedTokenizer = `tiktoken (${getTokenizerModel()})`;
var oaiSystemTokens = oaiImpersonateTokens + oaiJailbreakTokens + oaiNudgeTokens + oaiStartTokens; var oaiSystemTokens = oaiImpersonateTokens + oaiJailbreakTokens + oaiNudgeTokens + oaiStartTokens + oaiNsfwTokens + oaiMainTokens;
var oaiSystemTokensPercentage = ((oaiSystemTokens / (finalPromptTokens)) * 100).toFixed(2); var oaiSystemTokensPercentage = ((oaiSystemTokens / (finalPromptTokens)) * 100).toFixed(2);
} else { } else {
@@ -3656,6 +3667,8 @@ function promptItemize(itemizedPrompts, requestedMesId) {
oaiImpersonateTokens, oaiImpersonateTokens,
oaiPromptTokens, oaiPromptTokens,
oaiBiasTokens, oaiBiasTokens,
oaiNsfwTokens,
oaiMainTokens,
}; };
if (this_main_api == 'openai') { if (this_main_api == 'openai') {

View File

@@ -33,13 +33,17 @@ API Used: {{this_main_api}}<br>
<div class=" flex1 tokenItemizingSubclass">-- Chat Start: </div> <div class=" flex1 tokenItemizingSubclass">-- Chat Start: </div>
<div class="tokenItemizingSubclass"> {{oaiStartTokens}}</div> <div class="tokenItemizingSubclass"> {{oaiStartTokens}}</div>
</div> </div>
<div class="flex-container ">
<div class=" flex1 tokenItemizingSubclass">-- Main: </div>
<div class="tokenItemizingSubclass">{{oaiMainTokens}}</div>
</div>
<div class="flex-container "> <div class="flex-container ">
<div class=" flex1 tokenItemizingSubclass">-- Jailbreak: </div> <div class=" flex1 tokenItemizingSubclass">-- Jailbreak: </div>
<div class="tokenItemizingSubclass">{{oaiJailbreakTokens}}</div> <div class="tokenItemizingSubclass">{{oaiJailbreakTokens}}</div>
</div> </div>
<div class="flex-container "> <div class="flex-container ">
<div class=" flex1 tokenItemizingSubclass">-- NSFW: </div> <div class=" flex1 tokenItemizingSubclass">-- NSFW: </div>
<div class="tokenItemizingSubclass">??</div> <div class="tokenItemizingSubclass">{{oaiNsfwTokens}}</div>
</div> </div>
<div class="flex-container "> <div class="flex-container ">
<div class=" flex1 tokenItemizingSubclass">-- Nudge: </div> <div class=" flex1 tokenItemizingSubclass">-- Nudge: </div>