Add in-process files for OAI tokenization merge
@@ -366,6 +366,15 @@
             <input id="openai_reverse_proxy" type="text" class="text_pole" placeholder="https://api.openai.com/v1" maxlength="100" />
         </div>
     </div>
+    <div class="range-block">
+        <label for="oai_breakdown" class="checkbox_label widthFreeExpand">
+            <input id="oai_breakdown" type="checkbox" />
+            Token Breakdown
+        </label>
+        <div class="toggle-description justifyLeft">
+            Display a breakdown of the tokens used in the request.
+        </div>
+    </div>
     <div class="range-block">
         <div class="range-block-title">
             Context Size (tokens)
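Note: this hunk only adds the markup for the new Token Breakdown checkbox; it still needs a handler that persists the preference and shows or hides the breakdown bar. A minimal sketch of that wiring, assuming an oai_settings.oai_breakdown flag and the existing saveSettingsDebounced() helper (both names are assumptions, not part of this diff):

    // Hypothetical handler for the new checkbox (names are assumptions).
    $('#oai_breakdown').on('input', function () {
        oai_settings.oai_breakdown = !!$(this).prop('checked');
        // Show the bar only when the user opts in.
        $('#token_breakdown').css('display', oai_settings.oai_breakdown ? 'flex' : 'none');
        saveSettingsDebounced();
    });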
@@ -1176,7 +1185,7 @@
 
         <div id="advanced-formatting-button" class="drawer">
             <div class="drawer-toggle">
-                <div class="drawer-icon fa-solid fa-font closedIcon" title="AI Reponse Formatting"></div>
+                <div class="drawer-icon fa-solid fa-font closedIcon" title="AI Response Formatting"></div>
             </div>
             <div class="drawer-content">
                 <h3>Advanced Formatting
@@ -2022,7 +2031,7 @@
         </div>
 
         <div>
-            <h3 id="character_popup_text_h3"></h3> - Advanced Defininitions
+            <h3 id="character_popup_text_h3"></h3> - Advanced Definitions
         </div>
 
     </div>
@@ -2047,7 +2056,7 @@
 
         <div id="talkativeness_div">
             <h4>Talkativeness</h4>
-            <h5>How often the chracter speaks in <span class="warning">group chats!</span>
+            <h5>How often the character speaks in <span class="warning">group chats!</span>
             </h5>
             <input id="talkativeness_slider" name="talkativeness" type="range" min="0" max="1" step="0.05" value="0.5" form="form_create">
             <div id="talkativeness_hint">
@@ -2422,6 +2431,11 @@
     <div id="chat">
     </div>
     <div id="form_sheld">
+        <div id="token_breakdown" style="display:none;">
+            <div>
+                <!-- Token Breakdown Goes Here -->
+            </div>
+        </div>
         <div id="dialogue_del_mes">
            <div id="dialogue_del_mes_ok" class="menu_button">Delete</div>
            <div id="dialogue_del_mes_cancel" class="menu_button">Cancel</div>
@@ -1157,7 +1157,7 @@ function addOneMessage(mes, { type = "normal", insertAfter = null, scroll = true
     } else if (params.isUser !== true) { //hide all when prompt cache is empty
         console.log('saw empty prompt cache, hiding all prompt buttons');
         $(".mes_prompt").hide();
-        console.log(itemizedPrompts);
+        //console.log(itemizedPrompts);
     } else { console.log('skipping prompt data for User Message'); }
 
     newMessage.find('.avatar img').on('error', function () {
@@ -2250,6 +2250,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
             breakdown_bar.empty();
 
             const total = Object.values(counts).reduce((acc, val) => acc + val, 0);
+            console.log(`oai start tokens: ${Object.entries(counts)[0][1]}`);
 
             thisPromptBits.push({
                 oaiStartTokens: Object.entries(counts)[0][1],
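Here counts holds the per-category OAI token counts and total is their sum, which is what the breakdown bar visualizes. A rough sketch of how such a bar could be filled with one proportional segment per category (breakdown_bar, counts, and total come from the code above; the loop itself is an assumption, not part of this diff):

    // Hypothetical segment rendering: width is the category's share of the total.
    for (const [type, value] of Object.entries(counts)) {
        if (!value) continue; // skip empty categories
        const segment = $('<div></div>')
            .addClass('token_breakdown_segment')
            .css('width', `${((value / total) * 100).toFixed(2)}%`)
            .attr('title', `${type}: ${value} tokens`)
            .text(value);
        breakdown_bar.append(segment);
    }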
@@ -2350,7 +2351,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
 
         thisPromptBits = additionalPromptStuff;
 
-        //console.log(thisPromptBits);
+        console.log(thisPromptBits);
 
         itemizedPrompts.push(thisPromptBits);
         //console.log(`pushed prompt bits to itemizedPrompts array. Length is now: ${itemizedPrompts.length}`);
@@ -2361,6 +2362,23 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
             hideSwipeButtons();
             let getMessage = await streamingProcessor.generate();
 
+            // Cohee: Basically a dead-end code... (disabled by isStreamingEnabled)
+            // I wasn't able to get multigen working with real streaming
+            // consistently without screwing the interim prompting
+            if (isMultigenEnabled()) {
+                tokens_already_generated += this_amount_gen; // add new gen amt to any prev gen counter..
+                message_already_generated += getMessage;
+                promptBias = '';
+                if (!streamingProcessor.isStopped && shouldContinueMultigen(getMessage, isImpersonate)) {
+                    streamingProcessor.isFinished = false;
+                    runGenerate(getMessage);
+                    console.log('returning to make generate again');
+                    return;
+                }
+
+                getMessage = message_already_generated;
+            }
+
             if (streamingProcessor && !streamingProcessor.isStopped && streamingProcessor.isFinished) {
                 streamingProcessor.onFinishStreaming(streamingProcessor.messageId, getMessage);
                 streamingProcessor = null;
@@ -2524,7 +2542,6 @@ function promptItemize(itemizedPrompts, requestedMesId) {
         var worldInfoStringTokens = getTokenCount(itemizedPrompts[thisPromptSet].worldInfoString);
         var thisPrompt_max_context = itemizedPrompts[thisPromptSet].this_max_context;
         var thisPrompt_padding = itemizedPrompts[thisPromptSet].padding;
-        console.log(`"${itemizedPrompts[thisPromptSet].promptBias}"`);
         var promptBiasTokens = getTokenCount(itemizedPrompts[thisPromptSet].promptBias);
         var this_main_api = itemizedPrompts[thisPromptSet].main_api;
 
@@ -2533,12 +2550,12 @@ function promptItemize(itemizedPrompts, requestedMesId) {
             //console.log('-- Counting OAI Tokens');
             var finalPromptTokens = itemizedPrompts[thisPromptSet].oaiTotalTokens;
             var oaiStartTokens = itemizedPrompts[thisPromptSet].oaiStartTokens;
+            console.log(oaiStartTokens);
             var oaiPromptTokens = itemizedPrompts[thisPromptSet].oaiPromptTokens;
             var ActualChatHistoryTokens = itemizedPrompts[thisPromptSet].oaiConversationTokens;
             var examplesStringTokens = itemizedPrompts[thisPromptSet].oaiExamplesTokens;
             var oaiBiasTokens = itemizedPrompts[thisPromptSet].oaiBiasTokens;
             var oaiJailbreakTokens = itemizedPrompts[thisPromptSet].oaiJailbreakTokens;
-            var oaiStartTokens = itemizedPrompts[thisPromptSet].oaiStartTokens;
             var oaiNudgeTokens = itemizedPrompts[thisPromptSet].oaiNudgeTokens;
             var oaiImpersonateTokens = itemizedPrompts[thisPromptSet].oaiImpersonateTokens;
 
@@ -2566,6 +2583,7 @@ function promptItemize(itemizedPrompts, requestedMesId) {
         if (this_main_api == 'openai') {
             //console.log('-- applying % on OAI tokens');
             var oaiStartTokensPercentage = ((oaiStartTokens / (finalPromptTokens)) * 100).toFixed(2);
+            console.log(oaiStartTokensPercentage);
             var storyStringTokensPercentage = ((oaiPromptTokens / (finalPromptTokens)) * 100).toFixed(2);
             var ActualChatHistoryTokensPercentage = ((ActualChatHistoryTokens / (finalPromptTokens)) * 100).toFixed(2);
             var promptBiasTokensPercentage = ((oaiBiasTokens / (finalPromptTokens)) * 100).toFixed(2);
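For illustration, each percentage here is just the category's token count divided by the OAI prompt total, scaled to 100 and fixed to two decimals; a quick worked example with made-up numbers (45 start tokens in a 3000-token prompt):

    // Example values are invented for illustration only.
    const oaiStartTokens = 45;
    const finalPromptTokens = 3000;
    const pct = ((oaiStartTokens / finalPromptTokens) * 100).toFixed(2);
    console.log(pct); // "1.50" — note that toFixed() returns a string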
@@ -3504,10 +3522,10 @@ function changeMainAPI() {
     // Hide common settings for OpenAI
     if (selectedVal == "openai") {
         $("#common-gen-settings-block").css("display", "none");
-        $("#token_breakdown").css("display", "flex");
+        //$("#token_breakdown").css("display", "flex");
     } else {
         $("#common-gen-settings-block").css("display", "block");
-        $("#token_breakdown").css("display", "none");
+        //$("#token_breakdown").css("display", "none");
     }
     // Hide amount gen for poe
     if (selectedVal == "poe") {
@@ -648,7 +648,7 @@ class TokenHandler {
     }
 
     count(messages, full, type) {
-        console.log(messages);
+        //console.log(messages);
         const token_count = this.countTokenFn(messages, full);
         this.counts[type] += token_count;
 
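For reference, count() adds each freshly counted batch of tokens to a per-category tally, which is what the breakdown later reads. A standalone sketch of that accumulation pattern, with example category names that are not taken from the class itself:

    // Minimal mock of the accumulation behaviour shown above (category names are examples).
    const counts = { start: 0, prompt: 0, conversation: 0 };
    function addCount(type, tokenCount) {
        counts[type] += tokenCount;
    }
    addCount('start', 45);
    addCount('prompt', 512);
    console.log(counts); // { start: 45, prompt: 512, conversation: 0 }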
@@ -385,6 +385,18 @@ code {
     justify-content: center;
 }
 
+#token_breakdown div {
+    display: flex;
+    width: 100%;
+    justify-content: center;
+}
+
+.token_breakdown_segment {
+    min-width: 40px !important;
+    border: solid 2px;
+    border-radius: 5px;
+}
+
 
 #loading_mes {
     display: none;