Add {{maxPrompt}} macro

Cohee
2023-12-02 22:47:43 +02:00
parent 6e09e45651
commit ff46a249d8
2 changed files with 17 additions and 15 deletions

View File

@@ -2023,6 +2023,7 @@ function substituteParams(content, _name1, _name2, _original, _group, _replaceCh
         content = content.replace(/{{mesExamples}}/gi, fields.mesExamples || '');
     }
+    content = content.replace(/{{maxPrompt}}/gi, () => String(getMaxContextSize()));
     content = content.replace(/{{user}}/gi, _name1);
     content = content.replace(/{{char}}/gi, _name2);
     content = content.replace(/{{charIfNotGroup}}/gi, _group);
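
The replacement uses a function callback, so getMaxContextSize() is only evaluated when the pattern actually occurs in the content. A minimal sketch of the resulting behaviour, with the context-size lookup stubbed to a made-up value (the real number comes from getMaxContextSize() and the user's settings):

    // Stand-in for getMaxContextSize(); 3796 is an arbitrary example value.
    const getMaxContextSizeStub = () => 3796;

    // Same pattern as the added line: a callback defers the lookup until a match is found.
    function expandMaxPrompt(content) {
        return content.replace(/{{maxPrompt}}/gi, () => String(getMaxContextSizeStub()));
    }

    console.log(expandMaxPrompt('Prompt budget: {{maxPrompt}} tokens'));
    // -> "Prompt budget: 3796 tokens"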
@@ -4056,7 +4057,7 @@ function getMaxContextSize() {
         this_max_context = this_max_context - amount_gen;
     }
     if (main_api == 'openai') {
-        this_max_context = oai_settings.openai_max_context;
+        this_max_context = oai_settings.openai_max_context - oai_settings.openai_max_tokens;
     }
     return this_max_context;
 }
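
With this change, the Chat Completions branch subtracts the reserved response length, so {{maxPrompt}} reports the tokens actually left for the prompt rather than the full context window. Illustrative arithmetic with made-up settings values:

    // Hypothetical settings, only to show the arithmetic of the new openai branch.
    const oai_settings = { openai_max_context: 8192, openai_max_tokens: 512 };

    // Mirrors: this_max_context = oai_settings.openai_max_context - oai_settings.openai_max_tokens;
    const maxPrompt = oai_settings.openai_max_context - oai_settings.openai_max_tokens;
    console.log(maxPrompt); // 7680 tokens available for the prompt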
@@ -4129,7 +4130,7 @@ async function DupeChar() {
     const confirm = await callPopup(`
         <h3>Are you sure you want to duplicate this character?</h3>
         <span>If you just want to start a new chat with the same character, use "Start new chat" option in the bottom-left options menu.</span><br><br>`,
         'confirm',
     );
     if (!confirm) {
@@ -7553,22 +7554,22 @@ function addDebugFunctions() {
     `Recalculates token counts of all messages in the current chat to refresh the counters.
     Useful when you switch between models that have different tokenizers.
     This is a visual change only. Your chat will be reloaded.`, async () => {
         for (const message of chat) {
             // System messages are not counted
             if (message.is_system) {
                 continue;
             }

             if (!message.extra) {
                 message.extra = {};
             }

             message.extra.token_count = getTokenCount(message.mes, 0);
         }

         await saveChatConditional();
         await reloadCurrentChat();
     });
     registerDebugFunction('generationTest', 'Send a generation request', 'Generates text using the currently selected API.', async () => {
         const text = prompt('Input text:', 'Hello');
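
For context, registerDebugFunction is called with an id, a label, a description, and an async callback, as in the two registrations above. A hypothetical extra registration (not part of this commit) that logs the value the new macro resolves to could follow the same pattern:

    // Hypothetical debug helper; follows the registerDebugFunction(id, label, description, callback) shape used above.
    registerDebugFunction('logMaxPrompt', 'Log max prompt size', 'Prints the current {{maxPrompt}} value to the console.', async () => {
        console.log(`{{maxPrompt}} resolves to ${getMaxContextSize()} tokens`);
    });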

View File

@@ -39,6 +39,7 @@
     <small>(enabled in the Advanced Formatting settings)</small>
 </div>
 <ul>
+    <li><tt>&lcub;&lcub;maxPrompt&rcub;&rcub;</tt> max allowed prompt length in tokens = (context size - response length)</li>
     <li><tt>&lcub;&lcub;exampleSeparator&rcub;&rcub;</tt> context template example dialogues separator</li>
     <li><tt>&lcub;&lcub;chatStart&rcub;&rcub;</tt> context template chat start line</li>
     <li><tt>&lcub;&lcub;instructSystem&rcub;&rcub;</tt> instruct system prompt</li>