Add system message collapse for chat comps

Cohee 2023-10-14 22:05:09 +03:00
parent 658701a943
commit d0637750e7
2 changed files with 82 additions and 5 deletions

View File

@@ -1693,6 +1693,20 @@
<span data-i18n="Send names in the ChatML objects.">Send names in the ChatML objects. Helps the model to associate messages with characters.</span>
</div>
</div>
<div class="range-block">
<label for="squash_system_messages" title="Squash system messages" class="checkbox_label widthFreeExpand">
<input id="squash_system_messages" type="checkbox" />
<span data-i18n="Squash system messages">
Squash system messages
</span>
</label>
<div class="toggle-description justifyLeft">
<span data-i18n="Combines consecutive system messages into one.">
Combines consecutive system messages into one (excluding example dialogues).
May improve coherence for some models.
</span>
</div>
</div>
<div class="range-block" data-source="ai21">
<label for="use_ai21_tokenizer" title="Use AI21 Tokenizer" class="checkbox_label widthFreeExpand">
<input id="use_ai21_tokenizer" type="checkbox" /><span data-i18n="Use AI21 Tokenizer">Use AI21 Tokenizer</span>
@@ -4838,4 +4852,4 @@
</script>
</body>
</html>
</html>

View File

@@ -217,6 +217,7 @@ const default_settings = {
use_ai21_tokenizer: false,
exclude_assistant: false,
use_alt_scale: false,
squash_system_messages: false,
};
const oai_settings = {
@@ -261,6 +262,7 @@ const oai_settings = {
use_ai21_tokenizer: false,
exclude_assistant: false,
use_alt_scale: false,
squash_system_messages: false,
};
let openai_setting_names;
@@ -937,6 +939,10 @@ function prepareOpenAIMessages({
// Pass chat completion to prompt manager for inspection
promptManager.setChatCompletion(chatCompletion);
if (oai_settings.squash_system_messages) {
chatCompletion.squashSystemMessages();
}
// All information is up-to-date, render.
if (false === dryRun) promptManager.render(false);
}
@@ -1646,6 +1652,21 @@ class MessageCollection {
getTokens() {
return this.collection.reduce((tokens, message) => tokens + message.getTokens(), 0);
}
/**
* Combines message collections into a single collection.
* @returns {Message[]} The collection of messages flattened into a single array.
*/
flatten() {
return this.collection.reduce((acc, message) => {
if (message instanceof MessageCollection) {
acc.push(...message.flatten());
} else {
acc.push(message);
}
return acc;
}, []);
}
}
/**
@@ -1660,6 +1681,36 @@ class MessageCollection {
*/
class ChatCompletion {
/**
* Combines consecutive system messages into one if they have no name attached.
*/
squashSystemMessages() {
const excludeList = ['newMainChat', 'newChat', 'groupNudge'];
this.messages.collection = this.messages.flatten();
let lastMessage = null;
let squashedMessages = [];
for (let message of this.messages.collection) {
if (!excludeList.includes(message.identifier) && message.role === 'system' && !message.name) {
if (lastMessage && lastMessage.role === 'system') {
lastMessage.content += '\n' + message.content;
lastMessage.tokens = tokenHandler.count({ role: lastMessage.role, content: lastMessage.content });
}
else {
squashedMessages.push(message);
lastMessage = message;
}
}
else {
squashedMessages.push(message);
lastMessage = message;
}
}
this.messages.collection = squashedMessages;
}
/**
* Initializes a new instance of ChatCompletion.
* Sets up the initial token budget and a new message collection.
@@ -1819,7 +1870,11 @@ class ChatCompletion {
for (let item of this.messages.collection) {
if (item instanceof MessageCollection) {
chat.push(...item.getChat());
} else if (item instanceof Message && item.content) {
const message = { role: item.role, content: item.content, ...(item.name ? { name: item.name } : {}) };
chat.push(message);
} else {
this.log(`Item ${item} has an unknown type. Adding as-is`);
chat.push(item);
}
}
@@ -1993,6 +2048,7 @@ function loadOpenAISettings(data, settings) {
oai_settings.new_group_chat_prompt = settings.new_group_chat_prompt ?? default_settings.new_group_chat_prompt;
oai_settings.new_example_chat_prompt = settings.new_example_chat_prompt ?? default_settings.new_example_chat_prompt;
oai_settings.continue_nudge_prompt = settings.continue_nudge_prompt ?? default_settings.continue_nudge_prompt;
oai_settings.squash_system_messages = settings.squash_system_messages ?? default_settings.squash_system_messages;
if (settings.wrap_in_quotes !== undefined) oai_settings.wrap_in_quotes = !!settings.wrap_in_quotes;
if (settings.names_in_completion !== undefined) oai_settings.names_in_completion = !!settings.names_in_completion;
@@ -2029,6 +2085,7 @@ function loadOpenAISettings(data, settings) {
$('#exclude_assistant').prop('checked', oai_settings.exclude_assistant);
$('#scale-alt').prop('checked', oai_settings.use_alt_scale);
$('#openrouter_use_fallback').prop('checked', oai_settings.openrouter_use_fallback);
$('#squash_system_messages').prop('checked', oai_settings.squash_system_messages);
if (settings.impersonation_prompt !== undefined) oai_settings.impersonation_prompt = settings.impersonation_prompt;
$('#impersonation_prompt_textarea').val(oai_settings.impersonation_prompt);
@@ -2228,6 +2285,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
use_ai21_tokenizer: settings.use_ai21_tokenizer,
exclude_assistant: settings.exclude_assistant,
use_alt_scale: settings.use_alt_scale,
squash_system_messages: settings.squash_system_messages,
};
const savePresetSettings = await fetch(`/api/presets/save-openai?name=${name}`, {
@@ -2572,14 +2630,14 @@ function onSettingsPresetChange() {
stream_openai: ['#stream_toggle', 'stream_openai', true],
prompts: ['', 'prompts', false],
prompt_order: ['', 'prompt_order', false],
use_openrouter: ['#use_openrouter', 'use_openrouter', true],
api_url_scale: ['#api_url_scale', 'api_url_scale', false],
show_external_models: ['#openai_show_external_models', 'show_external_models', true],
proxy_password: ['#openai_proxy_password', 'proxy_password', false],
assistant_prefill: ['#claude_assistant_prefill', 'assistant_prefill', false],
use_ai21_tokenizer: ['#use_ai21_tokenizer', 'use_ai21_tokenizer', false],
exclude_assistant: ['#exclude_assistant', 'exclude_assistant', false],
use_alt_scale: ['#use_alt_scale', 'use_alt_scale', false],
use_ai21_tokenizer: ['#use_ai21_tokenizer', 'use_ai21_tokenizer', true],
exclude_assistant: ['#exclude_assistant', 'exclude_assistant', true],
use_alt_scale: ['#use_alt_scale', 'use_alt_scale', true],
squash_system_messages: ['#squash_system_messages', 'squash_system_messages', true],
};
const presetName = $('#settings_perset_openai').find(":selected").text();
@@ -3286,6 +3344,11 @@ $(document).ready(async function () {
saveSettingsDebounced();
});
$('#squash_system_messages').on('input', function () {
oai_settings.squash_system_messages = !!$(this).prop('checked');
saveSettingsDebounced();
});
$(document).on('input', '#openai_settings .autoSetHeight', function () {
resetScrollHeight($(this));
});
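
For illustration, a minimal standalone sketch of the squashing behavior introduced above, assuming plain OpenAI-style message objects. The squash helper below is hypothetical and not part of the diff; the commit's actual logic lives in ChatCompletion.squashSystemMessages(), which additionally skips the identifiers in excludeList and re-counts tokens via tokenHandler.

// Hypothetical helper, for illustration only: a nameless 'system' message is
// merged (joined with '\n') into the immediately preceding 'system' message;
// every other message passes through unchanged.
function squash(messages) {
    const result = [];
    for (const message of messages) {
        const last = result[result.length - 1];
        if (message.role === 'system' && !message.name && last && last.role === 'system') {
            last.content += '\n' + message.content;
        } else {
            result.push({ ...message });
        }
    }
    return result;
}

// Example: three consecutive system prompts collapse into a single message.
console.log(squash([
    { role: 'system', content: 'Main prompt' },
    { role: 'system', content: 'Style guide' },
    { role: 'system', content: 'Scenario' },
    { role: 'user', content: 'Hello' },
]));
// [ { role: 'system', content: 'Main prompt\nStyle guide\nScenario' },
//   { role: 'user', content: 'Hello' } ]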