Allow auto-continue in group chats

Cohee 2024-04-03 00:27:11 +03:00
parent 9221ddde57
commit 8a0997c47b
2 changed files with 94 additions and 49 deletions

View File

@@ -4059,6 +4059,10 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
             await streamingProcessor.onFinishStreaming(streamingProcessor.messageId, getMessage);
             streamingProcessor = null;
             triggerAutoContinue(messageChunk, isImpersonate);
+            return Object.defineProperties(new String(getMessage), {
+                'messageChunk': { value: messageChunk },
+                'fromStream': { value: true },
+            });
         }
     } else {
         return await sendGenerationRequest(type, generate_data);
@@ -4069,6 +4073,11 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
     async function onSuccess(data) {
         if (!data) return;
 
+        if (data?.fromStream) {
+            return data;
+        }
+
         let messageChunk = '';
 
         if (data.error) {
@@ -4178,6 +4187,9 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
         if (type !== 'quiet') {
             triggerAutoContinue(messageChunk, isImpersonate);
         }
+
+        // Don't break the API chain that expects a single string in return
+        return Object.defineProperty(new String(getMessage), 'messageChunk', { value: messageChunk });
     }
 
     function onError(exception) {
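
For context, the two returns added above rely on a String object (rather than a string primitive) acting as a carrier for extra metadata: callers that only care about the generated text can keep treating the result as a string, while callers that know about the new properties can read them. A minimal, self-contained sketch of the pattern (illustrative values, runnable on its own, not part of the diff):

// Standalone illustration of the String-object-with-metadata pattern used above.
// The extra properties are non-enumerable, so the value still round-trips as text.
const getMessage = 'Generated text...';
const messageChunk = 'Generated text...';

const result = Object.defineProperties(new String(getMessage), {
    'messageChunk': { value: messageChunk },
    'fromStream': { value: true },
});

console.log(String(result));      // 'Generated text...' — still usable wherever a string is expected
console.log(result.messageChunk); // metadata for callers that know to look for it
console.log(result.fromStream);   // true
console.log(typeof result);       // 'object' — the wrapped value is not a primitive string
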
@@ -4272,57 +4284,81 @@ export function getNextMessageId(type)
 }
 
 /**
- *
- * @param {string} messageChunk
- * @param {boolean} isImpersonate
- * @returns {void}
+ * Determines if the message should be auto-continued.
+ * @param {string} messageChunk Current message chunk
+ * @param {boolean} isImpersonate Is the user impersonation
+ * @returns {boolean} Whether the message should be auto-continued
+ */
+export function shouldAutoContinue(messageChunk, isImpersonate) {
+    if (!power_user.auto_continue.enabled) {
+        console.debug('Auto-continue is disabled by user.');
+        return false;
+    }
+
+    if (typeof messageChunk !== 'string') {
+        console.debug('Not triggering auto-continue because message chunk is not a string');
+        return false;
+    }
+
+    if (isImpersonate) {
+        console.log('Continue for impersonation is not implemented yet');
+        return false;
+    }
+
+    if (is_send_press) {
+        console.debug('Auto-continue is disabled because a message is currently being sent.');
+        return false;
+    }
+
+    if (power_user.auto_continue.target_length <= 0) {
+        console.log('Auto-continue target length is 0, not triggering auto-continue');
+        return false;
+    }
+
+    if (main_api === 'openai' && !power_user.auto_continue.allow_chat_completions) {
+        console.log('Auto-continue for OpenAI is disabled by user.');
+        return false;
+    }
+
+    const textareaText = String($('#send_textarea').val());
+    const USABLE_LENGTH = 5;
+
+    if (textareaText.length > 0) {
+        console.log('Not triggering auto-continue because user input is not empty');
+        return false;
+    }
+
+    if (messageChunk.trim().length > USABLE_LENGTH && chat.length) {
+        const lastMessage = chat[chat.length - 1];
+        const messageLength = getTokenCount(lastMessage.mes);
+        const shouldAutoContinue = messageLength < power_user.auto_continue.target_length;
+        if (shouldAutoContinue) {
+            console.log(`Triggering auto-continue. Message tokens: ${messageLength}. Target tokens: ${power_user.auto_continue.target_length}. Message chunk: ${messageChunk}`);
+            return true;
+        } else {
+            console.log(`Not triggering auto-continue. Message tokens: ${messageLength}. Target tokens: ${power_user.auto_continue.target_length}`);
+            return false;
+        }
+    } else {
+        console.log('Last generated chunk was empty, not triggering auto-continue');
+        return false;
+    }
+}
+
+/**
+ * Triggers auto-continue if the message meets the criteria.
+ * @param {string} messageChunk Current message chunk
+ * @param {boolean} isImpersonate Is the user impersonation
  */
 export function triggerAutoContinue(messageChunk, isImpersonate) {
     if (selected_group) {
-        console.log('Auto-continue is disabled for group chat');
+        console.debug('Auto-continue is disabled for group chat');
         return;
     }
 
-    if (power_user.auto_continue.enabled && !is_send_press) {
-        if (power_user.auto_continue.target_length <= 0) {
-            console.log('Auto-continue target length is 0, not triggering auto-continue');
-            return;
-        }
-
-        if (main_api === 'openai' && !power_user.auto_continue.allow_chat_completions) {
-            console.log('Auto-continue for OpenAI is disabled by user.');
-            return;
-        }
-
-        if (isImpersonate) {
-            console.log('Continue for impersonation is not implemented yet');
-            return;
-        }
-
-        const textareaText = String($('#send_textarea').val());
-        const USABLE_LENGTH = 5;
-
-        if (textareaText.length > 0) {
-            console.log('Not triggering auto-continue because user input is not empty');
-            return;
-        }
-
-        if (messageChunk.trim().length > USABLE_LENGTH && chat.length) {
-            const lastMessage = chat[chat.length - 1];
-            const messageLength = getTokenCount(lastMessage.mes);
-            const shouldAutoContinue = messageLength < power_user.auto_continue.target_length;
-            if (shouldAutoContinue) {
-                console.log(`Triggering auto-continue. Message tokens: ${messageLength}. Target tokens: ${power_user.auto_continue.target_length}. Message chunk: ${messageChunk}`);
-                $('#option_continue').trigger('click');
-            } else {
-                console.log(`Not triggering auto-continue. Message tokens: ${messageLength}. Target tokens: ${power_user.auto_continue.target_length}`);
-                return;
-            }
-        } else {
-            console.log('Last generated chunk was empty, not triggering auto-continue');
-            return;
-        }
+    if (shouldAutoContinue(messageChunk, isImpersonate)) {
+        $('#option_continue').trigger('click');
     }
 }
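
The core decision the new shouldAutoContinue() helper makes boils down to a token-count comparison against the configured target. A simplified, self-contained sketch of just that gate (estimateTokens and the numeric values are hypothetical stand-ins; the real helper also checks impersonation, pending sends, the active API, and the user's textarea):

// Simplified stand-in for the auto-continue gate above (illustrative only).
const USABLE_LENGTH = 5;

function estimateTokens(text) {
    // crude stand-in for getTokenCount(): roughly 4 characters per token
    return Math.ceil(text.length / 4);
}

function shouldContinue(lastMessageText, latestChunk, targetTokens) {
    if (latestChunk.trim().length <= USABLE_LENGTH) {
        return false; // last chunk too short to be worth continuing
    }
    return estimateTokens(lastMessageText) < targetTokens; // still below the target length
}

// A ~50-token message keeps continuing toward a 100-token target, but not a 40-token one.
console.log(shouldContinue('a'.repeat(200), 'More text to come', 100)); // true
console.log(shouldContinue('a'.repeat(200), 'More text to come', 40));  // false
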

View File

@@ -69,6 +69,7 @@ import {
     loadItemizedPrompts,
     animation_duration,
     depth_prompt_role_default,
+    shouldAutoContinue,
 } from '../script.js';
 import { printTagList, createTagMapFromList, applyTagsOnCharacterSelect, tag_map } from './tags.js';
 import { FILTER_TYPES, FilterHelper } from './filters.js';
@@ -678,9 +679,10 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
         await delay(1);
     }
 
-    const group = groups.find((x) => x.id === selected_group);
-    let typingIndicator = $('#chat .typing_indicator');
+    /** @type {any} Caution: JS war crimes ahead */
     let textResult = '';
+    let typingIndicator = $('#chat .typing_indicator');
+    const group = groups.find((x) => x.id === selected_group);
 
     if (!group || !Array.isArray(group.members) || !group.members.length) {
         sendSystemMessage(system_message_types.EMPTY, '', { isSmallSys: true });
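
The "@type {any}" cast (and the war-crimes comment) exists because textResult starts out as a plain string but may later hold the String object returned by Generate(), whose extra messageChunk property the wrapper reads below. A tiny illustration of why the loosened type is needed (hypothetical values, not part of the diff):

// Why the loose @type {any} annotation: the variable is reassigned from a
// string primitive to a String object carrying an extra property.
/** @type {any} */
let textResult = '';
textResult = Object.defineProperty(new String('generated'), 'messageChunk', { value: 'generated' });
console.log(textResult.messageChunk); // 'generated' — this access is what a strict string type would reject
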
@@ -778,8 +780,15 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
             }
 
             // Wait for generation to finish
-            const generateFinished = await Generate(generateType, { automatic_trigger: by_auto_mode, ...(params || {}) });
-            textResult = await generateFinished;
+            textResult = await Generate(generateType, { automatic_trigger: by_auto_mode, ...(params || {}) });
+            let messageChunk = textResult?.messageChunk;
+
+            if (messageChunk) {
+                while (shouldAutoContinue(messageChunk, type === 'impersonate')) {
+                    textResult = await Generate('continue', { automatic_trigger: by_auto_mode, ...(params || {}) });
+                    messageChunk = textResult?.messageChunk;
+                }
+            }
         }
     } finally {
         typingIndicator.hide();
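
Taken together, the group loop terminates because every continuation pass appends to the last chat message, and shouldAutoContinue() recomputes its token count each time, returning false once the target length is reached (or once a chunk comes back too short to use). A self-contained mock of that flow, runnable as an ES module (generateMock, countTokens, and the numbers are hypothetical stand-ins for Generate, getTokenCount, and the user settings):

// Mock of the group auto-continue loop above; illustrative only.
const TARGET_TOKENS = 30;
let lastMessage = '';

const countTokens = (text) => Math.ceil(text.length / 4); // crude stand-in for getTokenCount()

async function generateMock() {
    const chunk = 'Another sentence of generated text. ';
    lastMessage += chunk; // each continuation grows the last chat message
    return Object.defineProperty(new String(lastMessage), 'messageChunk', { value: chunk });
}

let result = await generateMock();
let messageChunk = result?.messageChunk;

// Same shape as the loop in the diff, with the token check inlined.
while (messageChunk && countTokens(lastMessage) < TARGET_TOKENS) {
    result = await generateMock();
    messageChunk = result?.messageChunk;
}

console.log(`Stopped after reaching ~${countTokens(lastMessage)} tokens.`);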