Allow auto-continue in group chats

Cohee 2024-04-03 00:27:11 +03:00
parent 9221ddde57
commit 8a0997c47b
2 changed files with 94 additions and 49 deletions

public/script.js

@@ -4059,6 +4059,10 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
await streamingProcessor.onFinishStreaming(streamingProcessor.messageId, getMessage);
streamingProcessor = null;
triggerAutoContinue(messageChunk, isImpersonate);
return Object.defineProperties(new String(getMessage), {
'messageChunk': { value: messageChunk },
'fromStream': { value: true },
});
}
} else {
return await sendGenerationRequest(type, generate_data);
@@ -4069,6 +4073,11 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
async function onSuccess(data) {
if (!data) return;
if (data?.fromStream) {
return data;
}
let messageChunk = '';
if (data.error) {
@@ -4178,6 +4187,9 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
if (type !== 'quiet') {
triggerAutoContinue(messageChunk, isImpersonate);
}
// Don't break the API chain that expects a single string in return
return Object.defineProperty(new String(getMessage), 'messageChunk', { value: messageChunk });
}
function onError(exception) {
@@ -4272,57 +4284,81 @@ export function getNextMessageId(type) {
}
/**
*
* @param {string} messageChunk
* @param {boolean} isImpersonate
* @returns {void}
* Determines if the message should be auto-continued.
* @param {string} messageChunk Current message chunk
* @param {boolean} isImpersonate Is the user impersonation
* @returns {boolean} Whether the message should be auto-continued
*/
export function shouldAutoContinue(messageChunk, isImpersonate) {
if (!power_user.auto_continue.enabled) {
console.debug('Auto-continue is disabled by user.');
return false;
}
if (typeof messageChunk !== 'string') {
console.debug('Not triggering auto-continue because message chunk is not a string');
return false;
}
if (isImpersonate) {
console.log('Continue for impersonation is not implemented yet');
return false;
}
if (is_send_press) {
console.debug('Auto-continue is disabled because a message is currently being sent.');
return false;
}
if (power_user.auto_continue.target_length <= 0) {
console.log('Auto-continue target length is 0, not triggering auto-continue');
return false;
}
if (main_api === 'openai' && !power_user.auto_continue.allow_chat_completions) {
console.log('Auto-continue for OpenAI is disabled by user.');
return false;
}
const textareaText = String($('#send_textarea').val());
const USABLE_LENGTH = 5;
if (textareaText.length > 0) {
console.log('Not triggering auto-continue because user input is not empty');
return false;
}
if (messageChunk.trim().length > USABLE_LENGTH && chat.length) {
const lastMessage = chat[chat.length - 1];
const messageLength = getTokenCount(lastMessage.mes);
const shouldAutoContinue = messageLength < power_user.auto_continue.target_length;
if (shouldAutoContinue) {
console.log(`Triggering auto-continue. Message tokens: ${messageLength}. Target tokens: ${power_user.auto_continue.target_length}. Message chunk: ${messageChunk}`);
return true;
} else {
console.log(`Not triggering auto-continue. Message tokens: ${messageLength}. Target tokens: ${power_user.auto_continue.target_length}`);
return false;
}
} else {
console.log('Last generated chunk was empty, not triggering auto-continue');
return false;
}
}
/**
* Triggers auto-continue if the message meets the criteria.
* @param {string} messageChunk Current message chunk
* @param {boolean} isImpersonate Is the user impersonation
*/
export function triggerAutoContinue(messageChunk, isImpersonate) {
if (selected_group) {
console.log('Auto-continue is disabled for group chat');
console.debug('Auto-continue is disabled for group chat');
return;
}
if (power_user.auto_continue.enabled && !is_send_press) {
if (power_user.auto_continue.target_length <= 0) {
console.log('Auto-continue target length is 0, not triggering auto-continue');
return;
}
if (main_api === 'openai' && !power_user.auto_continue.allow_chat_completions) {
console.log('Auto-continue for OpenAI is disabled by user.');
return;
}
if (isImpersonate) {
console.log('Continue for impersonation is not implemented yet');
return;
}
const textareaText = String($('#send_textarea').val());
const USABLE_LENGTH = 5;
if (textareaText.length > 0) {
console.log('Not triggering auto-continue because user input is not empty');
return;
}
if (messageChunk.trim().length > USABLE_LENGTH && chat.length) {
const lastMessage = chat[chat.length - 1];
const messageLength = getTokenCount(lastMessage.mes);
const shouldAutoContinue = messageLength < power_user.auto_continue.target_length;
if (shouldAutoContinue) {
console.log(`Triggering auto-continue. Message tokens: ${messageLength}. Target tokens: ${power_user.auto_continue.target_length}. Message chunk: ${messageChunk}`);
$('#option_continue').trigger('click');
} else {
console.log(`Not triggering auto-continue. Message tokens: ${messageLength}. Target tokens: ${power_user.auto_continue.target_length}`);
return;
}
} else {
console.log('Last generated chunk was empty, not triggering auto-continue');
return;
}
if (shouldAutoContinue(messageChunk, isImpersonate)) {
$('#option_continue').trigger('click');
}
}
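
Aside (not part of the diff): the return values above lean on a plain-JavaScript trick. A minimal standalone sketch, assuming nothing beyond standard JS, of how a String object can carry extra metadata while still looking like an ordinary string to existing callers:

// Illustration only, not SillyTavern code. Property descriptors default to
// non-enumerable and non-writable, so the attached metadata stays out of the way.
const getMessage = 'Hello there';
const result = Object.defineProperties(new String(getMessage), {
    'messageChunk': { value: ' there' },
    'fromStream': { value: true },
});

console.log(`${result}`);         // 'Hello there' — behaves like a normal string
console.log(result.messageChunk); // ' there'      — metadata for callers that look for it
console.log(result.fromStream);   // true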

public/scripts/group-chats.js

@@ -69,6 +69,7 @@ import {
loadItemizedPrompts,
animation_duration,
depth_prompt_role_default,
shouldAutoContinue,
} from '../script.js';
import { printTagList, createTagMapFromList, applyTagsOnCharacterSelect, tag_map } from './tags.js';
import { FILTER_TYPES, FilterHelper } from './filters.js';
@@ -678,9 +679,10 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
await delay(1);
}
const group = groups.find((x) => x.id === selected_group);
let typingIndicator = $('#chat .typing_indicator');
/** @type {any} Caution: JS war crimes ahead */
let textResult = '';
let typingIndicator = $('#chat .typing_indicator');
const group = groups.find((x) => x.id === selected_group);
if (!group || !Array.isArray(group.members) || !group.members.length) {
sendSystemMessage(system_message_types.EMPTY, '', { isSmallSys: true });
@@ -778,8 +780,15 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
}
// Wait for generation to finish
const generateFinished = await Generate(generateType, { automatic_trigger: by_auto_mode, ...(params || {}) });
textResult = await generateFinished;
textResult = await Generate(generateType, { automatic_trigger: by_auto_mode, ...(params || {}) });
let messageChunk = textResult?.messageChunk;
if (messageChunk) {
while (shouldAutoContinue(messageChunk, type === 'impersonate')) {
textResult = await Generate('continue', { automatic_trigger: by_auto_mode, ...(params || {}) });
messageChunk = textResult?.messageChunk;
}
}
}
} finally {
typingIndicator.hide();
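
For readability, here is roughly how the new auto-continue block in generateGroupWrapper reads once this diff is applied; it is a paraphrase of the hunk above (using the Generate and shouldAutoContinue functions imported from script.js), not additional behavior:

// Run the requested generation, then keep issuing 'continue' generations while
// the shared shouldAutoContinue() check says the last message is still shorter
// than the configured target length.
textResult = await Generate(generateType, { automatic_trigger: by_auto_mode, ...(params || {}) });
let messageChunk = textResult?.messageChunk;

if (messageChunk) {
    while (shouldAutoContinue(messageChunk, type === 'impersonate')) {
        textResult = await Generate('continue', { automatic_trigger: by_auto_mode, ...(params || {}) });
        messageChunk = textResult?.messageChunk;
    }
}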