Don't stack suggested replies when using impersonate on Poe

This commit is contained in:
Cohee
2023-06-15 20:05:53 +03:00
parent e420c96e77
commit 62d5f20590

View File

@@ -263,24 +263,25 @@ async function generatePoe(type, finalPrompt, signal) {
     }

     const isQuiet = type === 'quiet';
+    const isImpersonate = type === 'impersonate';
     let reply = '';

     if (max_context > POE_TOKEN_LENGTH && poe_settings.bot !== 'a2_100k') {
         console.debug('Prompt is too long, sending in chunks');
-        const result = await sendChunkedMessage(finalPrompt, !isQuiet, signal)
+        const result = await sendChunkedMessage(finalPrompt, !isQuiet, !isQuiet && !isImpersonate, signal)
         reply = result.reply;
         messages_to_purge = result.chunks + 1; // +1 for the reply
     }
     else {
         console.debug('Sending prompt in one message');
-        reply = await sendMessage(finalPrompt, !isQuiet, !isQuiet, signal);
+        reply = await sendMessage(finalPrompt, !isQuiet, !isQuiet && !isImpersonate, signal);
         messages_to_purge = 2; // prompt and the reply
     }

     return reply;
 }

-async function sendChunkedMessage(finalPrompt, withStreaming, signal) {
+async function sendChunkedMessage(finalPrompt, withStreaming, withSuggestions, signal) {
     const fastReplyPrompt = '\n[Reply to this message with a full stop only]';
     const promptChunks = splitRecursive(finalPrompt, CHUNKED_PROMPT_LENGTH - fastReplyPrompt.length);
     console.debug(`Splitting prompt into ${promptChunks.length} chunks`, promptChunks);
@@ -291,7 +292,7 @@ async function sendChunkedMessage(finalPrompt, withStreaming, signal) {
         console.debug(`Sending chunk ${i + 1}/${promptChunks.length}: ${promptChunk}`);

         if (i == promptChunks.length - 1) {
             // Extract reply of the last chunk
-            reply = await sendMessage(promptChunk, withStreaming, true, signal);
+            reply = await sendMessage(promptChunk, withStreaming, withSuggestions, signal);
         } else {
             // Add fast reply prompt to the chunk
             promptChunk += fastReplyPrompt;