diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index 1c13b4a6f..a08794ca4 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -469,7 +469,7 @@ function populateDialogueExamples(prompts, chatCompletion) {
  * @param {string} options.quietPrompt - A quiet prompt to be used in the conversation.
  * @param {string} options.type - The type of the chat, can be 'impersonate'.
  */
-function populateChatCompletion (prompts, chatCompletion, {bias, quietPrompt, type} = {}) {
+function populateChatCompletion (prompts, chatCompletion, {bias, quietPrompt, type, cyclePrompt} = {}) {
     //Helper function for the recurring task of preparing a prompt for the chat completion
     const addToChatCompletion = (source, target = null) => {
         if (false === prompts.has(source)) return;
@@ -557,6 +557,18 @@ function populateChatCompletion (prompts, chatCompletion, {bias, quietPrompt, ty
         }
     }
 
+    if (type === 'continue') {
+        const continuePrompt = new Prompt({
+            identifier: 'continueNudge',
+            role: 'system',
+            content: '[Continue the following message. Do not include ANY parts of the original message. Use capitalization and punctuation as if your reply is a part of the original message:\n\n' + cyclePrompt + ']',
+            system_prompt: true
+        });
+        const preparedPrompt = promptManager.preparePrompt(continuePrompt);
+        const continueMessage = Message.fromPrompt(preparedPrompt);
+        chatCompletion.insertAtEnd(continueMessage, 'chatHistory');
+    }
+
     // Decide whether dialogue examples should always be added
     if (power_user.pin_examples) {
         populateDialogueExamples(prompts, chatCompletion);
@@ -664,7 +676,7 @@ function prepareOpenAIMessages({
     eventSource.emit(event_types.OAI_BEFORE_CHATCOMPLETION, prompts);
 
     try {
-        populateChatCompletion(prompts, chatCompletion, {bias, quietPrompt, type});
+        populateChatCompletion(prompts, chatCompletion, {bias, quietPrompt, type, cyclePrompt});
     } catch (error) {
         if (error instanceof TokenBudgetExceededError) {
             toastr.error('An error occurred while counting tokens: Token budget exceeded.')
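
For reference, a minimal standalone sketch (not part of the patch) of what the new continue nudge evaluates to for a given cyclePrompt. The buildContinueNudge helper is hypothetical and only mirrors the content string that the patch builds inline for the 'continueNudge' prompt.

function buildContinueNudge(cyclePrompt) {
    // Mirrors the content string constructed for the 'continueNudge' prompt in the hunk above.
    return '[Continue the following message. Do not include ANY parts of the original message. '
        + 'Use capitalization and punctuation as if your reply is a part of the original message:\n\n'
        + cyclePrompt + ']';
}

// Example: cyclePrompt is assumed to hold the text of the message being continued.
console.log(buildContinueNudge('The dragon turned toward the gate and'));
// [Continue the following message. ... as if your reply is a part of the original message:
//
// The dragon turned toward the gate and]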