Simplify impersonate prompt handling
parent 42544e9d26
commit 0513d9c8c0
@@ -399,6 +399,9 @@ async function prepareOpenAIMessages({ systemPrompt, name2, storyString, worldIn
     // Handle bias settings
     if (bias && bias.trim().length) chatCompletion.add(biasMessage);
 
+    // Handle impersonation
+    if (type === "impersonate") chatCompletion.replace('main', substituteParams(oai_settings.impersonation_prompt));
+
     // Handle chat examples
     const exampleMessages = prepareExampleMessages(openai_msgs ,openai_msgs_example, power_user.pin_examples);
     if (exampleMessages.length) chatCompletion.replace('dialogueExamples', exampleMessages);
@@ -409,12 +412,6 @@ async function prepareOpenAIMessages({ systemPrompt, name2, storyString, worldIn
         chatCompletion.insertAfter('main', quietPromptMessage)
     }
 
-    // Handle impersonation
-    if (type === "impersonate") {
-        chatCompletion.insertBefore('chatHistory', 'impersonate', substituteParams(oai_settings.impersonation_prompt));
-        chatCompletion.remove('main');
-    }
-
     promptManager.updatePrompts(chatCompletion.getPromptsWithTokenCount());
 
     // Save settings with updated token calculation and return context
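The simplification works because the prompt collection keeps its ordering when a prompt's content is swapped in place, so a single replace('main', ...) covers what insertBefore('chatHistory', ...) plus remove('main') did before. A minimal sketch of that idea, using a hypothetical PromptCollection rather than the project's actual ChatCompletion implementation:

// Hypothetical sketch -- not the project's actual ChatCompletion class.
// An ordered prompt collection where replace() swaps content in place,
// which is what lets replace('main', ...) stand in for the old
// insertBefore('chatHistory', ...) + remove('main') pair.
class PromptCollection {
    constructor() { this.prompts = []; }  // ordered list of { identifier, content }
    add(prompt) { this.prompts.push(prompt); }
    replace(identifier, content) {        // overwrite content, keep position
        const target = this.prompts.find(p => p.identifier === identifier);
        if (target) target.content = content;
    }
    insertBefore(identifier, newIdentifier, content) {
        const index = this.prompts.findIndex(p => p.identifier === identifier);
        if (index !== -1) this.prompts.splice(index, 0, { identifier: newIdentifier, content });
    }
    remove(identifier) {
        this.prompts = this.prompts.filter(p => p.identifier !== identifier);
    }
}

// Either path ends with one impersonation entry; the new path reuses the 'main' slot:
const chat = new PromptCollection();
chat.add({ identifier: 'main', content: 'System prompt' });
chat.add({ identifier: 'chatHistory', content: 'History' });
chat.replace('main', 'Impersonation prompt');  // new: one call, position preserved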