Improve strength of main and impersonation prompt
parent ff07572fd4 · commit 989f704605
@@ -778,7 +778,7 @@ const openAiDefaultPrompts = {
     },
     {
         "identifier": "characterInfo",
-        "name": "Character",
+        "name": "Character Description",
         "system_prompt": true,
         "marker": true,
         "calculated_tokens": 0,
@@ -859,15 +859,11 @@ const openAiDefaultPromptList = [
         "enabled": true
     },
     {
-        "identifier": "nsfw",
+        "identifier": "enhanceDefinitions",
         "enabled": false
     },
     {
-        "identifier": "main",
-        "enabled": true
-    },
-    {
-        "identifier": "enhanceDefinitions",
+        "identifier": "nsfw",
         "enabled": false
     },
     {
@@ -890,6 +886,10 @@ const openAiDefaultPromptList = [
         "identifier": "chatHistory",
         "enabled": true
     },
+    {
+        "identifier": "main",
+        "enabled": true
+    },
     {
         "identifier": "jailbreak",
         "enabled": false

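Taken together, the two prompt-list hunks above swap the positions of the nsfw and enhanceDefinitions entries and move the main prompt further down the default order, so it now sits between chatHistory and jailbreak. A minimal sketch of the affected slice of openAiDefaultPromptList after the change (identifiers and enabled flags come from the diff; the omitted entries in between are an assumption):

// Sketch only: entries touched by this commit, not the complete default list.
const openAiDefaultPromptListAfter = [
    { "identifier": "enhanceDefinitions", "enabled": false },
    { "identifier": "nsfw", "enabled": false },
    // ... other default entries (unchanged, omitted here) ...
    { "identifier": "chatHistory", "enabled": true },
    { "identifier": "main", "enabled": true }, // moved: the main prompt now follows the chat history
    { "identifier": "jailbreak", "enabled": false },
];
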
@@ -401,9 +401,6 @@ async function prepareOpenAIMessages({ systemPrompt, name2, storyString, worldIn
     // Handle bias settings
     if (bias && bias.trim().length) chatCompletion.add(biasMessage);

-    // Handle impersonation
-    if (type === "impersonate") chatCompletion.replace('main', substituteParams(oai_settings.impersonation_prompt));
-
     // Handle chat examples
     // ToDo: Update dialogueExamples prompt with only the token count that's actually sent.
     const exampleMessages = prepareExampleMessages(openai_msgs ,openai_msgs_example, power_user.pin_examples);
@@ -415,6 +412,9 @@ async function prepareOpenAIMessages({ systemPrompt, name2, storyString, worldIn
         chatCompletion.insertAfter('main', quietPromptMessage)
     }

+    // Handle impersonation
+    if (type === "impersonate") chatCompletion.replace('main', chatCompletion.makeSystemMessage(substituteParams(oai_settings.impersonation_prompt)));
+
     promptManager.updatePrompts(chatCompletion.getPromptsWithTokenCount());

     // Save settings with updated token calculation and return context

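With this change the impersonation branch runs after the quiet prompt has been inserted, so the 'main' slot is replaced once the rest of the completion is already assembled, and the replacement text is now wrapped in chatCompletion.makeSystemMessage() so it carries an explicit system role. The ChatCompletion helpers themselves are not part of this diff; the following is a hypothetical, simplified sketch of how replace() and makeSystemMessage() might behave (method names are taken from the diff, the bodies are assumptions):

// Hypothetical, simplified ChatCompletion; the real class is not shown in this commit.
class ChatCompletion {
    constructor() {
        this.messages = []; // ordered list of { identifier, role, content }
    }
    add(message) {
        this.messages.push(message);
    }
    makeSystemMessage(content) {
        return { role: 'system', content };
    }
    // Replace the prompt registered under `identifier`, keeping its position in the order.
    replace(identifier, message) {
        const index = this.messages.findIndex(m => m.identifier === identifier);
        if (index !== -1) this.messages[index] = { identifier, ...message };
    }
}

// Usage mirroring the new impersonation branch:
// if (type === "impersonate") chatCompletion.replace('main', chatCompletion.makeSystemMessage(substituteParams(oai_settings.impersonation_prompt)));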