Let prompt manager handle character and scenario prompts

Author: maver
Date:   2023-06-10 20:09:48 +02:00
parent 29bc3c854f
commit eb315993b4
3 changed files with 50 additions and 18 deletions


@@ -366,13 +366,22 @@ function formatWorldInfo(value) {
     return stringFormat(oai_settings.wi_format, value);
 }
-async function prepareOpenAIMessages({ systemPrompt, name2, storyString, worldInfoBefore, worldInfoAfter, extensionPrompt, bias, type, quietPrompt, jailbreakPrompt, cyclePrompt } = {}) {
+async function prepareOpenAIMessages({ systemPrompt, name2, charDescription, charPersonality, Scenario, worldInfoBefore, worldInfoAfter, extensionPrompt, bias, type, quietPrompt, jailbreakPrompt, cyclePrompt } = {}) {
     const chatCompletion = promptManager.getChatCompletion();
     // Prepare messages
     const worldInfoBeforeMessage = chatCompletion.makeSystemMessage(formatWorldInfo(worldInfoBefore));
     const worldInfoAfterMessage = chatCompletion.makeSystemMessage(formatWorldInfo(worldInfoAfter));
-    const characterInfoMessages = chatCompletion.makeSystemMessage(substituteParams(storyString));
+    const charDescriptionMessage = chatCompletion.makeSystemMessage(substituteParams(charDescription));
+    const charPersonalityMessage = chatCompletion.makeSystemMessage(
+        name2 + '\'s personality: ' + substituteParams(charPersonality)
+    );
+    const scenarioMessage = chatCompletion.makeSystemMessage(
+        'Circumstances and context of the dialogue: ' + substituteParams(Scenario)
+    );
     const newChatMessage = chatCompletion.makeSystemMessage('[Start new chat]');
     const chatMessages = openai_msgs;
     const biasMessage = chatCompletion.makeSystemMessage(bias.trim());
@@ -381,7 +390,9 @@ async function prepareOpenAIMessages({ systemPrompt, name2, storyString, worldIn
     chatCompletion
         .replace('worldInfoBefore', worldInfoBeforeMessage)
         .replace('worldInfoAfter', worldInfoAfterMessage)
-        .replace('characterInfo', characterInfoMessages)
+        .replace('charDescription', charDescriptionMessage)
+        .replace('charPersonality', charPersonalityMessage)
+        .replace('scenario', scenarioMessage)
         .replace('newMainChat', newChatMessage)
         .replace('chatHistory', chatMessages)
@@ -428,13 +439,10 @@ async function prepareOpenAIMessages({ systemPrompt, name2, storyString, worldIn
         {...tokenHandler.getCounts(), ...chatCompletion.getTokenCounts()}
     );
-    // Save settings with updated token calculation and return context
-    return promptManager.saveServiceSettings().then(() => {
-        const openai_msgs_tosend = chatCompletion.getChat();
-        openai_messages_count = openai_msgs_tosend.filter(x => x.role === "user" || x.role === "assistant").length;
+    const openai_msgs_tosend = chatCompletion.getChat();
+    openai_messages_count = openai_msgs_tosend.filter(x => x.role === "user" || x.role === "assistant").length;
-        return [openai_msgs_tosend, false];
-    });
+    return [openai_msgs_tosend, false];
 }
 function getGroupMembers(activeGroup) {
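For orientation, the pattern the new code relies on is a chat-completion container with named prompt slots that are filled with system messages via makeSystemMessage() and replace(). The sketch below is a minimal, self-contained illustration of that idea, not SillyTavern's actual PromptManager/ChatCompletion implementation; the class name, slot list, and example content are hypothetical.

```js
// Hypothetical stand-in for the chat completion object used in the diff above.
// It keeps an ordered map of slot name -> message and fills slots on demand.
class ChatCompletionSketch {
    constructor(slotNames) {
        // Each configured slot starts out empty (null) until replace() fills it.
        this.slots = new Map(slotNames.map(name => [name, null]));
    }

    // Wrap plain text as a system-role chat message.
    makeSystemMessage(content) {
        return { role: 'system', content };
    }

    // Fill a named slot; returns `this` so calls can be chained as in the diff.
    replace(slotName, message) {
        if (this.slots.has(slotName)) {
            this.slots.set(slotName, message);
        }
        return this;
    }

    // Final message list: configured order, with empty slots dropped.
    getChat() {
        return [...this.slots.values()].filter(Boolean);
    }
}

// Hypothetical usage mirroring the new calls in prepareOpenAIMessages:
const chatCompletion = new ChatCompletionSketch(['charDescription', 'charPersonality', 'scenario', 'chatHistory']);
chatCompletion
    .replace('charDescription', chatCompletion.makeSystemMessage('A tired space courier.'))
    .replace('charPersonality', chatCompletion.makeSystemMessage("Mira's personality: dry, patient"))
    .replace('scenario', chatCompletion.makeSystemMessage('Circumstances and context of the dialogue: a night shift at the docks'));

console.log(chatCompletion.getChat());
```

Keeping the slot order inside the container rather than at the call site is what lets character description, personality, and scenario become individually ordered and toggleable prompts under the prompt manager.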