Merge pull request #981 from SillyTavern/hotfix-prompt-manager

Hotfix prompt manager
Cohee 2023-08-20 15:38:54 +03:00 committed by GitHub
commit 9d495d6b47

@@ -479,9 +479,10 @@ function populateChatHistory(prompts, chatCompletion, type = null, cyclePrompt =
     // Chat History
     chatCompletion.add(new MessageCollection('chatHistory'), prompts.index('chatHistory'));
 
+    let names = (selected_group && groups.find(x => x.id === selected_group)?.members.map(member => characters.find(c => c.avatar === member)?.name).filter(Boolean).join(', ')) || '';
     // Reserve budget for new chat message
     const newChat = selected_group ? oai_settings.new_group_chat_prompt : oai_settings.new_chat_prompt;
-    const newChatMessage = new Message('system', newChat, 'newMainChat');
+    const newChatMessage = new Message('system', substituteParams(newChat, null, null, null, names), 'newMainChat');
     chatCompletion.reserveBudget(newChatMessage);
 
     // Reserve budget for continue nudge
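
Note on the hunk above: names collapses the current group's member display names into a comma-separated string, and passing it as the fifth argument of substituteParams presumably routes it into the {{group}} macro, so the new-group-chat prompt can name the participants before its token cost is reserved. A minimal sketch of that substitution under those assumptions (substituteGroupMacro is a hypothetical stand-in, not SillyTavern's real helper, and the prompt text is only an example):

// Hypothetical stand-in for the {{group}} part of substituteParams (assumption,
// not the real implementation, which handles many more macros).
function substituteGroupMacro(template, groupNames) {
    return template.replace(/{{group}}/gi, groupNames);
}

// Example: member names resolved from the group roster, joined the same way the diff does.
const names = ['Alice', 'Bob'].filter(Boolean).join(', ');
const newChat = '[Start a new group chat. Group members: {{group}}]'; // example prompt text
console.log(substituteGroupMacro(newChat, names));
// -> [Start a new group chat. Group members: Alice, Bob]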
@@ -541,9 +542,8 @@ function populateDialogueExamples(prompts, chatCompletion) {
     chatCompletion.add(new MessageCollection('dialogueExamples'), prompts.index('dialogueExamples'));
     if (openai_msgs_example.length) {
         const newExampleChat = new Message('system', oai_settings.new_example_chat_prompt, 'newChat');
-        chatCompletion.reserveBudget(newExampleChat);
-
         [...openai_msgs_example].forEach((dialogue, dialogueIndex) => {
+            chatCompletion.insert(newExampleChat, 'dialogueExamples');
             dialogue.forEach((prompt, promptIndex) => {
                 const role = 'system';
                 const content = prompt.content || '';
@@ -556,11 +556,6 @@ function populateDialogueExamples(prompts, chatCompletion) {
                 }
             });
         });
-
-        chatCompletion.freeBudget(newExampleChat);
-
-        const chatExamples = chatCompletion.getMessages().getItemByIdentifier('dialogueExamples').getCollection();
-        if (chatExamples.length) chatCompletion.insertAtStart(newExampleChat, 'dialogueExamples');
     }
 }
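
Read together, the two hunks above change how the example-chat separator is handled: instead of reserving budget for a single newChat marker, freeing it after the loop, and prepending it only when some examples survived, the separator is now inserted into the 'dialogueExamples' collection ahead of each example dialogue. A rough sketch of the new flow with a stubbed collection (the stub is illustrative only; the real ChatCompletion class also enforces a token budget):

// Illustrative stub standing in for ChatCompletion.insert() (assumption: the real
// class groups messages into named collections and tracks a token budget).
const chatCompletion = {
    collections: { dialogueExamples: [] },
    insert(message, name) { this.collections[name].push(message); },
};

const newExampleChat = { role: 'system', content: '[Example Chat]' }; // example separator text
const openai_msgs_example = [
    [{ content: 'Alice: example message one' }],
    [{ content: 'Alice: example message two' }],
];

// New behaviour: one separator per example dialogue, inserted as the dialogue is walked.
[...openai_msgs_example].forEach((dialogue) => {
    chatCompletion.insert(newExampleChat, 'dialogueExamples');
    dialogue.forEach((prompt) => {
        chatCompletion.insert({ role: 'system', content: prompt.content || '' }, 'dialogueExamples');
    });
});

console.log(chatCompletion.collections.dialogueExamples.length); // -> 4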
@@ -769,6 +764,12 @@ function preparePromptsForChatCompletion(Scenario, charPersonality, name2, world
         prompts.set(jbReplacement, prompts.index('jailbreak'));
     }
 
+    // TODO: Integrate Group nudge into the prompt manager properly
+    if(selected_group) {
+        let group_nudge = {"role": "system", "content": `[Write the next reply only as ${name2}]`};
+        openai_msgs.push(group_nudge);
+    }
+
     // Allow subscribers to manipulate the prompts object
     eventSource.emit(event_types.OAI_BEFORE_CHATCOMPLETION, prompts);
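
The added block is an acknowledged stop-gap (see the TODO): for group chats it bypasses the prompt manager and pushes a plain system message onto openai_msgs, nudging the model to answer only as the active character. A minimal sketch of the resulting message, assuming an example value for name2:

// Assumed example value; name2 is resolved elsewhere from the active character.
const name2 = 'Seraphina';
const openai_msgs = [];

// Same shape as the nudge added in the diff above.
const group_nudge = { role: 'system', content: `[Write the next reply only as ${name2}]` };
openai_msgs.push(group_nudge);

console.log(openai_msgs[0].content); // -> [Write the next reply only as Seraphina]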
@@ -1443,7 +1444,7 @@ class Message {
         this.content = content;
 
         if (this.content) {
-            this.tokens = tokenHandler.count({ role: this.role, content: this.content })
+            this.tokens = tokenHandler.count({ role: this.role, content: this.content });
         } else {
             this.tokens = 0;
         }