Merge pull request #981 from SillyTavern/hotfix-prompt-manager
Hotfix prompt manager
Commit: 9d495d6b47
@@ -479,9 +479,10 @@ function populateChatHistory(prompts, chatCompletion, type = null, cyclePrompt =
     // Chat History
     chatCompletion.add(new MessageCollection('chatHistory'), prompts.index('chatHistory'));
 
+    let names = (selected_group && groups.find(x => x.id === selected_group)?.members.map(member => characters.find(c => c.avatar === member)?.name).filter(Boolean).join(', ')) || '';
     // Reserve budget for new chat message
     const newChat = selected_group ? oai_settings.new_group_chat_prompt : oai_settings.new_chat_prompt;
-    const newChatMessage = new Message('system', newChat, 'newMainChat');
+    const newChatMessage = new Message('system', substituteParams(newChat, null, null, null, names), 'newMainChat');
     chatCompletion.reserveBudget(newChatMessage);
 
     // Reserve budget for continue nudge
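The substituteParams() call added above passes the joined group member names as an extra argument when building the new-chat message, presumably so that a group macro inside oai_settings.new_group_chat_prompt can resolve to that member list. A minimal sketch of that kind of substitution, assuming a {{group}} placeholder (substituteGroupMacro is a hypothetical helper, not SillyTavern's actual API):

    // Hypothetical helper: swap a {{group}} placeholder for the joined member names.
    function substituteGroupMacro(template, names) {
        return template.replace(/{{group}}/gi, names);
    }

    // substituteGroupMacro('[Start a new group chat. Group members: {{group}}]', 'Alice, Bob')
    // -> '[Start a new group chat. Group members: Alice, Bob]'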
@@ -541,9 +542,8 @@ function populateDialogueExamples(prompts, chatCompletion) {
     chatCompletion.add(new MessageCollection('dialogueExamples'), prompts.index('dialogueExamples'));
     if (openai_msgs_example.length) {
         const newExampleChat = new Message('system', oai_settings.new_example_chat_prompt, 'newChat');
-        chatCompletion.reserveBudget(newExampleChat);
-
         [...openai_msgs_example].forEach((dialogue, dialogueIndex) => {
+            chatCompletion.insert(newExampleChat, 'dialogueExamples');
             dialogue.forEach((prompt, promptIndex) => {
                 const role = 'system';
                 const content = prompt.content || '';
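This hunk moves the new-example-chat separator from a single up-front budget reservation to an insert before each example dialogue. A self-contained sketch of the resulting interleaving, with assumed sample data rather than the real ChatCompletion API:

    // Illustrative only: a separator message goes in before each example dialogue,
    // so the collection interleaves as separator, dialogue A, separator, dialogue B, ...
    const newExampleChatPrompt = '[Start a new chat]'; // assumed example value
    const exampleDialogues = [                          // assumed sample data
        [{ role: 'system', content: 'Alice: Hi!' }],
        [{ role: 'system', content: 'Bob: Hello there.' }],
    ];
    const out = [];
    for (const dialogue of exampleDialogues) {
        out.push({ role: 'system', content: newExampleChatPrompt });
        out.push(...dialogue);
    }
    // out now holds: separator, Alice's line, separator, Bob's line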
@@ -556,11 +556,6 @@ function populateDialogueExamples(prompts, chatCompletion) {
                 }
             });
         });
-
-        chatCompletion.freeBudget(newExampleChat);
-
-        const chatExamples = chatCompletion.getMessages().getItemByIdentifier('dialogueExamples').getCollection();
-        if (chatExamples.length) chatCompletion.insertAtStart(newExampleChat, 'dialogueExamples');
     }
 }
 
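The removal above drops the reserve-then-free token budgeting for the example-chat separator, along with the insertAtStart fallback. As a rough mental model of what reserveBudget() and freeBudget() stand for (assumed semantics only, not ChatCompletion's real implementation), think of a running token budget:

    // Sketch of the reserve/free idea under assumed semantics.
    class TokenBudget {
        constructor(total) { this.remaining = total; }
        reserveBudget(message) { this.remaining -= message.tokens; } // hold tokens for a message added later
        freeBudget(message) { this.remaining += message.tokens; }    // return them if no longer needed
        canAfford(message) { return this.remaining >= message.tokens; }
    }

    const budget = new TokenBudget(4096);
    budget.reserveBudget({ tokens: 12 });  // keep room for the separator
    // ...fill other prompts against budget.remaining...
    budget.freeBudget({ tokens: 12 });     // release it again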
@@ -769,6 +764,12 @@ function preparePromptsForChatCompletion(Scenario, charPersonality, name2, world
         prompts.set(jbReplacement, prompts.index('jailbreak'));
     }
 
+    // TODO: Integrate Group nudge into the prompt manager properly
+    if(selected_group) {
+        let group_nudge = {"role": "system", "content": `[Write the next reply only as ${name2}]`};
+        openai_msgs.push(group_nudge);
+    }
+
     // Allow subscribers to manipulate the prompts object
     eventSource.emit(event_types.OAI_BEFORE_CHATCOMPLETION, prompts);
 
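The added block appends a plain system message nudging the model to answer as the active group character; with name2 bound to a character name, the pushed object is just a role/content pair. A small illustration (buildGroupNudge is a hypothetical helper, not part of the codebase):

    // Hypothetical helper mirroring the shape of the nudge pushed above.
    function buildGroupNudge(name2) {
        return { role: 'system', content: `[Write the next reply only as ${name2}]` };
    }

    // buildGroupNudge('Seraphina')
    // -> { role: 'system', content: '[Write the next reply only as Seraphina]' }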
@@ -1443,7 +1444,7 @@ class Message {
         this.content = content;
 
         if (this.content) {
-            this.tokens = tokenHandler.count({ role: this.role, content: this.content })
+            this.tokens = tokenHandler.count({ role: this.role, content: this.content });
         } else {
             this.tokens = 0;
         }
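Beyond the missing semicolon, the surrounding branch shows how a Message is costed: count tokens only when there is content, otherwise charge zero. A runnable illustration with a stand-in tokenizer (the whitespace counter below is an assumption for demonstration; the real tokenHandler uses a proper tokenizer):

    // Stand-in token handler: counts whitespace-separated words, purely for illustration.
    const fakeTokenHandler = { count: ({ content }) => content.split(/\s+/).filter(Boolean).length };
    const countTokens = (msg) =>
        msg.content ? fakeTokenHandler.count({ role: msg.role, content: msg.content }) : 0;

    countTokens({ role: 'system', content: 'Hello there' }); // 2
    countTokens({ role: 'system', content: '' });            // 0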