Mirror of https://github.com/SillyTavern/SillyTavern.git, synced 2025-01-31 11:35:37 +01:00

Merge pull request #982 from SillyTavern/prompt-manager-hotfix

Prompt manager hotfix

commit c2c0007ad1
public/scripts/PromptManager.js

@@ -1,3 +1,5 @@
 "use strict";
+import { callPopup, event_types, eventSource, is_send_press, main_api, substituteParams } from "../script.js";
+import { is_group_generating } from "./group-chats.js";
 import { TokenHandler } from "./openai.js";
@@ -271,6 +273,8 @@ PromptManagerModule.prototype.init = function (moduleConfiguration, serviceSetti
     this.serviceSettings = serviceSettings;
     this.containerElement = document.getElementById(this.configuration.containerIdentifier);
 
+    if ('global' === this.configuration.promptOrder.strategy) this.activeCharacter = {id: this.configuration.promptOrder.dummyId};
+
     this.sanitizeServiceSettings();
 
     // Enable and disable prompts
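The new strategy check is what makes a single, global prompt order possible: under the 'global' strategy the manager installs a placeholder active character whose id is the configured dummyId, so every code path that keys prompt order by active-character id keeps working unchanged. A sketch of the idea (resolvePromptOrderKey is illustrative, not the module's actual code):

// Sketch only: how a strategy flag can collapse per-character prompt
// orders into one shared order without changing the lookup code.
function resolvePromptOrderKey(configuration, character) {
    // 'global': every character shares one order, stored under the
    // reserved placeholder id supplied by the configuration.
    if ('global' === configuration.promptOrder.strategy) {
        return configuration.promptOrder.dummyId;
    }
    // 'character': each character carries its own order, keyed by its id.
    return character.id;
}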
@@ -590,7 +594,7 @@ PromptManagerModule.prototype.init = function (moduleConfiguration, serviceSetti
 PromptManagerModule.prototype.render = function (afterTryGenerate = true) {
     if (main_api !== 'openai') return;
-    if (null === this.activeCharacter) return;
+    if ('character' === this.configuration.promptOrder.strategy && null === this.activeCharacter) return;
     this.error = null;
 
     waitUntilCondition(() => !is_send_press && !is_group_generating, 1024 * 1024, 100).then(() => {
@@ -604,6 +608,11 @@ PromptManagerModule.prototype.render = function (afterTryGenerate = true) {
             this.renderPromptManagerListItems()
             this.makeDraggable();
             this.profileEnd('render');
+        }).catch(error => {
+            this.log('Error caught during render: ' + error);
+            this.renderPromptManager();
+            this.renderPromptManagerListItems()
+            this.makeDraggable();
         });
     } else {
         // Executed during live communication
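render() now waits until nothing is being generated before touching the DOM: it polls is_send_press and is_group_generating, and the new .catch branch still repaints the manager (without the profiling bracket) if that wait fails. If the second argument is a timeout in milliseconds, 1024 * 1024 amounts to roughly seventeen minutes, i.e. "wait practically forever". A minimal poll-based helper consistent with the call site above might look like this; the signature (condition, timeout ms, poll interval ms) is inferred from the arguments, and the project's real waitUntilCondition may differ:

// Minimal sketch: resolve once condition() is true, checking every
// `interval` ms, and reject after `timeout` ms have elapsed.
function waitUntilCondition(condition, timeout = 1000, interval = 100) {
    return new Promise((resolve, reject) => {
        const timer = setTimeout(() => {
            clearInterval(poller);
            reject(new Error('Timed out waiting for condition'));
        }, timeout);
        const poller = setInterval(() => {
            if (condition()) {
                clearTimeout(timer);
                clearInterval(poller);
                resolve();
            }
        }, interval);
    });
}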
public/scripts/openai.js

@@ -19,7 +19,6 @@ import {
     system_message_types,
     replaceBiasMarkup,
     is_send_press,
     saveSettings,
     Generate,
     main_api,
     eventSource,
@@ -389,7 +388,7 @@ function setupChatCompletionPromptManager(openAiSettings) {
     promptManager.tokenHandler = tokenHandler;
 
     promptManager.init(configuration, openAiSettings);
-    promptManager.render();
+    promptManager.render(false);
 
     return promptManager;
 }
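Setup now calls render(false). Judging by the parameter name (afterTryGenerate), the default render performs a token-counting dry run before painting; passing false at init time skips that step, so merely constructing the manager cannot trigger generation side effects. Usage, for illustration:

// At initialization: paint the prompt list only.
promptManager.render(false);

// On later user-driven changes: default render, including the dry run.
promptManager.render();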
@@ -485,6 +484,13 @@ function populateChatHistory(prompts, chatCompletion, type = null, cyclePrompt =
     const newChatMessage = new Message('system', substituteParams(newChat, null, null, null, names), 'newMainChat');
     chatCompletion.reserveBudget(newChatMessage);
 
+    // Reserve budget for group nudge
+    let groupNudgeMessage = null;
+    if(selected_group) {
+        const groupNudgeMessage = new Message.fromPrompt(prompts.get('groupNudge'));
+        chatCompletion.reserveBudget(groupNudgeMessage);
+    }
+
     // Reserve budget for continue nudge
     let continueMessage = null;
     if (type === 'continue' && cyclePrompt) {
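One caveat in the block above: the inner `const groupNudgeMessage = ...` declares a new block-scoped binding that shadows the outer `let groupNudgeMessage = null;`. The outer variable therefore stays null, so the matching insertion later (`if (selected_group && groupNudgeMessage)`) never fires, and the budget reserved here is never freed. `new Message.fromPrompt(...)` also only works by accident: applying `new` to a static factory that returns an object yields that object, so the plain static call is the idiomatic form. A corrected sketch, assuming Message.fromPrompt is such a static factory:

// Corrected sketch: assign to the outer binding rather than shadowing it,
// and call the static factory without `new`.
let groupNudgeMessage = null;
if (selected_group) {
    groupNudgeMessage = Message.fromPrompt(prompts.get('groupNudge'));
    chatCompletion.reserveBudget(groupNudgeMessage);
}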
@@ -526,6 +532,12 @@ function populateChatHistory(prompts, chatCompletion, type = null, cyclePrompt =
     chatCompletion.freeBudget(newChatMessage);
     chatCompletion.insertAtStart(newChatMessage, 'chatHistory');
 
+    // Reserve budget for group nudge
+    if(selected_group && groupNudgeMessage) {
+        chatCompletion.freeBudget(groupNudgeMessage);
+        chatCompletion.insertAtEnd(groupNudgeMessage, 'chatHistory');
+    }
+
     // Insert and free continue nudge
     if (type === 'continue' && continueMessage) {
         chatCompletion.freeBudget(continueMessage);
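This block is the counterpart of the reservation made earlier (its comment still says "Reserve", but at this point the budget is being freed and spent). The reserve/free pattern exists because chat history greedily fills whatever token budget remains: anything that must appear alongside the history has its tokens held back first and returned only at the moment the message is actually placed. In outline, with `nudge` standing for any such reserved message:

chatCompletion.reserveBudget(nudge);              // 1. hold tokens before history is filled
// ... chat history consumes the remaining budget ...
chatCompletion.freeBudget(nudge);                 // 2. release the hold
chatCompletion.insertAtEnd(nudge, 'chatHistory'); // 3. spend the tokens on the nudge itself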
@@ -544,7 +556,10 @@ function populateDialogueExamples(prompts, chatCompletion) {
     if (openai_msgs_example.length) {
         const newExampleChat = new Message('system', oai_settings.new_example_chat_prompt, 'newChat');
         [...openai_msgs_example].forEach((dialogue, dialogueIndex) => {
-            chatCompletion.insert(newExampleChat, 'dialogueExamples');
+            let examplesAdded = 0;
+
+            if (chatCompletion.canAfford(newExampleChat)) chatCompletion.insert(newExampleChat, 'dialogueExamples');
+
             dialogue.forEach((prompt, promptIndex) => {
                 const role = 'system';
                 const content = prompt.content || '';
@@ -554,8 +569,13 @@ function populateDialogueExamples(prompts, chatCompletion) {
                 chatMessage.setName(prompt.name);
-                chatCompletion.insert(chatMessage, 'dialogueExamples');
+                if (chatCompletion.canAfford(chatMessage)) {
+                    chatCompletion.insert(chatMessage, 'dialogueExamples');
+                    examplesAdded++;
+                }
             });
 
+            if (0 === examplesAdded) {
+                chatCompletion.removeLastFrom('dialogueExamples');
+            }
         });
     }
 }
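Two fixes land in this pair of hunks: the "new example chat" separator is only inserted when the budget can afford it, and examplesAdded records whether any example message actually fit; if none did, the now-dangling separator is popped again with removeLastFrom, so an empty example block never reaches the final prompt. The generalized shape, with `header`, `items`, and `collection` as placeholders:

// Generalized shape of the fix: never leave a section header
// without at least one body item.
function insertAffordable(chatCompletion, header, items, collection) {
    if (chatCompletion.canAfford(header)) chatCompletion.insert(header, collection);
    let added = 0;
    for (const item of items) {
        if (chatCompletion.canAfford(item)) {
            chatCompletion.insert(item, collection);
            added++;
        }
    }
    // Nothing fit: pop the dangling header again.
    if (0 === added) chatCompletion.removeLastFrom(collection);
}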
@@ -697,7 +717,8 @@ function populateChatCompletion(prompts, chatCompletion, { bias, quietPrompt, ty
  */
 function preparePromptsForChatCompletion(Scenario, charPersonality, name2, worldInfoBefore, worldInfoAfter, charDescription, quietPrompt, bias, extensionPrompts, systemPromptOverride, jailbreakPromptOverride) {
     const scenarioText = Scenario ? `[Circumstances and context of the dialogue: ${Scenario}]` : '';
-    const charPersonalityText = charPersonality ? `[${name2}'s personality: ${charPersonality}]` : '';
+    const charPersonalityText = charPersonality ? `[${name2}'s personality: ${charPersonality}]` : ''
+    const groupNudge = `[Write the next reply only as ${name2}]`;
 
     // Create entries for system prompts
     const systemPrompts = [
@@ -711,7 +732,8 @@ function preparePromptsForChatCompletion(Scenario, charPersonality, name2, world
         { role: 'system', content: oai_settings.nsfw_avoidance_prompt, identifier: 'nsfwAvoidance' },
         { role: 'system', content: oai_settings.impersonation_prompt, identifier: 'impersonate' },
         { role: 'system', content: quietPrompt, identifier: 'quietPrompt' },
-        { role: 'system', content: bias, identifier: 'bias' }
+        { role: 'system', content: bias, identifier: 'bias' },
+        { role: 'system', content: groupNudge, identifier: 'groupNudge' }
     ];
 
     // Tavern Extras - Summary
@@ -765,12 +787,6 @@ function preparePromptsForChatCompletion(Scenario, charPersonality, name2, world
         prompts.set(jbReplacement, prompts.index('jailbreak'));
     }
 
-    // TODO: Integrate Group nudge into the prompt manager properly
-    if(selected_group) {
-        let group_nudge = {"role": "system", "content": `[Write the next reply only as ${name2}]`};
-        openai_msgs.push(group_nudge);
-    }
-
     // Allow subscribers to manipulate the prompts object
     eventSource.emit(event_types.OAI_BEFORE_CHATCOMPLETION, prompts);
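The block deleted here was the stopgap flagged by its own TODO: it pushed the nudge straight into openai_msgs, bypassing both the prompt manager (no identifier, so it could not be toggled or reordered) and the token budget. With the groupNudge system prompt registered above and the reserve/insert logic in populateChatHistory, the nudge now travels the same pipeline as every other prompt. For orientation, a plausible shape of the Message.fromPrompt factory used earlier; this is an assumption about the class, not a quote from it:

// Assumed sketch: turn a prompt entry (role/content/identifier)
// into a Message the budgeting code can measure and place.
class Message {
    constructor(role, content, identifier) {
        this.role = role;
        this.content = content;
        this.identifier = identifier;
    }

    static fromPrompt(prompt) {
        return new Message(prompt.role, prompt.content, prompt.identifier);
    }
}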
@@ -1665,6 +1681,21 @@ class ChatCompletion {
         }
     }
 
+    /**
+     * Remove the last item of the collection
+     *
+     * @param identifier
+     */
+    removeLastFrom(identifier) {
+        const index = this.findMessageIndex(identifier);
+        const message = this.messages.collection[index].collection.pop();
+
+        this.increaseTokenBudgetBy(message.getTokens());
+
+        this.log(`Removed ${message.identifier} from ${identifier}. Remaining tokens: ${this.tokenBudget}`);
+    }
+
     /**
      * Checks if the token budget can afford the tokens of the specified message.
      *
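removeLastFrom pops the last message of the named collection and credits its token count back to the budget. As written it assumes the collection is non-empty: pop() on an empty array returns undefined, and message.getTokens() would then throw. A defensive variant under the same assumed layout:

// Defensive sketch, assuming the same messages/collection structure:
removeLastFrom(identifier) {
    const index = this.findMessageIndex(identifier);
    const message = this.messages.collection[index].collection.pop();
    if (!message) return; // collection was already empty

    this.increaseTokenBudgetBy(message.getTokens());
    this.log(`Removed ${message.identifier} from ${identifier}. Remaining tokens: ${this.tokenBudget}`);
}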