Mirror of https://github.com/SillyTavern/SillyTavern.git (synced 2025-06-05 21:59:27 +02:00)
Refactor prepareOpenAIMessages, move prompt preparation into its own function
This commit extracts the prompt assembly out of prepareOpenAIMessages into a new preparePromptsForChatCompletion helper and reformats the extension prompt pushes for readability.
@@ -616,39 +616,21 @@ function populateChatCompletion (prompts, chatCompletion, {bias, quietPrompt, ty
 }
 
 /**
- * Take a configuration object and prepares messages for a chat with OpenAI's chat completion API.
- * Handles prompts, prepares chat history, manages token budget, and processes various user settings.
+ * Combines system prompts with prompt manager prompts
  *
- * @param {Object} content - System prompts provided by SillyTavern
- * @param {string} content.name2 - The second name to be used in the messages.
- * @param {string} content.charDescription - Description of the character.
- * @param {string} content.charPersonality - Description of the character's personality.
- * @param {string} content.Scenario - The scenario or context of the dialogue.
- * @param {string} content.worldInfoBefore - The world info to be added before the main conversation.
- * @param {string} content.worldInfoAfter - The world info to be added after the main conversation.
- * @param {string} content.bias - The bias to be added in the conversation.
- * @param {string} content.type - The type of the chat, can be 'impersonate'.
- * @param {string} content.quietPrompt - The quiet prompt to be used in the conversation.
- * @param {Array} content.extensionPrompts - An array of additional prompts.
- * @param dryRun - Whether this is a live call or not.
- * @returns {(*[]|boolean)[]} An array where the first element is the prepared chat and the second element is a boolean flag.
+ * @param {string} Scenario - The scenario or context of the dialogue.
+ * @param {string} charPersonality - Description of the character's personality.
+ * @param {string} name2 - The second name to be used in the messages.
+ * @param {string} worldInfoBefore - The world info to be added before the main conversation.
+ * @param {string} worldInfoAfter - The world info to be added after the main conversation.
+ * @param {string} charDescription - Description of the character.
+ * @param {string} quietPrompt - The quiet prompt to be used in the conversation.
+ * @param {string} bias - The bias to be added in the conversation.
+ * @param {Object} extensionPrompts - An object containing additional prompts.
+ *
+ * @returns {Object} prompts - The prepared and merged system and user-defined prompts.
  */
-function prepareOpenAIMessages({
-    name2,
-    charDescription,
-    charPersonality,
-    Scenario,
-    worldInfoBefore,
-    worldInfoAfter,
-    bias,
-    type,
-    quietPrompt,
-    extensionPrompts,
-    cyclePrompt
-} = {}, dryRun) {
-    // Without a character selected, there is no way to accurately calculate tokens
-    if (!promptManager.activeCharacter && dryRun) return [null, false];
-
+function preparePromptsForChatCompletion(Scenario, charPersonality, name2, worldInfoBefore, worldInfoAfter, charDescription, quietPrompt, bias, extensionPrompts) {
     const scenarioText = Scenario ? `Circumstances and context of the dialogue: ${Scenario}` : '';
     const charPersonalityText = charPersonality ? `${name2}'s personality: ${charPersonality}` : '';
 
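Review note: after this hunk, preparePromptsForChatCompletion owns only the assembly and ordering of system prompt pieces; the dry-run guard and token budgeting stay with prepareOpenAIMessages, which this diff moves further down. A minimal sketch of the kind of assembly the helper is responsible for; the function name and the identifier values below are illustrative stand-ins, not the actual SillyTavern code:

// Minimal sketch, not the real implementation: shows the style of string
// assembly the extracted helper performs. Identifier names are assumptions
// used for illustration only.
function buildSystemPromptsSketch(Scenario, charPersonality, name2, charDescription) {
    // Empty inputs collapse to empty strings so they can be filtered out below.
    const scenarioText = Scenario ? `Circumstances and context of the dialogue: ${Scenario}` : '';
    const charPersonalityText = charPersonality ? `${name2}'s personality: ${charPersonality}` : '';

    return [
        { role: 'system', content: charDescription, identifier: 'charDescription' },
        { role: 'system', content: charPersonalityText, identifier: 'charPersonality' },
        { role: 'system', content: scenarioText, identifier: 'scenario' },
    ].filter(prompt => prompt.content);
}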
@@ -669,11 +651,19 @@ function prepareOpenAIMessages({
 
     // Tavern Extras - Summary
     const summary = extensionPrompts['1_memory'];
-    if (summary && summary.content) systemPrompts.push({role: 'system', content: summary.content, identifier: 'summary'});
+    if (summary && summary.content) systemPrompts.push({
+        role: 'system',
+        content: summary.content,
+        identifier: 'summary'
+    });
 
     // Authors Note
     const authorsNote = extensionPrompts['2_floating_prompt'];
-    if (authorsNote && authorsNote.value) systemPrompts.push({role: 'system', content: authorsNote.value, identifier: 'authorsNote'});
+    if (authorsNote && authorsNote.value) systemPrompts.push({
+        role: 'system',
+        content: authorsNote.value,
+        identifier: 'authorsNote'
+    });
 
     // Persona Description
     if (power_user.persona_description) {
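Both reformatted pushes follow the same guard-then-push pattern and differ only in the source field: the summary entry exposes content while the Author's Note entry exposes value. A hedged sketch of how that pattern could be factored out, using a hypothetical helper that is not part of this commit:

// Hypothetical helper (not in this commit): push an extension prompt entry
// only when it carries usable text, tagging it with an identifier.
function pushExtensionPrompt(systemPrompts, entry, identifier) {
    const content = entry && (entry.content ?? entry.value);
    if (!content) return;
    systemPrompts.push({
        role: 'system',
        content: content,
        identifier: identifier,
    });
}

// Usage mirroring the hunk above:
// pushExtensionPrompt(systemPrompts, extensionPrompts['1_memory'], 'summary');
// pushExtensionPrompt(systemPrompts, extensionPrompts['2_floating_prompt'], 'authorsNote');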
@@ -709,6 +699,43 @@ function prepareOpenAIMessages({
     // Allow subscribers to manipulate the prompts object
     eventSource.emit(event_types.OAI_BEFORE_CHATCOMPLETION, prompts);
 
+    return prompts;
+}
+
+/**
+ * Take a configuration object and prepares messages for a chat with OpenAI's chat completion API.
+ * Handles prompts, prepares chat history, manages token budget, and processes various user settings.
+ *
+ * @param {Object} content - System prompts provided by SillyTavern
+ * @param {string} content.name2 - The second name to be used in the messages.
+ * @param {string} content.charDescription - Description of the character.
+ * @param {string} content.charPersonality - Description of the character's personality.
+ * @param {string} content.Scenario - The scenario or context of the dialogue.
+ * @param {string} content.worldInfoBefore - The world info to be added before the main conversation.
+ * @param {string} content.worldInfoAfter - The world info to be added after the main conversation.
+ * @param {string} content.bias - The bias to be added in the conversation.
+ * @param {string} content.type - The type of the chat, can be 'impersonate'.
+ * @param {string} content.quietPrompt - The quiet prompt to be used in the conversation.
+ * @param {Array} content.extensionPrompts - An array of additional prompts.
+ * @param dryRun - Whether this is a live call or not.
+ * @returns {(*[]|boolean)[]} An array where the first element is the prepared chat and the second element is a boolean flag.
+ */
+function prepareOpenAIMessages({
+    name2,
+    charDescription,
+    charPersonality,
+    Scenario,
+    worldInfoBefore,
+    worldInfoAfter,
+    bias,
+    type,
+    quietPrompt,
+    extensionPrompts,
+    cyclePrompt
+} = {}, dryRun) {
+    // Without a character selected, there is no way to accurately calculate tokens
+    if (!promptManager.activeCharacter && dryRun) return [null, false];
+
     const chatCompletion = new ChatCompletion();
     if (power_user.console_log_prompts) chatCompletion.enableLogging();
 
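The relocated prepareOpenAIMessages keeps its external contract: it returns an array whose first element is the prepared chat and whose second element is a boolean flag, with [null, false] returned early when a dry run has no active character to count tokens against. A hedged sketch of a caller under that contract; the caller itself is an assumption, not part of this commit:

// Hedged sketch of a caller, assuming only the documented [chat, flag] return
// shape from the JSDoc above. The caller name and its use are placeholders.
function dryRunSketch(content) {
    const [chat, flag] = prepareOpenAIMessages(content, true /* dryRun */);
    if (chat === null && flag === false) {
        // No active character selected, so tokens cannot be counted accurately.
        return null;
    }
    return chat;
}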
@@ -716,6 +743,9 @@ function prepareOpenAIMessages({
     chatCompletion.setTokenBudget(userSettings.openai_max_context, userSettings.openai_max_tokens);
 
     try {
+        // Merge ordered user prompts and markers with system prompts
+        const prompts = preparePromptsForChatCompletion(Scenario, charPersonality, name2, worldInfoBefore, worldInfoAfter, charDescription, quietPrompt, bias, extensionPrompts);
+
         // Fill the chat completion with as much context as the budget allows
         populateChatCompletion(prompts, chatCompletion, {bias, quietPrompt, type, cyclePrompt});
     } catch (error) {
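Taken together with the previous hunks, the try block now reads as a two-step flow: build the merged prompt collection, then spend the token budget. A condensed sketch of that flow; it is a simplification for review (dry-run and error handling omitted), not a verbatim copy of the function:

// Condensed sketch of the refactored flow in prepareOpenAIMessages.
// ChatCompletion, preparePromptsForChatCompletion and populateChatCompletion
// are the names used in the diff above; nothing else is added.
function prepareOpenAIMessagesFlowSketch({ name2, charDescription, charPersonality, Scenario,
    worldInfoBefore, worldInfoAfter, bias, type, quietPrompt, extensionPrompts, cyclePrompt } = {}) {
    const chatCompletion = new ChatCompletion();
    chatCompletion.setTokenBudget(userSettings.openai_max_context, userSettings.openai_max_tokens);

    // Step 1: assemble and merge system prompts with prompt manager prompts.
    const prompts = preparePromptsForChatCompletion(Scenario, charPersonality, name2,
        worldInfoBefore, worldInfoAfter, charDescription, quietPrompt, bias, extensionPrompts);

    // Step 2: spend the token budget filling the chat completion from that collection.
    populateChatCompletion(prompts, chatCompletion, { bias, quietPrompt, type, cyclePrompt });

    // The real function continues past this point (omitted here).
    return chatCompletion;
}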