diff --git a/public/script.js b/public/script.js
index 64b2dae33..d838ef50b 100644
--- a/public/script.js
+++ b/public/script.js
@@ -79,6 +79,7 @@ import {
 import {
     setOpenAIMessageExamples,
     setOpenAIMessages,
+    setupOpenAIPromptManager,
     prepareOpenAIMessages,
     sendOpenAIRequest,
     loadOpenAISettings,
@@ -5107,6 +5108,7 @@ async function getSettings(type) {
 
     // OpenAI
     loadOpenAISettings(data, settings);
+    setupOpenAIPromptManager(settings);
 
     // Horde
     loadHordeSettings(settings);
diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index 8ab219ad7..0df146ade 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -22,7 +22,14 @@ import {
     is_send_press,
     main_api,
 } from "../script.js";
 import { groups, selected_group } from "./group-chats.js";
+
+import {
+    defaultPromptManagerSettings,
+    openAiDefaultPromptLists,
+    openAiDefaultPrompts,
+    PromptManagerModule as PromptManager
+} from "./PromptManager.js";
 
 import {
     power_user,
@@ -50,6 +57,7 @@ export {
     loadOpenAISettings,
     setOpenAIMessages,
     setOpenAIMessageExamples,
+    setupOpenAIPromptManager,
     generateOpenAIPromptCache,
     prepareOpenAIMessages,
     sendOpenAIRequest,
@@ -188,6 +196,8 @@ export function getTokenCountOpenAI(text) {
     return countTokens(message, true);
 }
 
+let promptManager = null;
+
 function validateReverseProxy() {
     if (!oai_settings.reverse_proxy) {
         return;
@@ -261,6 +271,23 @@ function setOpenAIMessageExamples(mesExamplesArray) {
     }
 }
 
+function setupOpenAIPromptManager(settings) {
+    promptManager = new PromptManager();
+    const configuration = {
+        prefix: 'openai_',
+        containerIdentifier: 'openai_prompt_manager',
+        listIdentifier: 'openai_prompt_manager_list',
+        draggable: true
+    };
+
+    promptManager.saveServiceSettings = () => {
+        saveSettingsDebounced();
+    };
+
+    promptManager.init(configuration, settings, default_settings);
+    promptManager.render();
+}
+
 function generateOpenAIPromptCache() {
     openai_msgs = openai_msgs.reverse();
     openai_msgs.forEach(function (msg, i, arr) {