Fix injection order

This commit is contained in:
Cohee
2023-10-11 22:42:25 +03:00
parent 59ae661f62
commit e2f0162e5a
2 changed files with 9 additions and 17 deletions

View File

@@ -86,7 +86,6 @@ import {
     sendOpenAIRequest,
     loadOpenAISettings,
     setOpenAIOnlineStatus,
-    generateOpenAIPromptCache,
     oai_settings,
     is_get_status_openai,
     openai_messages_count,
@@ -2853,10 +2852,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
         generatedPromptCache += cycleGenerationPrompt;
         if (generatedPromptCache.length == 0 || type === 'continue') {
-            if (main_api === 'openai') {
-                generateOpenAIPromptCache();
-            }
             console.debug('generating prompt');
             chatString = "";
             arrMes = arrMes.reverse();

View File

@@ -65,7 +65,6 @@ export {
     setOpenAIMessages,
     setOpenAIMessageExamples,
     setupChatCompletionPromptManager,
-    generateOpenAIPromptCache,
     prepareOpenAIMessages,
     sendOpenAIRequest,
     setOpenAIOnlineStatus,
@@ -387,15 +386,6 @@ function setupChatCompletionPromptManager(openAiSettings) {
     return promptManager;
 }
-function generateOpenAIPromptCache() {
-    openai_msgs = openai_msgs.reverse();
-    openai_msgs.forEach(function (msg, i, arr) {
-        let item = msg["content"];
-        msg["content"] = item;
-        openai_msgs[i] = msg;
-    });
-}
 function parseExampleIntoIndividual(messageExampleString) {
     let result = []; // array of msgs
     let tmp = messageExampleString.split("\n");
@@ -471,7 +461,8 @@ function populationInjectionPrompts(prompts) {
         const depthPrompts = prompts.filter(prompt => prompt.injection_depth === i && prompt.content);
         // Order of priority (most important go lower)
         const roles = ['system', 'user', 'assistant'];
+        const roleMessages = [];

         for (const role of roles) {
             // Get prompts for current role
@@ -482,10 +473,16 @@ function populationInjectionPrompts(prompts) {
             const jointPrompt = [rolePrompts, extensionPrompt].filter(x => x).map(x => x.trim()).join('\n');

             if (jointPrompt && jointPrompt.length) {
-                openai_msgs.splice(i, 0, { "role": role, 'content': jointPrompt });
+                roleMessages.push({ "role": role, 'content': jointPrompt });
             }
         }
+
+        if (roleMessages.length) {
+            openai_msgs.splice(i, 0, ...roleMessages);
+        }
     }
+
+    openai_msgs = openai_msgs.reverse();
 }

 /**