Fix injection order
This commit is contained in:
parent
59ae661f62
commit
e2f0162e5a
|
@ -86,7 +86,6 @@ import {
|
|||
sendOpenAIRequest,
|
||||
loadOpenAISettings,
|
||||
setOpenAIOnlineStatus,
|
||||
generateOpenAIPromptCache,
|
||||
oai_settings,
|
||||
is_get_status_openai,
|
||||
openai_messages_count,
|
||||
|
@ -2853,10 +2852,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
|
|||
|
||||
generatedPromptCache += cycleGenerationPrompt;
|
||||
if (generatedPromptCache.length == 0 || type === 'continue') {
|
||||
if (main_api === 'openai') {
|
||||
generateOpenAIPromptCache();
|
||||
}
|
||||
|
||||
console.debug('generating prompt');
|
||||
chatString = "";
|
||||
arrMes = arrMes.reverse();
|
||||
|
|
|
@ -65,7 +65,6 @@ export {
|
|||
setOpenAIMessages,
|
||||
setOpenAIMessageExamples,
|
||||
setupChatCompletionPromptManager,
|
||||
generateOpenAIPromptCache,
|
||||
prepareOpenAIMessages,
|
||||
sendOpenAIRequest,
|
||||
setOpenAIOnlineStatus,
|
||||
|
@ -387,15 +386,6 @@ function setupChatCompletionPromptManager(openAiSettings) {
|
|||
return promptManager;
|
||||
}
|
||||
|
||||
/**
 * Reverses the module-level `openai_msgs` array in place so the message
 * list is ordered for prompt-cache generation.
 *
 * No parameters and no return value; the only observable effect is the
 * mutation (reversal) of `openai_msgs`.
 */
function generateOpenAIPromptCache() {
    // Array.prototype.reverse mutates the array in place and returns the
    // same array object; the reassignment preserves the original code's shape.
    openai_msgs = openai_msgs.reverse();
    // NOTE(review): the original forEach loop read each msg["content"] and
    // wrote the identical value back (then reassigned the same object into
    // the same slot) — a complete no-op, presumably dead leftover from an
    // earlier substitution step — so it has been removed.
}
|
||||
|
||||
function parseExampleIntoIndividual(messageExampleString) {
|
||||
let result = []; // array of msgs
|
||||
let tmp = messageExampleString.split("\n");
|
||||
|
@ -471,7 +461,8 @@ function populationInjectionPrompts(prompts) {
|
|||
const depthPrompts = prompts.filter(prompt => prompt.injection_depth === i && prompt.content);
|
||||
|
||||
// Order of priority (most important go lower)
|
||||
const roles = ['system', 'user', 'assistant'];
|
||||
const roles = [ 'system', 'user', 'assistant'];
|
||||
const roleMessages = [];
|
||||
|
||||
for (const role of roles) {
|
||||
// Get prompts for current role
|
||||
|
@ -482,10 +473,16 @@ function populationInjectionPrompts(prompts) {
|
|||
const jointPrompt = [rolePrompts, extensionPrompt].filter(x => x).map(x => x.trim()).join('\n');
|
||||
|
||||
if (jointPrompt && jointPrompt.length) {
|
||||
openai_msgs.splice(i, 0, { "role": role, 'content': jointPrompt });
|
||||
roleMessages.push({ "role": role, 'content': jointPrompt });
|
||||
}
|
||||
}
|
||||
|
||||
if (roleMessages.length) {
|
||||
openai_msgs.splice(i, 0, ...roleMessages);
|
||||
}
|
||||
}
|
||||
|
||||
openai_msgs = openai_msgs.reverse();
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
Loading…
Reference in New Issue