diff --git a/public/index.html b/public/index.html
index ff8d0ff23..9afff4e30 100644
--- a/public/index.html
+++ b/public/index.html
@@ -1991,6 +1991,7 @@
@@ -3535,13 +3536,21 @@
-
-
Prompt Post-Processing
+
+
+
+ Prompt Post-Processing
+
+
+
+
+
None
Merge consecutive roles
Semi-strict (alternating roles)
Strict (user first, alternating roles)
+ Single user message
diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index db88ea37e..a090e9922 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -203,13 +203,14 @@ const continue_postfix_types = {
DOUBLE_NEWLINE: '\n\n',
};
-const custom_prompt_post_processing_types = {
+export const custom_prompt_post_processing_types = {
NONE: '',
/** @deprecated Use MERGE instead. */
CLAUDE: 'claude',
MERGE: 'merge',
SEMI: 'semi',
STRICT: 'strict',
+ SINGLE: 'single',
};
const openrouter_middleout_types = {
diff --git a/public/scripts/tool-calling.js b/public/scripts/tool-calling.js
index 2dc2f11d8..8cc9b8ff1 100644
--- a/public/scripts/tool-calling.js
+++ b/public/scripts/tool-calling.js
@@ -575,6 +575,11 @@ export class ToolManager {
return false;
}
+ // Post-processing forcefully strips past tool calls/results from the prompt, so tool calling would be pointless
+ if (oai_settings.custom_prompt_post_processing) {
+ return false;
+ }
+
if (oai_settings.chat_completion_source === chat_completion_sources.POLLINATIONS && Array.isArray(model_list)) {
const currentModel = model_list.find(model => model.id === oai_settings.pollinations_model);
if (currentModel) {
diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js
index 35c4cd43c..ec902e07f 100644
--- a/src/endpoints/backends/chat-completions.js
+++ b/src/endpoints/backends/chat-completions.js
@@ -72,15 +72,17 @@ function postProcessPrompt(messages, type, names) {
switch (type) {
case 'merge':
case 'claude':
- return mergeMessages(messages, names, false, false);
+ return mergeMessages(messages, names, { strict: false, placeholders: false, single: false });
case 'semi':
- return mergeMessages(messages, names, true, false);
+ return mergeMessages(messages, names, { strict: true, placeholders: false, single: false });
case 'strict':
- return mergeMessages(messages, names, true, true);
+ return mergeMessages(messages, names, { strict: true, placeholders: true, single: false });
case 'deepseek':
- return addAssistantPrefix(mergeMessages(messages, names, true, false));
+ return addAssistantPrefix(mergeMessages(messages, names, { strict: true, placeholders: false, single: false }));
case 'deepseek-reasoner':
- return addAssistantPrefix(mergeMessages(messages, names, true, true));
+ return addAssistantPrefix(mergeMessages(messages, names, { strict: true, placeholders: true, single: false }));
+ case 'single':
+ return mergeMessages(messages, names, { strict: true, placeholders: false, single: true });
default:
return messages;
}
@@ -383,7 +385,7 @@ async function sendMakerSuiteRequest(request, response) {
function getGeminiBody() {
// #region UGLY MODEL LISTS AREA
- const imageGenerationModels = [
+ const imageGenerationModels = [
'gemini-2.0-flash-exp',
'gemini-2.0-flash-exp-image-generation',
];
@@ -1206,6 +1208,15 @@ router.post('/bias', async function (request, response) {
router.post('/generate', function (request, response) {
if (!request.body) return response.status(400).send({ error: true });
+ const postProcessingType = request.body.custom_prompt_post_processing;
+ if (Array.isArray(request.body.messages) && postProcessingType) {
+ console.info('Applying custom prompt post-processing of type', postProcessingType);
+ request.body.messages = postProcessPrompt(
+ request.body.messages,
+ postProcessingType,
+ getPromptNames(request));
+ }
+
switch (request.body.chat_completion_source) {
case CHAT_COMPLETION_SOURCES.CLAUDE: return sendClaudeRequest(request, response);
case CHAT_COMPLETION_SOURCES.SCALE: return sendScaleRequest(request, response);
@@ -1224,15 +1235,6 @@ router.post('/generate', function (request, response) {
let bodyParams;
const isTextCompletion = Boolean(request.body.model && TEXT_COMPLETION_MODELS.includes(request.body.model)) || typeof request.body.messages === 'string';
- const postProcessTypes = [CHAT_COMPLETION_SOURCES.CUSTOM, CHAT_COMPLETION_SOURCES.OPENROUTER];
- if (Array.isArray(request.body.messages) && postProcessTypes.includes(request.body.chat_completion_source) && request.body.custom_prompt_post_processing) {
- console.info('Applying custom prompt post-processing of type', request.body.custom_prompt_post_processing);
- request.body.messages = postProcessPrompt(
- request.body.messages,
- request.body.custom_prompt_post_processing,
- getPromptNames(request));
- }
-
if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.OPENAI) {
apiUrl = new URL(request.body.reverse_proxy || API_OPENAI).toString();
apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(request.user.directories, SECRET_KEYS.OPENAI);
diff --git a/src/prompt-converters.js b/src/prompt-converters.js
index ae4c0c0fb..35f5d0b8e 100644
--- a/src/prompt-converters.js
+++ b/src/prompt-converters.js
@@ -695,11 +695,13 @@ export function convertXAIMessages(messages, names) {
* Merge messages with the same consecutive role, removing names if they exist.
* @param {any[]} messages Messages to merge
* @param {PromptNames} names Prompt names
- * @param {boolean} strict Enable strict mode: only allow one system message at the start, force user first message
- * @param {boolean} placeholders Add user placeholders to the messages in strict mode
+ * @param {object} options Options for merging
+ * @param {boolean} [options.strict] Enable strict mode: only allow one system message at the start, force user first message
+ * @param {boolean} [options.placeholders] Add user placeholders to the messages in strict mode
+ * @param {boolean} [options.single] Force every role to be user, merging all messages into one
* @returns {any[]} Merged messages
*/
-export function mergeMessages(messages, names, strict, placeholders) {
+export function mergeMessages(messages, names, { strict = false, placeholders = false, single = false } = {}) {
let mergedMessages = [];
/** @type {Map} */
@@ -744,6 +746,20 @@ export function mergeMessages(messages, names, strict, placeholders) {
if (message.role === 'tool') {
message.role = 'user';
}
+ if (single) {
+ if (message.role === 'assistant') {
+ if (names.charName && !message.content.startsWith(`${names.charName}: `) && !names.startsWithGroupName(message.content)) {
+ message.content = `${names.charName}: ${message.content}`;
+ }
+ }
+ if (message.role === 'user') {
+ if (names.userName && !message.content.startsWith(`${names.userName}: `)) {
+ message.content = `${names.userName}: ${message.content}`;
+ }
+ }
+
+ message.role = 'user';
+ }
delete message.name;
delete message.tool_calls;
delete message.tool_call_id;
@@ -807,7 +823,7 @@ export function mergeMessages(messages, names, strict, placeholders) {
mergedMessages.unshift({ role: 'user', content: PROMPT_PLACEHOLDER });
}
}
- return mergeMessages(mergedMessages, names, false, placeholders);
+ return mergeMessages(mergedMessages, names, { strict: false, placeholders, single: false });
}
return mergedMessages;