diff --git a/public/index.html b/public/index.html
index ec0e088a3..d82d7f833 100644
--- a/public/index.html
+++ b/public/index.html
@@ -3415,13 +3415,6 @@
- Prompt Post-Processing
-
@@ -3440,6 +3433,15 @@
+
+
Prompt Post-Processing
+
+
diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index 80d1e9ed7..36e224bf9 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -2020,6 +2020,7 @@ async function sendOpenAIRequest(type, messages, signal) {
'reasoning_effort': String(oai_settings.reasoning_effort),
'enable_web_search': Boolean(oai_settings.enable_web_search),
'request_images': Boolean(oai_settings.request_images),
+ 'custom_prompt_post_processing': oai_settings.custom_prompt_post_processing,
};

if (!canMultiSwipe && ToolManager.canPerformToolCalls(type)) {
@@ -2100,7 +2101,6 @@ async function sendOpenAIRequest(type, messages, signal) {
generate_data['custom_include_body'] = oai_settings.custom_include_body;
generate_data['custom_exclude_body'] = oai_settings.custom_exclude_body;
generate_data['custom_include_headers'] = oai_settings.custom_include_headers;
- generate_data['custom_prompt_post_processing'] = oai_settings.custom_prompt_post_processing;
}

if (isCohere) {
diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js
index 4d2eee774..a39dcbe2f 100644
--- a/src/endpoints/backends/chat-completions.js
+++ b/src/endpoints/backends/chat-completions.js
@@ -1048,6 +1048,15 @@ router.post('/generate', function (request, response) {
let bodyParams;
const isTextCompletion = Boolean(request.body.model && TEXT_COMPLETION_MODELS.includes(request.body.model)) || typeof request.body.messages === 'string';

+ const postProcessTypes = [CHAT_COMPLETION_SOURCES.CUSTOM, CHAT_COMPLETION_SOURCES.OPENROUTER];
+ if (Array.isArray(request.body.messages) && postProcessTypes.includes(request.body.chat_completion_source) && request.body.custom_prompt_post_processing) {
+ console.info('Applying custom prompt post-processing of type', request.body.custom_prompt_post_processing);
+ request.body.messages = postProcessPrompt(
+ request.body.messages,
+ request.body.custom_prompt_post_processing,
+ getPromptNames(request));
+ }
+
if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.OPENAI) {
apiUrl = new URL(request.body.reverse_proxy || API_OPENAI).toString();
apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(request.user.directories, SECRET_KEYS.OPENAI);
@@ -1121,14 +1130,6 @@ router.post('/generate', function (request, response) {
mergeObjectWithYaml(bodyParams, request.body.custom_include_body);
mergeObjectWithYaml(headers, request.body.custom_include_headers);
-
- if (request.body.custom_prompt_post_processing) {
- console.info('Applying custom prompt post-processing of type', request.body.custom_prompt_post_processing);
- request.body.messages = postProcessPrompt(
- request.body.messages,
- request.body.custom_prompt_post_processing,
- getPromptNames(request));
- }
} else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.PERPLEXITY) {
apiUrl = API_PERPLEXITY;
apiKey = readSecret(request.user.directories, SECRET_KEYS.PERPLEXITY);
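Net effect of the server-side change: prompt post-processing is no longer applied only inside the Custom branch; it now runs once, up front, for any source listed in postProcessTypes (Custom and OpenRouter). The postProcessPrompt and getPromptNames helpers themselves are not modified by this diff. As a rough, hypothetical sketch of what a post-processing type such as "merge" is expected to do to request.body.messages (an assumption based on the setting's purpose, not code taken from this patch), consecutive messages sharing a role are collapsed into one:

function mergeConsecutiveRoles(messages) {
    // Hypothetical illustration only; the real logic lives in the existing
    // prompt-converter helpers and may differ in details.
    const merged = [];
    for (const message of messages) {
        const last = merged[merged.length - 1];
        if (last && last.role === message.role) {
            // Same role as the previous message: append to it instead of pushing a new entry.
            last.content += '\n\n' + String(message.content ?? '');
        } else {
            merged.push({ role: message.role, content: String(message.content ?? '') });
        }
    }
    return merged;
}

// e.g. two consecutive 'user' messages become a single 'user' message:
// mergeConsecutiveRoles([
//     { role: 'user', content: 'Hello.' },
//     { role: 'user', content: 'How are you?' },
// ]);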