OpenRouter: Allow applying prompt post-processing

Fixes #3689
This commit is contained in:
Cohee
2025-03-18 21:33:11 +02:00
parent 283edd94b9
commit 46d5f79fd9
3 changed files with 19 additions and 16 deletions

View File

@@ -3415,13 +3415,6 @@
<div class="flex-container"> <div class="flex-container">
<select id="model_custom_select" class="text_pole model_custom_select"></select> <select id="model_custom_select" class="text_pole model_custom_select"></select>
</div> </div>
<h4 data-i18n="Prompt Post-Processing">Prompt Post-Processing</h4>
<select id="custom_prompt_post_processing" class="text_pole" title="Applies additional processing to the prompt before sending it to the API." data-i18n="[title]Applies additional processing to the prompt before sending it to the API.">
<option data-i18n="prompt_post_processing_none" value="">None</option>
<option data-i18n="prompt_post_processing_merge" value="merge">Merge consecutive roles</option>
<option data-i18n="prompt_post_processing_semi" value="semi">Semi-strict (alternating roles)</option>
<option data-i18n="prompt_post_processing_strict" value="strict">Strict (user first, alternating roles)</option>
</select>
</form> </form>
<div id="01ai_form" data-source="01ai"> <div id="01ai_form" data-source="01ai">
<h4> <h4>
@@ -3440,6 +3433,15 @@
<select id="model_01ai_select"> <select id="model_01ai_select">
</select> </select>
</div> </div>
<div id="prompt_post_porcessing_form" data-source="custom,openrouter">
<h4 data-i18n="Prompt Post-Processing">Prompt Post-Processing</h4>
<select id="custom_prompt_post_processing" class="text_pole" title="Applies additional processing to the prompt before sending it to the API." data-i18n="[title]Applies additional processing to the prompt before sending it to the API.">
<option data-i18n="prompt_post_processing_none" value="">None</option>
<option data-i18n="prompt_post_processing_merge" value="merge">Merge consecutive roles</option>
<option data-i18n="prompt_post_processing_semi" value="semi">Semi-strict (alternating roles)</option>
<option data-i18n="prompt_post_processing_strict" value="strict">Strict (user first, alternating roles)</option>
</select>
</div>
<div class="flex-container flex"> <div class="flex-container flex">
<div id="api_button_openai" class="api_button menu_button menu_button_icon" type="submit" data-i18n="Connect">Connect</div> <div id="api_button_openai" class="api_button menu_button menu_button_icon" type="submit" data-i18n="Connect">Connect</div>
<div class="api_loading menu_button menu_button_icon" data-i18n="Cancel">Cancel</div> <div class="api_loading menu_button menu_button_icon" data-i18n="Cancel">Cancel</div>

View File

@@ -2020,6 +2020,7 @@ async function sendOpenAIRequest(type, messages, signal) {
'reasoning_effort': String(oai_settings.reasoning_effort), 'reasoning_effort': String(oai_settings.reasoning_effort),
'enable_web_search': Boolean(oai_settings.enable_web_search), 'enable_web_search': Boolean(oai_settings.enable_web_search),
'request_images': Boolean(oai_settings.request_images), 'request_images': Boolean(oai_settings.request_images),
'custom_prompt_post_processing': oai_settings.custom_prompt_post_processing,
}; };
if (!canMultiSwipe && ToolManager.canPerformToolCalls(type)) { if (!canMultiSwipe && ToolManager.canPerformToolCalls(type)) {
@@ -2100,7 +2101,6 @@ async function sendOpenAIRequest(type, messages, signal) {
generate_data['custom_include_body'] = oai_settings.custom_include_body; generate_data['custom_include_body'] = oai_settings.custom_include_body;
generate_data['custom_exclude_body'] = oai_settings.custom_exclude_body; generate_data['custom_exclude_body'] = oai_settings.custom_exclude_body;
generate_data['custom_include_headers'] = oai_settings.custom_include_headers; generate_data['custom_include_headers'] = oai_settings.custom_include_headers;
generate_data['custom_prompt_post_processing'] = oai_settings.custom_prompt_post_processing;
} }
if (isCohere) { if (isCohere) {

View File

@@ -1121,14 +1121,6 @@ router.post('/generate', function (request, response) {
mergeObjectWithYaml(bodyParams, request.body.custom_include_body); mergeObjectWithYaml(bodyParams, request.body.custom_include_body);
mergeObjectWithYaml(headers, request.body.custom_include_headers); mergeObjectWithYaml(headers, request.body.custom_include_headers);
if (request.body.custom_prompt_post_processing) {
console.info('Applying custom prompt post-processing of type', request.body.custom_prompt_post_processing);
request.body.messages = postProcessPrompt(
request.body.messages,
request.body.custom_prompt_post_processing,
getPromptNames(request));
}
} else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.PERPLEXITY) { } else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.PERPLEXITY) {
apiUrl = API_PERPLEXITY; apiUrl = API_PERPLEXITY;
apiKey = readSecret(request.user.directories, SECRET_KEYS.PERPLEXITY); apiKey = readSecret(request.user.directories, SECRET_KEYS.PERPLEXITY);
@@ -1160,6 +1152,15 @@ router.post('/generate', function (request, response) {
return response.status(400).send({ error: true }); return response.status(400).send({ error: true });
} }
const postProcessTypes = [CHAT_COMPLETION_SOURCES.CUSTOM, CHAT_COMPLETION_SOURCES.OPENROUTER];
if (postProcessTypes.includes(request.body.chat_completion_source) && request.body.custom_prompt_post_processing) {
console.info('Applying custom prompt post-processing of type', request.body.custom_prompt_post_processing);
request.body.messages = postProcessPrompt(
request.body.messages,
request.body.custom_prompt_post_processing,
getPromptNames(request));
}
// A few of OpenAIs reasoning models support reasoning effort // A few of OpenAIs reasoning models support reasoning effort
if ([CHAT_COMPLETION_SOURCES.CUSTOM, CHAT_COMPLETION_SOURCES.OPENAI].includes(request.body.chat_completion_source)) { if ([CHAT_COMPLETION_SOURCES.CUSTOM, CHAT_COMPLETION_SOURCES.OPENAI].includes(request.body.chat_completion_source)) {
if (['o1', 'o3-mini', 'o3-mini-2025-01-31'].includes(request.body.model)) { if (['o1', 'o3-mini', 'o3-mini-2025-01-31'].includes(request.body.model)) {