Apply post-process before setting cache at depth

Cohee
2025-03-18 23:17:26 +02:00
parent 46d5f79fd9
commit fcaea46a54


@@ -1048,6 +1048,15 @@ router.post('/generate', function (request, response) {
     let bodyParams;
     const isTextCompletion = Boolean(request.body.model && TEXT_COMPLETION_MODELS.includes(request.body.model)) || typeof request.body.messages === 'string';
+    const postProcessTypes = [CHAT_COMPLETION_SOURCES.CUSTOM, CHAT_COMPLETION_SOURCES.OPENROUTER];
+    if (Array.isArray(request.body.messages) && postProcessTypes.includes(request.body.chat_completion_source) && request.body.custom_prompt_post_processing) {
+        console.info('Applying custom prompt post-processing of type', request.body.custom_prompt_post_processing);
+        request.body.messages = postProcessPrompt(
+            request.body.messages,
+            request.body.custom_prompt_post_processing,
+            getPromptNames(request));
+    }
     if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.OPENAI) {
         apiUrl = new URL(request.body.reverse_proxy || API_OPENAI).toString();
         apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(request.user.directories, SECRET_KEYS.OPENAI);
@@ -1152,15 +1161,6 @@ router.post('/generate', function (request, response) {
         return response.status(400).send({ error: true });
     }
-    const postProcessTypes = [CHAT_COMPLETION_SOURCES.CUSTOM, CHAT_COMPLETION_SOURCES.OPENROUTER];
-    if (postProcessTypes.includes(request.body.chat_completion_source) && request.body.custom_prompt_post_processing) {
-        console.info('Applying custom prompt post-processing of type', request.body.custom_prompt_post_processing);
-        request.body.messages = postProcessPrompt(
-            request.body.messages,
-            request.body.custom_prompt_post_processing,
-            getPromptNames(request));
-    }
     // A few of OpenAIs reasoning models support reasoning effort
     if ([CHAT_COMPLETION_SOURCES.CUSTOM, CHAT_COMPLETION_SOURCES.OPENAI].includes(request.body.chat_completion_source)) {
         if (['o1', 'o3-mini', 'o3-mini-2025-01-31'].includes(request.body.model)) {
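
For context, a minimal standalone sketch of why the ordering matters: if cache markers are placed at a fixed depth before post-processing squashes the message list, they can land on messages that no longer exist in the final payload. Both helpers below are simplified, hypothetical stand-ins (the real postProcessPrompt also takes the post-processing type and prompt names, and the cache-at-depth logic here is purely illustrative), not the project's implementation.

```js
// Hypothetical stand-in for postProcessPrompt: squash consecutive same-role messages.
function postProcessPrompt(messages) {
    return messages.reduce((acc, msg) => {
        const last = acc[acc.length - 1];
        if (last && last.role === msg.role) {
            last.content += '\n' + msg.content;
        } else {
            acc.push({ ...msg });
        }
        return acc;
    }, []);
}

// Hypothetical cache-at-depth step: mark the last `depth` messages as cacheable.
function applyCacheAtDepth(messages, depth) {
    for (let i = messages.length - 1; i >= Math.max(0, messages.length - depth); i--) {
        messages[i].cache_control = { type: 'ephemeral' };
    }
}

let messages = [
    { role: 'system', content: 'sys' },
    { role: 'user', content: 'first' },
    { role: 'user', content: 'second' },
];

// Post-process first (as after this commit), then place cache markers,
// so the markers end up on the squashed message list rather than on
// messages that post-processing would otherwise merge away.
messages = postProcessPrompt(messages);
applyCacheAtDepth(messages, 1);
console.log(JSON.stringify(messages, null, 2));
```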