From 45cfc532b1fbd22f8107becc0a8c012bb0cfdf1e Mon Sep 17 00:00:00 2001
From: Cohee <18619528+Cohee1207@users.noreply.github.com>
Date: Fri, 13 Sep 2024 19:56:48 +0300
Subject: [PATCH] Redirect max_tokens to max_completion_tokens

---
 public/scripts/openai.js                   | 2 ++
 src/endpoints/backends/chat-completions.js | 1 +
 2 files changed, 3 insertions(+)

diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index bd86df72a..5455637e4 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -1970,6 +1970,8 @@ async function sendOpenAIRequest(type, messages, signal) {
                 msg.role = 'user';
             }
         });
+        generate_data.max_completion_tokens = generate_data.max_tokens;
+        delete generate_data.max_tokens;
         delete generate_data.stream;
         delete generate_data.logprobs;
         delete generate_data.top_logprobs;
diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js
index 1757b9e5f..88541d937 100644
--- a/src/endpoints/backends/chat-completions.js
+++ b/src/endpoints/backends/chat-completions.js
@@ -965,6 +965,7 @@ router.post('/generate', jsonParser, function (request, response) {
         'model': request.body.model,
         'temperature': request.body.temperature,
         'max_tokens': request.body.max_tokens,
+        'max_completion_tokens': request.body.max_completion_tokens,
         'stream': request.body.stream,
         'presence_penalty': request.body.presence_penalty,
         'frequency_penalty': request.body.frequency_penalty,
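
For illustration, here is a minimal sketch of the client-side redirect in isolation, assuming a payload shaped like the `generate_data` object in the diff. The helper name `redirectMaxTokens` is hypothetical and not part of the patch; it only mirrors the two added lines in `sendOpenAIRequest`.

```js
// Hypothetical standalone helper mirroring the two lines added in openai.js.
// `generate_data` stands in for the request payload built before it is sent
// to the backend endpoint.
function redirectMaxTokens(generate_data) {
    // Copy the token limit under the new parameter name, then drop the old one.
    generate_data.max_completion_tokens = generate_data.max_tokens;
    delete generate_data.max_tokens;
    return generate_data;
}

// Example: { model: 'o1-mini', max_tokens: 1024 }
// becomes  { model: 'o1-mini', max_completion_tokens: 1024 }
console.log(redirectMaxTokens({ model: 'o1-mini', max_tokens: 1024 }));
```

On the backend, the change is purely pass-through: the `/generate` handler forwards `request.body.max_completion_tokens` alongside the existing fields, so whichever of the two parameters the client sets reaches the upstream API unchanged.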