Adding reverse proxy support to DeepSeek chat completion (#3328)

* added reverse proxy settings to deepseek chat completion

* Update chat-completions.js

* Update chat-completions.js

* Update chat-completions.js

* Update chat-completions.js

* Update chat-completions.js

* Update chat-completions.js

* Unify API key requirement

---------

Co-authored-by: Cohee <18619528+Cohee1207@users.noreply.github.com>
Author:       subzero5544
Date:         2025-01-23 01:02:44 -06:00
Committed by: GitHub
parent 6fef696268
commit a503f58d0c
3 changed files with 91 additions and 21 deletions
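
In practice, the chat completion request body for DeepSeek can now carry the proxy fields. A hypothetical payload for the backend route (field names are taken from the diff below; the source value and the illustrative values are assumptions, not part of the commit):

// Hypothetical client payload; only reverse_proxy and proxy_password are new here.
const body = {
    chat_completion_source: 'deepseek',             // assumed value of CHAT_COMPLETION_SOURCES.DEEPSEEK
    model: 'deepseek-chat',
    messages: [{ role: 'user', content: 'Hello' }],
    stream: false,
    reverse_proxy: 'https://my-proxy.example.test', // new: overrides API_DEEPSEEK when set
    proxy_password: '',                             // new: sent as the Bearer token; may be empty when a proxy is set
};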


@@ -639,6 +639,89 @@ async function sendCohereRequest(request, response) {
    }
}

/**
 * Sends a request to DeepSeek API.
 * @param {express.Request} request Express request
 * @param {express.Response} response Express response
 */
async function sendDeepSeekRequest(request, response) {
    const apiUrl = new URL(request.body.reverse_proxy || API_DEEPSEEK).toString();
    const apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(request.user.directories, SECRET_KEYS.DEEPSEEK);

    if (!apiKey && !request.body.reverse_proxy) {
        console.log('DeepSeek API key is missing.');
        return response.status(400).send({ error: true });
    }

    const controller = new AbortController();
    request.socket.removeAllListeners('close');
    request.socket.on('close', function () {
        controller.abort();
    });

    try {
        let bodyParams = {};

        if (request.body.logprobs > 0) {
            bodyParams['top_logprobs'] = request.body.logprobs;
            bodyParams['logprobs'] = true;
        }

        const postProcessType = String(request.body.model).endsWith('-reasoner') ? 'deepseek-reasoner' : 'deepseek';
        const processedMessages = postProcessPrompt(request.body.messages, postProcessType, getPromptNames(request));

        const requestBody = {
            'messages': processedMessages,
            'model': request.body.model,
            'temperature': request.body.temperature,
            'max_tokens': request.body.max_tokens,
            'stream': request.body.stream,
            'presence_penalty': request.body.presence_penalty,
            'frequency_penalty': request.body.frequency_penalty,
            'top_p': request.body.top_p,
            'stop': request.body.stop,
            'seed': request.body.seed,
            ...bodyParams,
        };

        const config = {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'Authorization': 'Bearer ' + apiKey,
            },
            body: JSON.stringify(requestBody),
            signal: controller.signal,
        };

        console.log('DeepSeek request:', requestBody);

        const generateResponse = await fetch(apiUrl + '/chat/completions', config);

        if (request.body.stream) {
            forwardFetchResponse(generateResponse, response);
        } else {
            if (!generateResponse.ok) {
                const errorText = await generateResponse.text();
                console.log(`DeepSeek API returned error: ${generateResponse.status} ${generateResponse.statusText} ${errorText}`);
                const errorJson = tryParse(errorText) ?? { error: true };
                return response.status(500).send(errorJson);
            }

            const generateResponseJson = await generateResponse.json();
            console.log('DeepSeek response:', generateResponseJson);
            return response.send(generateResponseJson);
        }
    } catch (error) {
        console.log('Error communicating with DeepSeek API: ', error);
        if (!response.headersSent) {
            response.send({ error: true });
        } else {
            response.end();
        }
    }
}

export const router = express.Router();

router.post('/status', jsonParser, async function (request, response_getstatus_openai) {
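
A minimal sketch of the proxy-aware URL and key selection introduced above, pulled out of the handler for illustration (the standalone helper and its name are mine, not part of the commit):

// Illustrative helper only; the commit keeps this logic inline in sendDeepSeekRequest.
function resolveDeepSeekTarget(body, storedSecret, defaultApiUrl) {
    // The reverse proxy URL, when present, takes precedence over the default API URL;
    // new URL() also throws early on a malformed proxy URL.
    const apiUrl = new URL(body.reverse_proxy || defaultApiUrl).toString();
    // With a proxy, the proxy password is used as the Bearer token (it may be empty);
    // otherwise the stored DeepSeek secret is required.
    const apiKey = body.reverse_proxy ? body.proxy_password : storedSecret;
    const keyMissing = !apiKey && !body.reverse_proxy;
    return { apiUrl, apiKey, keyMissing };
}

This is what the "Unify API key requirement" bullet refers to: the 400 guard only fires when no reverse proxy is configured, so a proxy can be used without a stored DeepSeek key.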
@@ -683,8 +766,8 @@ router.post('/status', jsonParser, async function (request, response_getstatus_openai) {
        api_key_openai = readSecret(request.user.directories, SECRET_KEYS.NANOGPT);
        headers = {};
    } else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.DEEPSEEK) {
        api_url = API_DEEPSEEK.replace('/beta', '');
        api_key_openai = readSecret(request.user.directories, SECRET_KEYS.DEEPSEEK);
        api_url = new URL(request.body.reverse_proxy || API_DEEPSEEK.replace('/beta', ''));
        api_key_openai = request.body.reverse_proxy ? request.body.proxy_password : readSecret(request.user.directories, SECRET_KEYS.DEEPSEEK);
        headers = {};
    } else {
        console.log('This chat completion source is not supported yet.');
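
A small illustration of the /status URL fallback (placeholder values chosen for the example; the real API_DEEPSEEK is assumed to end in '/beta', as implied by the .replace() call):

// Because member access binds tighter than ||, .replace('/beta', '') only ever
// touches the default URL; a reverse proxy URL is used exactly as entered.
const DEFAULT_URL = 'https://api.deepseek.com/beta';        // placeholder for API_DEEPSEEK
new URL('' || DEFAULT_URL.replace('/beta', '')).toString(); // -> 'https://api.deepseek.com/' (no proxy set)
new URL('https://my-proxy.example.test' || DEFAULT_URL.replace('/beta', '')).toString(); // -> 'https://my-proxy.example.test/'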
@@ -844,6 +927,7 @@ router.post('/generate', jsonParser, function (request, response) {
        case CHAT_COMPLETION_SOURCES.MAKERSUITE: return sendMakerSuiteRequest(request, response);
        case CHAT_COMPLETION_SOURCES.MISTRALAI: return sendMistralAIRequest(request, response);
        case CHAT_COMPLETION_SOURCES.COHERE: return sendCohereRequest(request, response);
        case CHAT_COMPLETION_SOURCES.DEEPSEEK: return sendDeepSeekRequest(request, response);
    }

    let apiUrl;
@@ -957,19 +1041,6 @@ router.post('/generate', jsonParser, function (request, response) {
        apiKey = readSecret(request.user.directories, SECRET_KEYS.BLOCKENTROPY);
        headers = {};
        bodyParams = {};
    } else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.DEEPSEEK) {
        apiUrl = API_DEEPSEEK;
        apiKey = readSecret(request.user.directories, SECRET_KEYS.DEEPSEEK);
        headers = {};
        bodyParams = {};

        if (request.body.logprobs > 0) {
            bodyParams['top_logprobs'] = request.body.logprobs;
            bodyParams['logprobs'] = true;
        }

        const postProcessType = String(request.body.model).endsWith('-reasoner') ? 'deepseek-reasoner' : 'deepseek';
        request.body.messages = postProcessPrompt(request.body.messages, postProcessType, getPromptNames(request));
    } else {
        console.log('This chat completion source is not supported yet.');
        return response.status(400).send({ error: true });
@@ -1107,4 +1178,3 @@ router.post('/generate', jsonParser, function (request, response) {
        }
    }
});