Replace use_[source] with chat_completion_source

Same as the is_[api] replacement: it's easier to have one enum field than several mutually exclusive boolean ones.
valadaptive 2023-12-03 08:31:58 -05:00
parent a68505e857
commit e33c8bd955
2 changed files with 14 additions and 25 deletions
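
For illustration (not part of the commit): a minimal sketch of the pattern being applied. The dispatchLegacy/dispatch names are hypothetical; only the field names (use_claude, chat_completion_source, ...) mirror the diff.

    // Sketch only, not SillyTavern code.
    // Before: several mutually exclusive booleans. Every consumer checks each
    // flag in turn, and an invalid payload (two flags set) is representable.
    function dispatchLegacy(body) {
        if (body.use_claude) return 'claude';
        if (body.use_openrouter) return 'openrouter';
        if (body.use_scale) return 'scale';
        return 'openai';
    }

    // After: one enum-like string field. Exactly one source is representable,
    // and dispatch collapses into a single switch.
    function dispatch(body) {
        switch (body.chat_completion_source) {
            case 'claude': return 'claude';
            case 'openrouter': return 'openrouter';
            case 'scale': return 'scale';
            default: return 'openai';
        }
    }

    console.log(dispatchLegacy({ use_openrouter: true }));           // 'openrouter'
    console.log(dispatch({ chat_completion_source: 'openrouter' })); // 'openrouter'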

View File

@@ -1496,6 +1496,7 @@ async function sendOpenAIRequest(type, messages, signal) {
         'stream': stream,
         'logit_bias': logit_bias,
         'stop': getCustomStoppingStrings(openai_max_stop_strings),
+        'chat_completion_source': oai_settings.chat_completion_source,
     };

     // Empty array will produce a validation error
@@ -1516,7 +1517,6 @@ async function sendOpenAIRequest(type, messages, signal) {
     }

     if (isClaude) {
-        generate_data['use_claude'] = true;
         generate_data['top_k'] = Number(oai_settings.top_k_openai);
         generate_data['exclude_assistant'] = oai_settings.exclude_assistant;
         generate_data['stop'] = getCustomStoppingStrings(); // Claude shouldn't have limits on stop strings.
@@ -1527,7 +1527,6 @@ async function sendOpenAIRequest(type, messages, signal) {
     }

     if (isOpenRouter) {
-        generate_data['use_openrouter'] = true;
         generate_data['top_k'] = Number(oai_settings.top_k_openai);
         generate_data['use_fallback'] = oai_settings.openrouter_use_fallback;
@@ -1537,20 +1536,17 @@ async function sendOpenAIRequest(type, messages, signal) {
     }

     if (isScale) {
-        generate_data['use_scale'] = true;
         generate_data['api_url_scale'] = oai_settings.api_url_scale;
     }

     if (isPalm) {
         const nameStopString = isImpersonate ? `\n${name2}:` : `\n${name1}:`;
         const stopStringsLimit = 3; // 5 - 2 (nameStopString and new_chat_prompt)
-        generate_data['use_palm'] = true;
         generate_data['top_k'] = Number(oai_settings.top_k_openai);
         generate_data['stop'] = [nameStopString, oai_settings.new_chat_prompt, ...getCustomStoppingStrings(stopStringsLimit)];
     }

     if (isAI21) {
-        generate_data['use_ai21'] = true;
         generate_data['top_k'] = Number(oai_settings.top_k_openai);
         generate_data['count_pen'] = Number(oai_settings.count_pen);
         generate_data['stop_tokens'] = [name1 + ':', oai_settings.new_chat_prompt, oai_settings.new_group_chat_prompt];
@@ -2463,10 +2459,10 @@ async function getStatusOpen() {
     let data = {
         reverse_proxy: oai_settings.reverse_proxy,
         proxy_password: oai_settings.proxy_password,
-        use_openrouter: oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER,
+        chat_completion_source: oai_settings.chat_completion_source,
     };

-    if (oai_settings.reverse_proxy && !data.use_openrouter) {
+    if (oai_settings.reverse_proxy && data.chat_completion_source !== 'openrouter') {
         validateReverseProxy();
     }

View File

@@ -2794,7 +2794,7 @@ app.post('/getstatus_openai', jsonParser, async function (request, response_getstatus_openai) {
     let api_key_openai;
     let headers;

-    if (request.body.use_openrouter == false) {
+    if (request.body.chat_completion_source !== 'openrouter') {
         api_url = new URL(request.body.reverse_proxy || API_OPENAI).toString();
         api_key_openai = request.body.reverse_proxy ? request.body.proxy_password : readSecret(SECRET_KEYS.OPENAI);
         headers = {};
@@ -2822,7 +2822,7 @@ app.post('/getstatus_openai', jsonParser, async function (request, response_getstatus_openai) {
     const data = await response.json();
     response_getstatus_openai.send(data);

-    if (request.body.use_openrouter && Array.isArray(data?.data)) {
+    if (request.body.chat_completion_source === 'openrouter' && Array.isArray(data?.data)) {
         let models = [];

         data.data.forEach(model => {
@@ -3237,20 +3237,11 @@ async function sendPalmRequest(request, response) {
 app.post('/generate_openai', jsonParser, function (request, response_generate_openai) {
     if (!request.body) return response_generate_openai.status(400).send({ error: true });

-    if (request.body.use_claude) {
-        return sendClaudeRequest(request, response_generate_openai);
-    }
-
-    if (request.body.use_scale) {
-        return sendScaleRequest(request, response_generate_openai);
-    }
-
-    if (request.body.use_ai21) {
-        return sendAI21Request(request, response_generate_openai);
-    }
-
-    if (request.body.use_palm) {
-        return sendPalmRequest(request, response_generate_openai);
+    switch (request.body.chat_completion_source) {
+        case 'claude': return sendClaudeRequest(request, response_generate_openai);
+        case 'scale': return sendScaleRequest(request, response_generate_openai);
+        case 'ai21': return sendAI21Request(request, response_generate_openai);
+        case 'palm': return sendPalmRequest(request, response_generate_openai);
     }

     let api_url;
@@ -3258,7 +3249,7 @@ app.post('/generate_openai', jsonParser, function (request, response_generate_openai) {
     let headers;
     let bodyParams;

-    if (!request.body.use_openrouter) {
+    if (request.body.chat_completion_source !== 'openrouter') {
         api_url = new URL(request.body.reverse_proxy || API_OPENAI).toString();
         api_key_openai = request.body.reverse_proxy ? request.body.proxy_password : readSecret(SECRET_KEYS.OPENAI);
         headers = {};
@@ -3290,7 +3281,9 @@ app.post('/generate_openai', jsonParser, function (request, response_generate_openai) {
     const isTextCompletion = Boolean(request.body.model && TEXT_COMPLETION_MODELS.includes(request.body.model)) || typeof request.body.messages === 'string';
     const textPrompt = isTextCompletion ? convertChatMLPrompt(request.body.messages) : '';
-    const endpointUrl = isTextCompletion && !request.body.use_openrouter ? `${api_url}/completions` : `${api_url}/chat/completions`;
+    const endpointUrl = isTextCompletion && request.body.chat_completion_source !== 'openrouter' ?
+        `${api_url}/completions` :
+        `${api_url}/chat/completions`;

     const controller = new AbortController();
     request.socket.removeAllListeners('close');
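
A side benefit of the single field, sketched below. This is a hedged illustration, not repository code: KNOWN_SOURCES and isKnownSource are assumed names, and the value list is illustrative.

    // With one string field, validating a request is a single membership test
    // instead of an "exactly one of N booleans is set" check.
    const KNOWN_SOURCES = ['openai', 'claude', 'openrouter', 'scale', 'ai21', 'palm'];

    function isKnownSource(body) {
        return KNOWN_SOURCES.includes(body.chat_completion_source);
    }

    console.log(isKnownSource({ chat_completion_source: 'claude' })); // true
    console.log(isKnownSource({ chat_completion_source: 'other' }));  // false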