OAI reverse proxy (untested)

SillyLossy
2023-04-10 18:44:11 +03:00
parent defa7f357e
commit f2d7f74d5a
3 changed files with 57 additions and 4 deletions

View File

@@ -245,6 +245,18 @@
</div>
</div>
<div id="range_block_openai">
<div class="range-block">
<div class="range-block-title">
OpenAI Reverse Proxy
</div>
<div class="range-block-counter">
Alternative server URL (leave empty to use the default value).<br>
<b class="failure">Don't expose your real API keys to reverse proxies!</b>
</div>
<div class="range-block-range">
<input id="openai_reverse_proxy" type="text" class="text_pole" placeholder="https://api.openai.com/v1" />
</div>
</div>
<div class="range-block"> <div class="range-block">
<div class="range-block-title"> <div class="range-block-title">
OpenAI Context Size OpenAI Context Size

View File

@@ -78,6 +78,7 @@ const default_settings = {
jailbreak_prompt: default_jailbreak_prompt,
openai_model: 'gpt-3.5-turbo-0301',
jailbreak_system: false,
reverse_proxy: '',
};
const oai_settings = {
@@ -98,11 +99,28 @@ const oai_settings = {
jailbreak_prompt: default_jailbreak_prompt,
openai_model: 'gpt-3.5-turbo-0301',
jailbreak_system: false,
reverse_proxy: '',
};
let openai_setting_names;
let openai_settings;
function validateReverseProxy() {
if (!oai_settings.reverse_proxy) {
return;
}
try {
new URL(oai_settings.reverse_proxy);
}
catch (err) {
callPopup('Entered reverse proxy address is not a valid URL', 'text');
setOnlineStatus('no_connection');
resultCheckStatusOpen();
throw err;
}
}
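A minimal sketch (not part of this commit) of the behavior validateReverseProxy() relies on: the WHATWG URL constructor throws a TypeError for any string that is not an absolute URL, which is what triggers the popup and the 'no_connection' status above. The helper name and sample addresses are illustrative only.

// Illustrative only - assumes a runtime with the WHATWG URL constructor.
function isValidProxyUrl(value) {
    try {
        new URL(value);
        return true;
    } catch {
        return false;
    }
}

console.log(isValidProxyUrl('https://my-proxy.example/v1')); // true
console.log(isValidProxyUrl('my-proxy.example/v1'));         // false - no scheme, URL() throws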
function setOpenAIOnlineStatus(value) {
is_get_status_openai = value;
}
@@ -396,6 +414,10 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI
}
async function sendOpenAIRequest(openai_msgs_tosend) {
if (oai_settings.reverse_proxy) {
validateReverseProxy();
}
const generate_data = {
"messages": openai_msgs_tosend,
"model": oai_settings.openai_model,
@@ -404,6 +426,7 @@ async function sendOpenAIRequest(openai_msgs_tosend) {
"presence_penalty": parseFloat(oai_settings.pres_pen_openai),
"max_tokens": oai_settings.openai_max_tokens,
"stream": false, //oai_settings.stream_openai,
"reverse_proxy": oai_settings.reverse_proxy,
};
const generate_url = '/generate_openai';
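For reference, a sketch (not part of the diff) of the JSON body the client now posts to the local /generate_openai endpoint; the server reads reverse_proxy from this body to choose the upstream URL. Only keys visible in the diff are shown, and all values are hypothetical examples.

{
    "messages": [{ "role": "user", "content": "Hi" }],
    "model": "gpt-3.5-turbo-0301",
    "presence_penalty": 0.7,
    "max_tokens": 300,
    "stream": false,
    "reverse_proxy": "https://my-proxy.example/v1"
}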
@@ -589,17 +612,28 @@ function loadOpenAISettings(data, settings) {
$('#pres_pen_openai').val(oai_settings.pres_pen_openai);
$('#pres_pen_counter_openai').text(Number(oai_settings.pres_pen_openai).toFixed(2));
if (settings.reverse_proxy !== undefined) oai_settings.reverse_proxy = settings.reverse_proxy;
$('#openai_reverse_proxy').val(oai_settings.reverse_proxy);
}
async function getStatusOpen() {
if (is_get_status_openai) {
let data = { key: oai_settings.api_key_openai };
let data = {
key: oai_settings.api_key_openai,
reverse_proxy: oai_settings.reverse_proxy,
};
jQuery.ajax({
type: 'POST', //
url: '/getstatus_openai', //
data: JSON.stringify(data),
beforeSend: function () { },
beforeSend: function () {
if (oai_settings.reverse_proxy) {
validateReverseProxy();
}
},
cache: false,
dataType: "json",
contentType: "application/json",
@@ -902,4 +936,9 @@ $(document).ready(function () {
$('#jailbreak_prompt_textarea').val(oai_settings.jailbreak_prompt);
saveSettingsDebounced();
});
$("#openai_reverse_proxy").on('input', function () {
oai_settings.reverse_proxy = $(this).val();
saveSettingsDebounced();
});
});

View File

@@ -1923,10 +1923,11 @@ app.get('/thumbnail', jsonParser, async function (request, response) {
app.post("/getstatus_openai", jsonParser, function (request, response_getstatus_openai = response) {
if (!request.body) return response_getstatus_openai.sendStatus(400);
api_key_openai = request.body.key;
const api_url = new URL(request.body.reverse_proxy || api_openai).toString();
const args = {
headers: { "Authorization": "Bearer " + api_key_openai }
};
client.get(api_openai + "/models", args, function (data, response) {
client.get(api_url + "/models", args, function (data, response) {
if (response.statusCode == 200) {
console.log(data);
response_getstatus_openai.send(data);//data);
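On the server, new URL(...).toString() normalizes whichever address wins (the user-supplied proxy or the stock endpoint) before the request path is appended. A sketch of that resolution, assuming api_openai holds the default 'https://api.openai.com/v1' and using a hypothetical proxy address; note that a bare origin such as 'https://my-proxy.example' serializes with a trailing slash, so plain concatenation would produce a double slash before /models.

// Illustrative only - reverse_proxy here stands in for request.body.reverse_proxy.
const reverse_proxy = 'https://my-proxy.example/v1';
const api_url = new URL(reverse_proxy || api_openai).toString();
console.log(api_url + '/models'); // https://my-proxy.example/v1/models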
@@ -1946,11 +1947,12 @@ app.post("/getstatus_openai", jsonParser, function (request, response_getstatus_
app.post("/generate_openai", jsonParser, function (request, response_generate_openai) {
if (!request.body) return response_generate_openai.sendStatus(400);
const api_url = new URL(request.body.reverse_proxy || api_openai).toString();
console.log(request.body);
const config = {
method: 'post',
url: api_openai + '/chat/completions',
url: api_url + '/chat/completions',
headers: {
'Content-Type': 'application/json',
'Authorization': 'Bearer ' + api_key_openai