Fix the stop button for window.ai and refactor the generation function

This commit is contained in:
SillyLossy
2023-05-28 02:33:34 +03:00
parent 3897ed3b4e
commit 5a7daedfca

View File

@ -526,40 +526,7 @@ function checkQuotaError(data) {
}
}
async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
// Provide default abort signal
if (!signal) {
signal = new AbortController().signal;
}
if (oai_settings.reverse_proxy) {
validateReverseProxy();
}
let logit_bias = {};
const stream = type !== 'quiet' && oai_settings.stream_openai;
if (oai_settings.bias_preset_selected
&& Array.isArray(oai_settings.bias_presets[oai_settings.bias_preset_selected])
&& oai_settings.bias_presets[oai_settings.bias_preset_selected].length) {
logit_bias = biasCache || await calculateLogitBias();
biasCache = logit_bias;
}
const generate_data = {
"messages": openai_msgs_tosend,
"model": oai_settings.openai_model,
"temperature": parseFloat(oai_settings.temp_openai),
"frequency_penalty": parseFloat(oai_settings.freq_pen_openai),
"presence_penalty": parseFloat(oai_settings.pres_pen_openai),
"top_p": parseFloat(oai_settings.top_p_openai),
"max_tokens": oai_settings.openai_max_tokens,
"stream": stream,
"reverse_proxy": oai_settings.reverse_proxy,
"logit_bias": logit_bias,
};
if (oai_settings.use_window_ai) {
async function sendWindowAIRequest(openai_msgs_tosend, signal, stream) {
if (!('ai' in window)) {
return showWindowExtensionError();
}
@ -589,14 +556,7 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
}
}
const generatePromise = window.ai.generateText(
{
messages: openai_msgs_tosend,
},
{
temperature: parseFloat(oai_settings.temp_openai),
maxTokens: oai_settings.openai_max_tokens,
onStreamResult: (res, err) => {
const onStreamResult = (res, err) => {
if (err) {
handleWindowError(err);
}
@ -609,27 +569,86 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
else {
content = thisContent;
}
}
const generatePromise = window.ai.generateText(
{
messages: openai_msgs_tosend,
},
{
temperature: parseFloat(oai_settings.temp_openai),
maxTokens: oai_settings.openai_max_tokens,
onStreamResult: onStreamResult,
}
);
try {
if (stream) {
generatePromise.then((res) => {
const handleGeneratePromise = (resolve, reject) => {
generatePromise
.then((res) => {
content = res[0]?.message?.content;
finished = true;
}).catch(handleWindowError);
resolve && resolve(content);
})
.catch((err) => {
handleWindowError(err);
finished = true;
reject && reject(err);
});
};
if (stream) {
handleGeneratePromise();
return windowStreamingFunction;
} else {
const result = await generatePromise;
content = result[0]?.message?.content;
return content;
return new Promise((resolve, reject) => {
signal.addEventListener('abort', (reason) => {
reject(reason);
});
handleGeneratePromise(resolve, reject);
});
}
} catch (err) {
handleWindowError(err);
}
async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
// Provide default abort signal
if (!signal) {
signal = new AbortController().signal;
}
if (oai_settings.reverse_proxy) {
validateReverseProxy();
}
let logit_bias = {};
const stream = type !== 'quiet' && oai_settings.stream_openai;
// If we're using the window.ai extension, use that instead
// Doesn't support logit bias yet
if (oai_settings.use_window_ai) {
return sendWindowAIRequest(openai_msgs_tosend, signal, stream);
}
if (oai_settings.bias_preset_selected
&& Array.isArray(oai_settings.bias_presets[oai_settings.bias_preset_selected])
&& oai_settings.bias_presets[oai_settings.bias_preset_selected].length) {
logit_bias = biasCache || await calculateLogitBias();
biasCache = logit_bias;
}
const generate_data = {
"messages": openai_msgs_tosend,
"model": oai_settings.openai_model,
"temperature": parseFloat(oai_settings.temp_openai),
"frequency_penalty": parseFloat(oai_settings.freq_pen_openai),
"presence_penalty": parseFloat(oai_settings.pres_pen_openai),
"top_p": parseFloat(oai_settings.top_p_openai),
"max_tokens": oai_settings.openai_max_tokens,
"stream": stream,
"reverse_proxy": oai_settings.reverse_proxy,
"logit_bias": logit_bias,
};
const generate_url = '/generate_openai';
const response = await fetch(generate_url, {
method: 'POST',
@ -1486,7 +1505,7 @@ $(document).ready(function () {
saveSettingsDebounced();
});
$("#wi_format_textarea").on('input', function (){
// Persist the World Info format template whenever the textarea content changes.
$("#wi_format_textarea").on('input', function () {
    // Inside the handler, `this` is the #wi_format_textarea element itself.
    oai_settings.wi_format = $(this).val();
    saveSettingsDebounced();
});
@ -1555,7 +1574,7 @@ $(document).ready(function () {
saveSettingsDebounced();
});
$("#wi_format_restore").on('click', function() {
// Reset the World Info format template to its default and reflect it in the UI.
$("#wi_format_restore").on('click', function () {
    oai_settings.wi_format = default_wi_format;
    // Sync the textarea with the freshly restored default value.
    $('#wi_format_textarea').val(default_wi_format);
    saveSettingsDebounced();
});