Fix window.ai streaming

This commit is contained in:
SillyLossy
2023-05-27 19:50:08 +03:00
parent 386ba29399
commit 0ab097711b
2 changed files with 63 additions and 28 deletions

View File

@ -3813,16 +3813,6 @@ async function getSettings(type) {
}
}
//Load which API we are using
if (settings.main_api != undefined) {
main_api = settings.main_api;
$("#main_api option[value=" + main_api + "]").attr(
"selected",
"true"
);
changeMainAPI();
}
//Load KoboldAI settings
koboldai_setting_names = data.koboldai_setting_names;
koboldai_settings = data.koboldai_settings;
@ -3911,7 +3901,7 @@ async function getSettings(type) {
// Load power user settings
loadPowerUserSettings(settings, data);
// Load- character tags
// Load character tags
loadTagsSettings(settings);
// Load context templates
@ -3924,8 +3914,14 @@ async function getSettings(type) {
$("#amount_gen").val(amount_gen);
$("#amount_gen_counter").text(`${amount_gen}`);
//Enable GUI deference settings if GUI is selected for Kobold
if (main_api === "kobold") {
//Load which API we are using
if (settings.main_api != undefined) {
main_api = settings.main_api;
$("#main_api option[value=" + main_api + "]").attr(
"selected",
"true"
);
changeMainAPI();
}
//Load User's Name and Avatar

View File

@ -557,8 +557,28 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
return showWindowExtensionError();
}
async function* windowStreamingFunction(res) {
yield (res?.message?.content || '');
// Accumulated text received from window.ai's onStreamResult callback.
let content = '';
// Last value that was actually yielded to (and consumed by) the stream reader.
let lastContent = '';
// Set once the generate promise settles, signalling no more chunks will arrive.
let finished = false;

/**
 * Async generator bridging window.ai's callback-based streaming into a
 * pull-based stream. Polls the shared `content` buffer and yields it
 * whenever it changed since the last yield. Terminates when the request
 * is aborted, or when generation is finished AND every update has been
 * delivered to the consumer.
 */
async function* windowStreamingFunction() {
    while (true) {
        // Stop immediately if the caller aborted the request.
        if (signal.aborted) {
            return;
        }

        // Yield to the event loop so onStreamResult can update `content`.
        await delay(1);

        if (lastContent !== content) {
            // Snapshot before yielding: `content` may be appended to while
            // the consumer processes this chunk, and marking the *new* value
            // as seen would silently drop that update.
            const snapshot = content;
            yield snapshot;
            lastContent = snapshot;
            // Re-check for updates that arrived during the yield before
            // honoring `finished`, so the final chunk is never lost.
            continue;
        }

        if (finished) {
            return;
        }
    }
}
const generatePromise = window.ai.generateText(
@ -568,22 +588,34 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
{
temperature: parseFloat(oai_settings.temp_openai),
maxTokens: oai_settings.openai_max_tokens,
onStreamResult: windowStreamingFunction,
onStreamResult: (res, err) => {
if (err) {
handleWindowError(err);
}
const thisContent = res?.message?.content;
if (res?.isPartial) {
content += thisContent;
}
else {
content = thisContent;
}
},
}
);
if (stream) {
return windowStreamingFunction;
}
try {
const [{ message }] = await generatePromise;
windowStreamingFunction(message);
return message?.content;
if (stream) {
generatePromise.then(() => { finished = true; }).catch(handleWindowError);
return windowStreamingFunction;
} else {
const result = await generatePromise;
content = result[0]?.message?.content;
return content;
}
} catch (err) {
const text = parseWindowError(err);
toastr.error(text, 'Window.ai returned an error');
throw err;
handleWindowError(err);
}
}
@ -651,6 +683,12 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
}
}
/**
 * Notifies the user that a Window.ai request failed and rethrows the error
 * so callers can abort their own control flow.
 * @param {Error} err - The error reported by window.ai.
 * @throws Always rethrows the original error after displaying it.
 */
function handleWindowError(err) {
    toastr.error(parseWindowError(err), 'Window.ai returned an error');
    throw err;
}
function parseWindowError(err) {
let text = 'Unknown error';
@ -812,6 +850,7 @@ function loadOpenAISettings(data, settings) {
oai_settings.bias_preset_selected = settings.bias_preset_selected ?? default_settings.bias_preset_selected;
oai_settings.bias_presets = settings.bias_presets ?? default_settings.bias_presets;
oai_settings.legacy_streaming = settings.legacy_streaming ?? default_settings.legacy_streaming;
oai_settings.use_window_ai = settings.use_window_ai ?? default_settings.use_window_ai;
if (settings.nsfw_toggle !== undefined) oai_settings.nsfw_toggle = !!settings.nsfw_toggle;
if (settings.keep_example_dialogue !== undefined) oai_settings.keep_example_dialogue = !!settings.keep_example_dialogue;