OAI streaming

This commit is contained in:
SillyLossy
2023-04-12 01:33:33 +03:00
parent a8c31c723b
commit 2040b8d3ff
3 changed files with 74 additions and 46 deletions

View File

@ -607,8 +607,7 @@
</label>
</div>
<!-- Currently broken -->
<div style="display: none" class="range-block">
<div class="range-block">
<label title="Enables OpenAI completion streaming" class="checkbox_label" for="stream_toggle">
<input id="stream_toggle" type="checkbox" />
Streaming

View File

@ -960,12 +960,15 @@ function addOneMessage(mes, type = "normal", insertAfter = null) {
hideSwipeButtons();
showSwipeButtons();
var $textchat = $("#chat");
$textchat.scrollTop(($textchat[0].scrollHeight));
scrollChatToBottom();
}
}
// Scroll the chat log so the most recent message is in view.
// Used after appending messages and on each streaming progress tick.
function scrollChatToBottom() {
    const chatElement = $("#chat");
    chatElement.scrollTop(chatElement[0].scrollHeight);
}
function substituteParams(content, _name1, _name2) {
_name1 = _name1 ?? name1;
_name2 = _name2 ?? name2;
@ -1116,19 +1119,28 @@ function isStreamingEnabled() {
class StreamingProcessor {
// Creates the placeholder chat message the stream will write into and
// returns its message id (index of the last rendered message).
onStartStreaming(text) {
// NOTE(review): the next two lines are the removed/added pair from the
// diff — only `saveReply(this.type, text)` belongs in the final code;
// `type` alone is not in scope inside this class method.
saveReply(type, text);
saveReply(this.type, text);
hideSwipeButtons();
// count_view_mes was incremented by saveReply; the new message's id.
return (count_view_mes - 1);
}
// Called on every streamed chunk: cleans the accumulated text, strips a
// leading character name, stores it into the chat model (including the
// active swipe slot), and re-renders the target message's DOM node.
onProgressStreaming(messageId, text) {
let processedText = cleanUpMessage(text);
// NOTE(review): the destructuring line below is the *removed* side of the
// diff — it assigns undeclared globals and references `force_name2`
// without `this.`; the three lines after it are its replacement. Keeping
// both would throw a TDZ ReferenceError on `isName` at runtime.
({isName, processedText} = extractNameFromMessage(processedText, force_name2));
let result = extractNameFromMessage(processedText, this.force_name2);
let isName = result.this_mes_is_name;
processedText = result.getMessage;
// Persist the partial message into the chat data model.
chat[messageId]['is_name'] = isName;
chat[messageId]['mes'] = processedText;
// When regenerating a swipe, also mirror the text into the swipe slot.
if (this.type == 'swipe' && Array.isArray(chat[messageId]['swipes'])) {
chat[messageId]['swipes'][chat[messageId]['swipe_id']] = processedText;
}
// Re-render only the affected message node, then keep the view pinned
// to the bottom so the user sees text as it streams in.
let formattedText = messageFormating(processedText, chat[messageId].name, chat[messageId].is_system, chat[messageId].force_avatar);
const mesText = $(`#chat .mes[mesid="${messageId}"] .mes_text`);
mesText.empty();
mesText.append(formattedText);
scrollChatToBottom();
}
onFinishStreaming(messageId, text) {
@ -1146,6 +1158,7 @@ class StreamingProcessor {
is_send_press = false;
activateSendButtons();
setGenerationProgress(0);
showSwipeButtons();
}
onStopStreaming() {
@ -1156,16 +1169,18 @@ class StreamingProcessor {
throw new Error('Generation function for streaming is not hooked up');
}
constructor() {
constructor(type, force_name2) {
this.result = "";
this.messageId = -1;
this.type = type;
this.force_name2 = force_name2;
this.isStopped = false;
this.isFinished = false;
this.generator = this.nullStreamingGeneration;
}
async generate() {
this.messageId = this.onStartStreaming('');
this.messageId = this.onStartStreaming('...');
for await (const text of this.generator()) {
if (this.isStopped) {
@ -1209,6 +1224,14 @@ async function Generate(type, automatic_trigger, force_name2) {
return;
}
if (isStreamingEnabled()) {
streamingProcessor = new StreamingProcessor(type, force_name2);
hideSwipeButtons();
}
else {
streamingProcessor = false;
}
if (selected_group && !is_group_generating) {
generateGroupWrapper(false, type = type);
return;
@ -1783,14 +1806,13 @@ async function Generate(type, automatic_trigger, force_name2) {
}
console.log('rungenerate calling API');
streamingProcessor = new StreamingProcessor();
if (main_api == 'openai') {
let prompt = await prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldInfoAfter, extension_prompt, promptBias);
if (isStreamingEnabled()) {
streamingProcessor.generator = () => sendOpenAIRequest(prompt);
streamingProcessor.generator = await sendOpenAIRequest(prompt);
await streamingProcessor.generate();
streamingProcessor = null;
}
else {
sendOpenAIRequest(prompt).then(onSuccess).catch(onError);
@ -3389,6 +3411,10 @@ $(document).ready(function () {
closeMessageEditor();
}
if (isStreamingEnabled() && streamingProcessor) {
streamingProcessor.isStopped = true;
}
const swipe_duration = 120;
const swipe_range = '700px';
chat[chat.length - 1]['swipe_id']--;

View File

@ -425,15 +425,11 @@ async function sendOpenAIRequest(openai_msgs_tosend) {
"frequency_penalty": parseFloat(oai_settings.freq_pen_openai),
"presence_penalty": parseFloat(oai_settings.pres_pen_openai),
"max_tokens": oai_settings.openai_max_tokens,
"stream": false, //oai_settings.stream_openai,
"stream": oai_settings.stream_openai,
"reverse_proxy": oai_settings.reverse_proxy,
};
const generate_url = '/generate_openai';
// TODO: fix streaming
const streaming = oai_settings.stream_openai;
const last_view_mes = count_view_mes;
const response = await fetch(generate_url, {
method: 'POST',
body: JSON.stringify(generate_data),
@ -443,40 +439,47 @@ async function sendOpenAIRequest(openai_msgs_tosend) {
}
});
const data = await response.json();
if (oai_settings.stream_openai) {
return async function* streamData() {
const decoder = new TextDecoder();
const reader = response.body.getReader();
let getMessage = "";
while (true) {
const { done, value } = await reader.read();
let response = decoder.decode(value);
if (data.error) {
throw new Error(data);
}
if (response == "{\"error\":true}") {
throw new Error('error during streaming');
}
return data.choices[0]["message"]["content"];
}
let eventList = response.split("\n");
// Unused
async function* onStream(e) {
if (!oai_settings.stream_openai) {
return;
}
for (let event of eventList) {
if (!event.startsWith("data"))
continue;
if (event == "data: [DONE]") {
return;
}
let data = JSON.parse(event.substring(6));
// the first and last messages are undefined, protect against that
getMessage += data.choices[0]["delta"]["content"] || "";
yield getMessage;
}
let response = e.currentTarget.response;
if (response == "{\"error\":true}") {
throw new Error('error during streaming');
}
let eventList = response.split("\n");
let getMessage = "";
for (let event of eventList) {
if (!event.startsWith("data"))
continue;
if (event == "data: [DONE]") {
return getMessage;
if (done) {
return;
}
}
}
let data = JSON.parse(event.substring(6));
// the first and last messages are undefined, protect against that
getMessage += data.choices[0]["delta"]["content"] || "";
yield getMessage;
}
else {
const data = await response.json();
if (data.error) {
throw new Error(data);
}
return data.choices[0]["message"]["content"];
}
}