import { eventSource, event_types } from '../script.js';
import { power_user } from './power-user.js';
import { delay } from './utils.js';

/**
 * A stream which handles Server-Sent Events from a binary ReadableStream like you get from the fetch API.
 */
class EventSourceStream {
    constructor() {
        const decoder = new TextDecoderStream('utf-8');

        let streamBuffer = '';
        let lastEventId = '';

        function processChunk(controller) {
            // Events are separated by two newlines
            const events = streamBuffer.split(/\r\n\r\n|\r\r|\n\n/g);
            if (events.length === 0) return;

            // The leftover text to remain in the buffer is whatever doesn't have two newlines after it. If the buffer ended
            // with two newlines, this will be an empty string.
            streamBuffer = events.pop();

            for (const eventChunk of events) {
                let eventType = '';
                // Split up by single newlines.
                const lines = eventChunk.split(/\n|\r|\r\n/g);
                let eventData = '';
                for (const line of lines) {
                    const lineMatch = /([^:]+)(?:: ?(.*))?/.exec(line);
                    if (lineMatch) {
                        const field = lineMatch[1];
                        const value = lineMatch[2] || '';

                        switch (field) {
                            case 'event':
                                eventType = value;
                                break;
                            case 'data':
                                eventData += value;
                                eventData += '\n';
                                break;
                            case 'id':
                                // The ID field cannot contain null, per the spec
                                if (!value.includes('\0')) lastEventId = value;
                                break;
                            // We do nothing for the `retry` field, and other fields are explicitly ignored
                        }
                    }
                }

                // https://html.spec.whatwg.org/multipage/server-sent-events.html#dispatchMessage
                // Skip the event if the data buffer is the empty string.
                if (eventData === '') continue;

                // Trim the *last* trailing newline only.
                if (eventData[eventData.length - 1] === '\n') {
                    eventData = eventData.slice(0, -1);
                }

                const event = new MessageEvent(eventType || 'message', { data: eventData, lastEventId });
                controller.enqueue(event);
            }
        }

        const sseStream = new TransformStream({
            transform(chunk, controller) {
                streamBuffer += chunk;
                processChunk(controller);
            },
        });

        // Wire the pipeline: bytes -> TextDecoderStream -> SSE parser. The decoder's writable end and the
        // parser's readable end are exposed so this object can be used with ReadableStream.pipeThrough().
        decoder.readable.pipeThrough(sseStream);

        this.readable = sseStream.readable;
        this.writable = decoder.writable;
    }
}
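
// Usage sketch (illustrative only; `response` stands for a hypothetical fetch() Response whose body is an SSE stream):
//
//   const reader = response.body.pipeThrough(new EventSourceStream()).getReader();
//   while (true) {
//       const { done, value } = await reader.read();
//       if (done) break;
//       console.log(value.type, value.data); // each value is a MessageEvent
//   }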

/**
 * Gets a delay based on the character.
 * @param {string} s The character.
 * @returns {number} The delay in milliseconds.
 */
function getDelay(s) {
    if (!s) {
        return 0;
    }

    const speedFactor = Math.max(100 - power_user.smooth_streaming_speed, 1);
    const defaultDelayMs = speedFactor * 0.4;
    const punctuationDelayMs = defaultDelayMs * 25;

    if ([',', '\n'].includes(s)) {
        return punctuationDelayMs / 2;
    }

    if (['.', '!', '?'].includes(s)) {
        return punctuationDelayMs;
    }

    return defaultDelayMs;
}
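
// Worked example (illustrative; assumes power_user.smooth_streaming_speed === 50):
//   speedFactor        = max(100 - 50, 1) = 50
//   defaultDelayMs     = 50 * 0.4  = 20 ms per ordinary character
//   punctuationDelayMs = 20 * 25   = 500 ms after '.', '!' or '?' (half of that, 250 ms, after ',' or '\n')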

/**
 * Parses the stream data and returns the parsed data and the chunk to be sent.
 * @param {object} json The JSON data.
 * @returns {AsyncGenerator<{data: object, chunk: string}>} The parsed data and the chunk to be sent.
 */
async function* parseStreamData(json) {
    // Claude
    if (typeof json.delta === 'object') {
        if (typeof json.delta.text === 'string' && json.delta.text.length > 0) {
            for (let i = 0; i < json.delta.text.length; i++) {
                const str = json.delta.text[i];
                yield {
                    data: { ...json, delta: { text: str } },
                    chunk: str,
                };
            }
        }
        return;
    }
    // MakerSuite
    else if (Array.isArray(json.candidates)) {
        for (let i = 0; i < json.candidates.length; i++) {
            const isNotPrimary = json.candidates?.[0]?.index > 0;
            if (isNotPrimary || json.candidates.length === 0) {
                return null;
            }
            if (typeof json.candidates[i].content === 'object' && Array.isArray(json.candidates[i].content.parts)) {
                for (let j = 0; j < json.candidates[i].content.parts.length; j++) {
                    if (typeof json.candidates[i].content.parts[j].text === 'string') {
                        for (let k = 0; k < json.candidates[i].content.parts[j].text.length; k++) {
                            const str = json.candidates[i].content.parts[j].text[k];
                            const candidateClone = structuredClone(json.candidates[i]);
                            candidateClone.content.parts[j].text = str;
                            const candidates = [candidateClone];
                            yield {
                                data: { ...json, candidates },
                                chunk: str,
                            };
                        }
                    }
                }
            }
        }
        return;
    }
    // NovelAI / KoboldCpp Classic
    else if (typeof json.token === 'string' && json.token.length > 0) {
        for (let i = 0; i < json.token.length; i++) {
            const str = json.token[i];
            yield {
                data: { ...json, token: str },
                chunk: str,
            };
        }
        return;
    }
    // llama.cpp?
    else if (typeof json.content === 'string' && json.content.length > 0) {
        for (let i = 0; i < json.content.length; i++) {
            const str = json.content[i];
            yield {
                data: { ...json, content: str },
                chunk: str,
            };
        }
        return;
    }
    // OpenAI-likes
    else if (Array.isArray(json.choices)) {
        const isNotPrimary = json?.choices?.[0]?.index > 0;
        if (isNotPrimary || json.choices.length === 0) {
            return null;
        }

        if (typeof json.choices[0].text === 'string' && json.choices[0].text.length > 0) {
            for (let j = 0; j < json.choices[0].text.length; j++) {
                const str = json.choices[0].text[j];
                const choiceClone = structuredClone(json.choices[0]);
                choiceClone.text = str;
                const choices = [choiceClone];
                yield {
                    data: { ...json, choices },
                    chunk: str,
                };
            }
            return;
        }
        else if (typeof json.choices[0].delta === 'object') {
            if (typeof json.choices[0].delta.text === 'string' && json.choices[0].delta.text.length > 0) {
                for (let j = 0; j < json.choices[0].delta.text.length; j++) {
                    const str = json.choices[0].delta.text[j];
                    const choiceClone = structuredClone(json.choices[0]);
                    choiceClone.delta.text = str;
                    const choices = [choiceClone];
                    yield {
                        data: { ...json, choices },
                        chunk: str,
                    };
                }
                return;
            }
            else if (typeof json.choices[0].delta.content === 'string' && json.choices[0].delta.content.length > 0) {
                for (let j = 0; j < json.choices[0].delta.content.length; j++) {
                    const str = json.choices[0].delta.content[j];
                    const choiceClone = structuredClone(json.choices[0]);
                    choiceClone.delta.content = str;
                    const choices = [choiceClone];
                    yield {
                        data: { ...json, choices },
                        chunk: str,
                    };
                }
                return;
            }
        }
        else if (typeof json.choices[0].message === 'object') {
            if (typeof json.choices[0].message.content === 'string' && json.choices[0].message.content.length > 0) {
                for (let j = 0; j < json.choices[0].message.content.length; j++) {
                    const str = json.choices[0].message.content[j];
                    const choiceClone = structuredClone(json.choices[0]);
                    choiceClone.message.content = str;
                    const choices = [choiceClone];
                    yield {
                        data: { ...json, choices },
                        chunk: str,
                    };
                }
                return;
            }
        }
    }

    throw new Error('Unknown event data format');
}
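
// Per-character fan-out example (values are illustrative; a hypothetical OpenAI-style chunk):
//   input : { choices: [{ index: 0, delta: { content: 'Hi' } }] }
//   yields: { data: { choices: [{ index: 0, delta: { content: 'H' } }] }, chunk: 'H' }
//           { data: { choices: [{ index: 0, delta: { content: 'i' } }] }, chunk: 'i' }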

/**
 * Like the default one, but splits each event into one event per character of the event data.
 */
export class SmoothEventSourceStream extends EventSourceStream {
    constructor() {
        super();
        let lastStr = '';
        const transformStream = new TransformStream({
            async transform(chunk, controller) {
                const event = chunk;
                const data = event.data;
                try {
                    const hasFocus = document.hasFocus();

                    if (data === '[DONE]') {
                        lastStr = '';
                        return controller.enqueue(event);
                    }

                    const json = JSON.parse(data);

                    if (!json) {
                        lastStr = '';
                        return controller.enqueue(event);
                    }

                    for await (const parsed of parseStreamData(json)) {
                        hasFocus && await delay(getDelay(lastStr));
                        controller.enqueue(new MessageEvent(event.type, { data: JSON.stringify(parsed.data) }));
                        lastStr = parsed.chunk;
                        hasFocus && await eventSource.emit(event_types.SMOOTH_STREAM_TOKEN_RECEIVED, parsed.chunk);
                    }
                } catch (error) {
                    console.error('Smooth Streaming parsing error', error);
                    controller.enqueue(event);
                }
            },
        });

        this.readable = this.readable.pipeThrough(transformStream);
    }
}

/**
 * Creates an SSE stream, using the smooth variant when Smooth Streaming is enabled in the user settings.
 * @returns {EventSourceStream} The event source stream.
 */
export function getEventSourceStream() {
    if (power_user.smooth_streaming) {
        return new SmoothEventSourceStream();
    }

    return new EventSourceStream();
}

export default EventSourceStream;