Add smooth streaming

Cohee 2024-04-02 14:56:15 +03:00
parent c0fffde739
commit 51b3b8bfaa
7 changed files with 192 additions and 8 deletions

View File

@@ -3618,6 +3618,15 @@
</div>
</div>
</div>
<label class="checkbox_label" for="smooth_streaming">
<input id="smooth_streaming" type="checkbox" />
<div class="flex-container alignItemsBaseline">
<span data-i18n="Smooth Streaming">
Smooth Streaming
</span>
<i class="fa-solid fa-flask" title="Experimental feature. May not work for all backends."></i>
</div>
</label>
</div>
</div>
</div>

View File

@@ -9,7 +9,7 @@ import {
import {
power_user,
} from './power-user.js';
import EventSourceStream from './sse-stream.js';
import { getEventSourceStream } from './sse-stream.js';
import { getSortableDelay } from './utils.js';
export const kai_settings = {
@@ -174,7 +174,7 @@ export async function generateKoboldWithStreaming(generate_data, signal) {
tryParseStreamingError(response, await response.text());
throw new Error(`Got response status ${response.status}`);
}
const eventStream = new EventSourceStream();
const eventStream = getEventSourceStream();
response.body.pipeThrough(eventStream);
const reader = eventStream.readable.getReader();
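
The same pattern repeats in every streaming backend touched by this commit: pipe the raw response body through the stream returned by getEventSourceStream(), then read MessageEvent objects from its readable side. A minimal consumption sketch of that pattern, assuming the module sits alongside sse-stream.js; the readEvents name and the console logging are illustrative, not part of the commit:

import { getEventSourceStream } from './sse-stream.js';

async function readEvents(response) {
    // Smooth or default SSE parser, depending on the user's smooth_streaming setting.
    const eventStream = getEventSourceStream();
    response.body.pipeThrough(eventStream);
    const reader = eventStream.readable.getReader();
    while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        // Each value is a MessageEvent; value.data is the raw JSON string sent by the backend.
        console.log(value.data);
    }
}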

View File

@@ -10,7 +10,7 @@ import {
import { getCfgPrompt } from './cfg-scale.js';
import { MAX_CONTEXT_DEFAULT, MAX_RESPONSE_DEFAULT, power_user } from './power-user.js';
import { getTextTokens, tokenizers } from './tokenizers.js';
import EventSourceStream from './sse-stream.js';
import { getEventSourceStream } from './sse-stream.js';
import {
getSortableDelay,
getStringHash,
@@ -614,7 +614,7 @@ export async function generateNovelWithStreaming(generate_data, signal) {
tryParseStreamingError(response, await response.text());
throw new Error(`Got response status ${response.status}`);
}
const eventStream = new EventSourceStream();
const eventStream = getEventSourceStream();
response.body.pipeThrough(eventStream);
const reader = eventStream.readable.getReader();

View File

@@ -45,7 +45,7 @@ import {
import { getCustomStoppingStrings, persona_description_positions, power_user } from './power-user.js';
import { SECRET_KEYS, secret_state, writeSecret } from './secrets.js';
import EventSourceStream from './sse-stream.js';
import { getEventSourceStream } from './sse-stream.js';
import {
delay,
download,
@@ -1772,7 +1772,7 @@ async function sendOpenAIRequest(type, messages, signal) {
throw new Error(`Got response status ${response.status}`);
}
if (stream) {
const eventStream = new EventSourceStream();
const eventStream = getEventSourceStream();
response.body.pipeThrough(eventStream);
const reader = eventStream.readable.getReader();
return async function* streamData() {

View File

@@ -118,6 +118,7 @@ let power_user = {
markdown_escape_strings: '',
chat_truncation: 100,
streaming_fps: 30,
smooth_streaming: false,
ui_mode: ui_mode.POWER,
fast_ui_mode: true,
@@ -1544,6 +1545,8 @@ function loadPowerUserSettings(settings, data) {
$('#streaming_fps').val(power_user.streaming_fps);
$('#streaming_fps_counter').val(power_user.streaming_fps);
$('#smooth_streaming').prop('checked', power_user.smooth_streaming);
$('#font_scale').val(power_user.font_scale);
$('#font_scale_counter').val(power_user.font_scale);
@@ -2941,6 +2944,11 @@ $(document).ready(() => {
saveSettingsDebounced();
});
$('#smooth_streaming').on('input', function () {
power_user.smooth_streaming = !!$(this).prop('checked');
saveSettingsDebounced();
});
$('input[name="font_scale"]').on('input', async function (e) {
power_user.font_scale = Number(e.target.value);
$('#font_scale_counter').val(power_user.font_scale);

View File

@@ -1,3 +1,6 @@
import { power_user } from './power-user.js';
import { delay } from './utils.js';
/**
* A stream which handles Server-Sent Events from a binary ReadableStream like you get from the fetch API.
*/
@@ -74,4 +77,168 @@ class EventSourceStream {
}
}
/**
* Like the default EventSourceStream, but splits each event into one event per character of its text payload, pacing them with short delays.
*/
export class SmoothEventSourceStream extends EventSourceStream {
constructor() {
super();
const defaultDelayMs = 20;
const punctuationDelayMs = 500;
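// Pacing between characters: a long pause after sentence-ending punctuation or newlines, half of that after commas, a short default otherwise.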
function getDelay(s) {
if (!s) {
return 0;
}
if (s === ',') {
return punctuationDelayMs / 2;
}
if (['.', '!', '?', '\n'].includes(s)) {
return punctuationDelayMs;
}
return defaultDelayMs;
}
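// Tracks the last character emitted so the delay before the next one can be chosen from it.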
let lastStr = '';
const transformStream = new TransformStream({
async transform(chunk, controller) {
const event = chunk;
const data = event.data;
try {
const json = JSON.parse(data);
if (!json) {
controller.enqueue(event);
return;
}
// Claude
if (typeof json.delta === 'object') {
if (typeof json.delta.text === 'string' && json.delta.text.length > 0) {
for (let i = 0; i < json.delta.text.length; i++) {
await delay(getDelay(lastStr));
const str = json.delta.text[i];
controller.enqueue(new MessageEvent(event.type, { data: JSON.stringify({ ...json, delta: { text: str } }) }));
lastStr = str;
}
} else {
controller.enqueue(event);
}
}
// MakerSuite
else if (Array.isArray(json.candidates)) {
for (let i = 0; i < json.candidates.length; i++) {
if (typeof json.candidates[i].content === 'string' && json.candidates[i].content.length > 0) {
for (let j = 0; j < json.candidates[i].content.length; j++) {
await delay(getDelay(lastStr));
const str = json.candidates[i].content[j];
const candidateClone = structuredClone(json.candidates[i]);
candidateClone.content = str;
controller.enqueue(new MessageEvent(event.type, { data: JSON.stringify({ ...json, candidates: [candidateClone] }) }));
lastStr = str;
}
} else {
controller.enqueue(event);
}
}
}
// NovelAI / KoboldCpp Classic
else if (typeof json.token === 'string' && json.token.length > 0) {
for (let i = 0; i < json.token.length; i++) {
await delay(getDelay(lastStr));
const str = json.token[i];
controller.enqueue(new MessageEvent(event.type, { data: JSON.stringify({ ...json, token: str }) }));
lastStr = str;
}
}
// llama.cpp?
else if (typeof json.content === 'string' && json.content.length > 0) {
for (let i = 0; i < json.content.length; i++) {
await delay(getDelay(lastStr));
const str = json.content[i];
controller.enqueue(new MessageEvent(event.type, { data: JSON.stringify({ ...json, content: str }) }));
lastStr = str;
}
}
// OpenAI-likes
else if (Array.isArray(json.choices)) {
const isNotPrimary = json?.choices?.[0]?.index > 0;
if (isNotPrimary || json.choices.length === 0) {
controller.enqueue(event);
return;
}
if (typeof json.choices[0].delta === 'object') {
if (typeof json.choices[0].delta.text === 'string' && json.choices[0].delta.text.length > 0) {
for (let j = 0; j < json.choices[0].delta.text.length; j++) {
await delay(getDelay(lastStr));
const str = json.choices[0].delta.text[j];
const choiceClone = structuredClone(json.choices[0]);
choiceClone.delta.text = str;
const choices = [choiceClone];
controller.enqueue(new MessageEvent(event.type, { data: JSON.stringify({ ...json, choices }) }));
lastStr = str;
}
} else if (typeof json.choices[0].delta.content === 'string' && json.choices[0].delta.content.length > 0) {
for (let j = 0; j < json.choices[0].delta.content.length; j++) {
await delay(getDelay(lastStr));
const str = json.choices[0].delta.content[j];
const choiceClone = structuredClone(json.choices[0]);
choiceClone.delta.content = str;
const choices = [choiceClone];
controller.enqueue(new MessageEvent(event.type, { data: JSON.stringify({ ...json, choices }) }));
lastStr = str;
}
} else {
controller.enqueue(event);
}
}
else if (typeof json.choices[0].message === 'object') {
if (typeof json.choices[0].message.content === 'string' && json.choices[0].message.content.length > 0) {
for (let j = 0; j < json.choices[0].message.content.length; j++) {
await delay(getDelay(lastStr));
const str = json.choices[0].message.content[j];
const choiceClone = structuredClone(json.choices[0]);
choiceClone.message.content = str;
const choices = [choiceClone];
controller.enqueue(new MessageEvent(event.type, { data: JSON.stringify({ ...json, choices }) }));
lastStr = str;
}
} else {
controller.enqueue(event);
}
}
else if (typeof json.choices[0].text === 'string' && json.choices[0].text.length > 0) {
for (let j = 0; j < json.choices[0].text.length; j++) {
await delay(getDelay(lastStr));
const str = json.choices[0].text[j];
const choiceClone = structuredClone(json.choices[0]);
choiceClone.text = str;
const choices = [choiceClone];
controller.enqueue(new MessageEvent(event.type, { data: JSON.stringify({ ...json, choices }) }));
lastStr = str;
}
} else {
controller.enqueue(event);
}
}
} catch {
controller.enqueue(event);
}
},
});
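// Expose the re-chunked, per-character event stream as this stream's readable side.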
this.readable = this.readable.pipeThrough(transformStream);
}
}
export function getEventSourceStream() {
if (power_user.smooth_streaming) {
return new SmoothEventSourceStream();
}
return new EventSourceStream();
}
export default EventSourceStream;
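
To illustrate the effect: with smooth streaming enabled, a single NovelAI/KoboldCpp-style event whose payload is {"token": "Hi."} is re-emitted as three events, {"token": "H"}, {"token": "i"} and {"token": "."}. The first character goes out immediately (lastStr is still empty), the next two each wait defaultDelayMs (20 ms), and whatever character follows the period waits the full punctuationDelayMs (500 ms). With the setting disabled, getEventSourceStream() returns the stock EventSourceStream and events pass through unchanged.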

View File

@@ -12,7 +12,7 @@ import {
import { BIAS_CACHE, createNewLogitBiasEntry, displayLogitBias, getLogitBiasListResult } from './logit-bias.js';
import { power_user, registerDebugFunction } from './power-user.js';
import EventSourceStream from './sse-stream.js';
import { getEventSourceStream } from './sse-stream.js';
import { getCurrentDreamGenModelTokenizer, getCurrentOpenRouterModelTokenizer } from './textgen-models.js';
import { SENTENCEPIECE_TOKENIZERS, TEXTGEN_TOKENIZERS, getTextTokens, tokenizers } from './tokenizers.js';
import { getSortableDelay, onlyUnique } from './utils.js';
@@ -821,7 +821,7 @@ async function generateTextGenWithStreaming(generate_data, signal) {
throw new Error(`Got response status ${response.status}`);
}
const eventStream = new EventSourceStream();
const eventStream = getEventSourceStream();
response.body.pipeThrough(eventStream);
const reader = eventStream.readable.getReader();