Add NovelAI token streaming

Cohee
2023-06-09 02:09:00 +03:00
parent a95e321099
commit 10bbc97069
4 changed files with 113 additions and 20 deletions

View File

@@ -54,8 +54,11 @@
//find all the elements with `data-i18n` attribute
$("[data-i18n]").each(function () {
//read the translation from the language data
var key = $(this).data("i18n");
$(this).text(data[language][key]);
const key = $(this).data("i18n");
const text = data?.[language]?.[key];
if (text) {
$(this).text(text);
}
});
});
});
@@ -259,6 +262,19 @@
</div>
</div>
<div id="range_block_novel">
<div class="range-block">
<label class="checkbox_label widthFreeExpand">
<input id="streaming_novel" type="checkbox" />
<span data-i18n="Streaming">Streaming</span>
</label>
<div class="toggle-description justifyLeft">
<span data-i18n="Display the response bit by bit as it is generated.">
Display the response bit by bit as it is generated.</span><br>
<span data-i18n="When this is off, responses will be displayed all at once when they are complete.">
When this is off, responses will be displayed all at once when they are complete.
</span>
</div>
</div>
<div class="range-block-title" data-i18n="temperature">
Temperature
</div>

View File

@@ -85,6 +85,7 @@ import {
} from "./scripts/openai.js";
import {
generateNovelWithStreaming,
getNovelGenerationData,
getNovelTier,
loadNovelPreset,
@@ -1565,6 +1566,7 @@ function appendToStoryString(value, prefix) {
function isStreamingEnabled() {
return ((main_api == 'openai' && oai_settings.stream_openai)
|| (main_api == 'novel' && nai_settings.streaming_novel)
|| (main_api == 'poe' && poe_settings.streaming)
|| (main_api == 'textgenerationwebui' && textgenerationwebui_settings.streaming))
&& !isMultigenEnabled(); // Multigen has a quasi-streaming mode which breaks the real streaming
@@ -2337,6 +2339,9 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
else if (main_api == 'textgenerationwebui' && isStreamingEnabled() && type !== 'quiet') {
streamingProcessor.generator = await generateTextGenWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else if (main_api == 'novel' && isStreamingEnabled() && type !== 'quiet') {
streamingProcessor.generator = await generateNovelWithStreaming(generate_data, streamingProcessor.abortController.signal);
}
else {
try {
const response = await fetch(generate_url, {

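Note (not part of this diff): the StreamingProcessor that consumes this generator is defined elsewhere in script.js and is not shown here. As a rough sketch of how a generator function returned by generateNovelWithStreaming is typically drained — the surrounding names and setup are illustrative, not the project's actual code, and generate_data is assumed to have been built as in the diff above:

// Illustrative sketch only, not from this commit.
const abortController = new AbortController();
const generator = await generateNovelWithStreaming(generate_data, abortController.signal);
let messageSoFar = '';
for await (const text of generator()) {
    messageSoFar = text;   // each yield is the full accumulated text so far, not a single-token delta
    // update the in-progress chat message in the UI here
}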
View File

@@ -1,4 +1,5 @@
import {
getRequestHeaders,
saveSettingsDebounced,
} from "../script.js";
@@ -19,6 +20,7 @@ const nai_settings = {
tail_free_sampling_novel: 0.68,
model_novel: "euterpe-v2",
preset_settings_novel: "Classic-Euterpe",
streaming_novel: false,
};
const nai_tiers = {
@@ -65,6 +67,7 @@ function loadNovelSettings(settings) {
nai_settings.rep_pen_freq_novel = settings.rep_pen_freq_novel;
nai_settings.rep_pen_presence_novel = settings.rep_pen_presence_novel;
nai_settings.tail_free_sampling_novel = settings.tail_free_sampling_novel;
nai_settings.streaming_novel = !!settings.streaming_novel;
loadNovelSettingsUi(nai_settings);
}
@@ -83,6 +86,7 @@ function loadNovelSettingsUi(ui_settings) {
$("#rep_pen_presence_counter_novel").text(Number(ui_settings.rep_pen_presence_novel).toFixed(3));
$("#tail_free_sampling_novel").val(ui_settings.tail_free_sampling_novel);
$("#tail_free_sampling_counter_novel").text(Number(ui_settings.tail_free_sampling_novel).toFixed(3));
$("#streaming_novel").prop('checked', ui_settings.streaming_novel);
}
const sliders = [
@@ -155,10 +159,53 @@ export function getNovelGenerationData(finalPromt, this_settings, this_amount_ge
//use_string = true;
"return_full_text": false,
"prefix": "vanilla",
"order": this_settings.order
"order": this_settings.order,
"streaming": nai_settings.streaming_novel,
};
}
export async function generateNovelWithStreaming(generate_data, signal) {
const response = await fetch('/generate_novelai', {
headers: getRequestHeaders(),
body: JSON.stringify(generate_data),
method: 'POST',
signal: signal,
});
return async function* streamData() {
const decoder = new TextDecoder();
const reader = response.body.getReader();
let getMessage = '';
let messageBuffer = "";
while (true) {
const { done, value } = await reader.read();
let response = decoder.decode(value);
let eventList = [];
// ReadableStream's buffer is not guaranteed to contain full SSE messages as they arrive in chunks
// We need to buffer chunks until we have one or more full messages (separated by double newlines)
messageBuffer += response;
eventList = messageBuffer.split("\n\n");
// Last element will be an empty string or a leftover partial message
messageBuffer = eventList.pop();
for (let event of eventList) {
for (let subEvent of event.split('\n')) {
if (subEvent.startsWith("data")) {
let data = JSON.parse(subEvent.substring(5));
getMessage += (data?.token || '');
yield getMessage;
}
}
}
if (done) {
return;
}
}
}
}
$(document).ready(function () {
sliders.forEach(slider => {
$(document).on("input", slider.sliderId, function () {
@@ -171,6 +218,12 @@ $(document).ready(function () {
});
});
$('#streaming_novel').on('input', function () {
const value = !!$(this).prop('checked');
nai_settings.streaming_novel = value;
saveSettingsDebounced();
});
$("#model_novel_select").change(function () {
nai_settings.model_novel = $("#model_novel_select").find(":selected").val();
saveSettingsDebounced();

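Note (not part of this diff): the buffering in generateNovelWithStreaming exists because network chunks are not aligned to SSE event boundaries, so the reader accumulates raw text and only parses events once a blank-line terminator has arrived. A standalone illustration of that split-and-pop pattern follows — the sample payloads are invented, and the real event shape is only assumed to carry a token field, as the data?.token access above suggests:

// Illustrative sketch only, not from this commit.
let buffer = '';
function feed(chunk) {
    buffer += chunk;
    const events = buffer.split('\n\n');
    buffer = events.pop();   // keep the trailing partial event (possibly '')
    return events;           // only complete events are handed to the parser
}
feed('data: {"token":"Hel');            // -> []  (incomplete, stays buffered)
feed('lo"}\n\ndata: {"token":" wor');   // -> ['data: {"token":"Hello"}']
feed('ld"}\n\n');                       // -> ['data: {"token":" world"}']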
View File

@@ -1486,22 +1486,33 @@ app.post("/generate_novelai", jsonParser, async function (request, response_generate_novel) {
};
try {
const response = await postAsync(api_novelai + "/ai/generate", args);
console.log(response);
return response_generate_novel.send(response);
} catch (error) {
switch (error?.statusCode) {
case 400:
console.log('Validation error');
break;
case 401:
console.log('Access Token is incorrect');
break;
case 402:
console.log('An active subscription is required to access this endpoint');
break;
}
const fetch = require('node-fetch').default;
const url = request.body.streaming ? `${api_novelai}/ai/generate-stream` : `${api_novelai}/ai/generate`;
const response = await fetch(url, { method: 'POST', timeout: 0, ...args });
if (request.body.streaming) {
// Pipe remote SSE stream to Express response
response.body.pipe(response_generate_novel);
request.socket.on('close', function () {
response.body.destroy(); // Close the remote stream
response_generate_novel.end(); // End the Express response
});
response.body.on('end', function () {
console.log("Streaming request finished");
response_generate_novel.end();
});
} else {
if (!response.ok) {
console.log(`Novel API returned error: ${response.status} ${response.statusText} ${await response.text()}`);
return response_generate_novel.status(response.status).send({ error: true });
}
const data = await response.json();
return response_generate_novel.send(data);
}
} catch (error) {
return response_generate_novel.send({ error: true });
}
});
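Note (not part of this diff): the two handlers above tie the upstream NovelAI stream's lifetime to the client connection. When the user aborts, the client fetch is cancelled via abortController.signal, the browser closes the connection, Express sees a 'close' event on request.socket, and destroying response.body stops pulling tokens from NovelAI. The same pass-through shape in isolation, with illustrative names (upstreamUrl, expressRequest, expressResponse are assumptions, not identifiers from server.js):

// Illustrative sketch only, not from this commit.
const upstream = await fetch(upstreamUrl, { method: 'POST', body, headers, timeout: 0 });
upstream.body.pipe(expressResponse);                    // forward the SSE bytes untouched
expressRequest.socket.on('close', () => {
    upstream.body.destroy();                            // client went away: stop the remote stream
    expressResponse.end();
});
upstream.body.on('end', () => expressResponse.end());   // remote finished: close our side too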
@@ -2764,7 +2775,8 @@ async function sendClaudeRequest(request, response) {
headers: {
"Content-Type": "application/json",
"x-api-key": api_key_claude,
}
},
timeout: 0,
});
if (request.body.stream) {
@@ -3390,7 +3402,14 @@ app.post('/novel_tts', jsonParser, async (request, response) => {
try {
const fetch = require('node-fetch').default;
const url = `${api_novelai}/ai/generate-voice?text=${encodeURIComponent(text)}&voice=-1&seed=${encodeURIComponent(voice)}&opus=false&version=v2`;
const result = await fetch(url, { method: 'GET', headers: { 'Authorization': `Bearer ${token}`, 'Accept': 'audio/webm' } });
const result = await fetch(url, {
method: 'GET',
headers: {
'Authorization': `Bearer ${token}`,
'Accept': 'audio/webm',
},
timeout: 0,
});
if (!result.ok) {
return response.sendStatus(result.status);