Move NovelAI endpoints to separate file
This commit is contained in:
parent
0f1a0963fd
commit
599904d589
|
@ -3744,7 +3744,7 @@ function getGenerateUrl() {
|
||||||
} else if (main_api == 'textgenerationwebui') {
|
} else if (main_api == 'textgenerationwebui') {
|
||||||
generate_url = '/generate_textgenerationwebui';
|
generate_url = '/generate_textgenerationwebui';
|
||||||
} else if (main_api == 'novel') {
|
} else if (main_api == 'novel') {
|
||||||
generate_url = '/generate_novelai';
|
generate_url = '/api/novelai/generate';
|
||||||
}
|
}
|
||||||
return generate_url;
|
return generate_url;
|
||||||
}
|
}
|
||||||
|
|
|
@ -159,7 +159,7 @@ async function generateHypeBot() {
|
||||||
|
|
||||||
abortController = new AbortController();
|
abortController = new AbortController();
|
||||||
|
|
||||||
const response = await fetch('/generate_novelai', {
|
const response = await fetch('/api/novelai/generate', {
|
||||||
headers: getRequestHeaders(),
|
headers: getRequestHeaders(),
|
||||||
body: JSON.stringify(parameters),
|
body: JSON.stringify(parameters),
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
|
|
|
@ -170,7 +170,7 @@ class NovelTtsProvider {
|
||||||
|
|
||||||
async fetchTtsGeneration(inputText, voiceId) {
|
async fetchTtsGeneration(inputText, voiceId) {
|
||||||
console.info(`Generating new TTS for voice_id ${voiceId}`)
|
console.info(`Generating new TTS for voice_id ${voiceId}`)
|
||||||
const response = await fetch(`/novel_tts`,
|
const response = await fetch(`/api/novelai/generate-voice`,
|
||||||
{
|
{
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: getRequestHeaders(),
|
headers: getRequestHeaders(),
|
||||||
|
|
|
@ -11,6 +11,7 @@ import { getTextTokens, tokenizers } from "./tokenizers.js";
|
||||||
import {
|
import {
|
||||||
getSortableDelay,
|
getSortableDelay,
|
||||||
getStringHash,
|
getStringHash,
|
||||||
|
onlyUnique,
|
||||||
uuidv4,
|
uuidv4,
|
||||||
} from "./utils.js";
|
} from "./utils.js";
|
||||||
|
|
||||||
|
@ -87,7 +88,7 @@ export function getNovelUnlimitedImageGeneration() {
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function loadNovelSubscriptionData() {
|
export async function loadNovelSubscriptionData() {
|
||||||
const result = await fetch('/getstatus_novelai', {
|
const result = await fetch('/api/novelai/status', {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: getRequestHeaders(),
|
headers: getRequestHeaders(),
|
||||||
});
|
});
|
||||||
|
@ -402,7 +403,7 @@ function getBadWordPermutations(text) {
|
||||||
// Ditto + leading space
|
// Ditto + leading space
|
||||||
result.push(` ${text.toLowerCase()}`);
|
result.push(` ${text.toLowerCase()}`);
|
||||||
|
|
||||||
return result;
|
return result.filter(onlyUnique);
|
||||||
}
|
}
|
||||||
|
|
||||||
export function getNovelGenerationData(finalPrompt, this_settings, this_amount_gen, isImpersonate, cfgValues) {
|
export function getNovelGenerationData(finalPrompt, this_settings, this_amount_gen, isImpersonate, cfgValues) {
|
||||||
|
@ -679,7 +680,7 @@ function tryParseStreamingError(decoded) {
|
||||||
export async function generateNovelWithStreaming(generate_data, signal) {
|
export async function generateNovelWithStreaming(generate_data, signal) {
|
||||||
generate_data.streaming = nai_settings.streaming_novel;
|
generate_data.streaming = nai_settings.streaming_novel;
|
||||||
|
|
||||||
const response = await fetch('/generate_novelai', {
|
const response = await fetch('/api/novelai/generate', {
|
||||||
headers: getRequestHeaders(),
|
headers: getRequestHeaders(),
|
||||||
body: JSON.stringify(generate_data),
|
body: JSON.stringify(generate_data),
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
|
|
409
server.js
409
server.js
|
@ -10,7 +10,7 @@ const readline = require('readline');
|
||||||
const util = require('util');
|
const util = require('util');
|
||||||
const { Readable } = require('stream');
|
const { Readable } = require('stream');
|
||||||
const { finished } = require('stream/promises');
|
const { finished } = require('stream/promises');
|
||||||
const { TextEncoder, TextDecoder } = require('util');
|
const { TextDecoder } = require('util');
|
||||||
|
|
||||||
// cli/fs related library imports
|
// cli/fs related library imports
|
||||||
const open = require('open');
|
const open = require('open');
|
||||||
|
@ -45,7 +45,6 @@ const extract = require('png-chunks-extract');
|
||||||
const jimp = require('jimp');
|
const jimp = require('jimp');
|
||||||
const mime = require('mime-types');
|
const mime = require('mime-types');
|
||||||
const PNGtext = require('png-chunk-text');
|
const PNGtext = require('png-chunk-text');
|
||||||
const yauzl = require('yauzl');
|
|
||||||
|
|
||||||
// tokenizing related library imports
|
// tokenizing related library imports
|
||||||
const { SentencePieceProcessor } = require("@agnai/sentencepiece-js");
|
const { SentencePieceProcessor } = require("@agnai/sentencepiece-js");
|
||||||
|
@ -63,10 +62,9 @@ util.inspect.defaultOptions.maxStringLength = null;
|
||||||
const basicAuthMiddleware = require('./src/middleware/basicAuthMiddleware');
|
const basicAuthMiddleware = require('./src/middleware/basicAuthMiddleware');
|
||||||
const characterCardParser = require('./src/character-card-parser.js');
|
const characterCardParser = require('./src/character-card-parser.js');
|
||||||
const contentManager = require('./src/content-manager');
|
const contentManager = require('./src/content-manager');
|
||||||
const novelai = require('./src/novelai');
|
|
||||||
const statsHelpers = require('./statsHelpers.js');
|
const statsHelpers = require('./statsHelpers.js');
|
||||||
const { writeSecret, readSecret, readSecretState, migrateSecrets, SECRET_KEYS, getAllSecrets } = require('./src/secrets');
|
const { writeSecret, readSecret, readSecretState, migrateSecrets, SECRET_KEYS, getAllSecrets } = require('./src/secrets');
|
||||||
const { delay, getVersion } = require('./src/util');
|
const { delay, getVersion, getImageBuffers } = require('./src/util');
|
||||||
|
|
||||||
// Work around a node v20.0.0, v20.1.0, and v20.2.0 bug. The issue was fixed in v20.3.0.
|
// Work around a node v20.0.0, v20.1.0, and v20.2.0 bug. The issue was fixed in v20.3.0.
|
||||||
// https://github.com/nodejs/node/issues/47822#issuecomment-1564708870
|
// https://github.com/nodejs/node/issues/47822#issuecomment-1564708870
|
||||||
|
@ -107,8 +105,6 @@ const app = express();
|
||||||
app.use(compression());
|
app.use(compression());
|
||||||
app.use(responseTime());
|
app.use(responseTime());
|
||||||
|
|
||||||
const utf8Encode = new TextEncoder();
|
|
||||||
|
|
||||||
// impoort from statsHelpers.js
|
// impoort from statsHelpers.js
|
||||||
|
|
||||||
const config = require(path.join(process.cwd(), './config.conf'));
|
const config = require(path.join(process.cwd(), './config.conf'));
|
||||||
|
@ -131,7 +127,6 @@ const enableExtensions = config.enableExtensions;
|
||||||
const listen = config.listen;
|
const listen = config.listen;
|
||||||
const allowKeysExposure = config.allowKeysExposure;
|
const allowKeysExposure = config.allowKeysExposure;
|
||||||
|
|
||||||
const API_NOVELAI = "https://api.novelai.net";
|
|
||||||
const API_OPENAI = "https://api.openai.com/v1";
|
const API_OPENAI = "https://api.openai.com/v1";
|
||||||
const API_CLAUDE = "https://api.anthropic.com/v1";
|
const API_CLAUDE = "https://api.anthropic.com/v1";
|
||||||
|
|
||||||
|
@ -1851,162 +1846,6 @@ function getImages(path) {
|
||||||
.sort(Intl.Collator().compare);
|
.sort(Intl.Collator().compare);
|
||||||
}
|
}
|
||||||
|
|
||||||
//***********Novel.ai API
|
|
||||||
|
|
||||||
app.post("/getstatus_novelai", jsonParser, async function (request, response_getstatus_novel) {
|
|
||||||
if (!request.body) return response_getstatus_novel.sendStatus(400);
|
|
||||||
const api_key_novel = readSecret(SECRET_KEYS.NOVEL);
|
|
||||||
|
|
||||||
if (!api_key_novel) {
|
|
||||||
return response_getstatus_novel.sendStatus(401);
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const response = await fetch(API_NOVELAI + "/user/subscription", {
|
|
||||||
method: 'GET',
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
'Authorization': "Bearer " + api_key_novel,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
if (response.ok) {
|
|
||||||
const data = await response.json();
|
|
||||||
return response_getstatus_novel.send(data);
|
|
||||||
} else if (response.status == 401) {
|
|
||||||
console.log('NovelAI Access Token is incorrect.');
|
|
||||||
return response_getstatus_novel.send({ error: true });
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
console.log('NovelAI returned an error:', response.statusText);
|
|
||||||
return response_getstatus_novel.send({ error: true });
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
console.log(error);
|
|
||||||
return response_getstatus_novel.send({ error: true });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
app.post("/generate_novelai", jsonParser, async function (request, response_generate_novel) {
|
|
||||||
if (!request.body) return response_generate_novel.sendStatus(400);
|
|
||||||
|
|
||||||
const api_key_novel = readSecret(SECRET_KEYS.NOVEL);
|
|
||||||
|
|
||||||
if (!api_key_novel) {
|
|
||||||
return response_generate_novel.sendStatus(401);
|
|
||||||
}
|
|
||||||
|
|
||||||
const controller = new AbortController();
|
|
||||||
request.socket.removeAllListeners('close');
|
|
||||||
request.socket.on('close', function () {
|
|
||||||
controller.abort();
|
|
||||||
});
|
|
||||||
|
|
||||||
const isNewModel = (request.body.model.includes('clio') || request.body.model.includes('kayra'));
|
|
||||||
const badWordsList = novelai.getBadWordsList(request.body.model);
|
|
||||||
|
|
||||||
// Add customized bad words for Clio and Kayra
|
|
||||||
if (isNewModel && Array.isArray(request.body.bad_words_ids)) {
|
|
||||||
for (const badWord of request.body.bad_words_ids) {
|
|
||||||
if (Array.isArray(badWord) && badWord.every(x => Number.isInteger(x))) {
|
|
||||||
badWordsList.push(badWord);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add default biases for dinkus and asterism
|
|
||||||
const logit_bias_exp = isNewModel ? novelai.logitBiasExp.slice() : [];
|
|
||||||
|
|
||||||
if (Array.isArray(logit_bias_exp) && Array.isArray(request.body.logit_bias_exp)) {
|
|
||||||
logit_bias_exp.push(...request.body.logit_bias_exp);
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = {
|
|
||||||
"input": request.body.input,
|
|
||||||
"model": request.body.model,
|
|
||||||
"parameters": {
|
|
||||||
"use_string": request.body.use_string ?? true,
|
|
||||||
"temperature": request.body.temperature,
|
|
||||||
"max_length": request.body.max_length,
|
|
||||||
"min_length": request.body.min_length,
|
|
||||||
"tail_free_sampling": request.body.tail_free_sampling,
|
|
||||||
"repetition_penalty": request.body.repetition_penalty,
|
|
||||||
"repetition_penalty_range": request.body.repetition_penalty_range,
|
|
||||||
"repetition_penalty_slope": request.body.repetition_penalty_slope,
|
|
||||||
"repetition_penalty_frequency": request.body.repetition_penalty_frequency,
|
|
||||||
"repetition_penalty_presence": request.body.repetition_penalty_presence,
|
|
||||||
"repetition_penalty_whitelist": isNewModel ? novelai.repPenaltyAllowList : null,
|
|
||||||
"top_a": request.body.top_a,
|
|
||||||
"top_p": request.body.top_p,
|
|
||||||
"top_k": request.body.top_k,
|
|
||||||
"typical_p": request.body.typical_p,
|
|
||||||
"mirostat_lr": request.body.mirostat_lr,
|
|
||||||
"mirostat_tau": request.body.mirostat_tau,
|
|
||||||
"cfg_scale": request.body.cfg_scale,
|
|
||||||
"cfg_uc": request.body.cfg_uc,
|
|
||||||
"phrase_rep_pen": request.body.phrase_rep_pen,
|
|
||||||
"stop_sequences": request.body.stop_sequences,
|
|
||||||
"bad_words_ids": badWordsList,
|
|
||||||
"logit_bias_exp": logit_bias_exp,
|
|
||||||
"generate_until_sentence": request.body.generate_until_sentence,
|
|
||||||
"use_cache": request.body.use_cache,
|
|
||||||
"return_full_text": request.body.return_full_text,
|
|
||||||
"prefix": request.body.prefix,
|
|
||||||
"order": request.body.order
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
console.log(util.inspect(data, { depth: 4 }))
|
|
||||||
|
|
||||||
const args = {
|
|
||||||
body: JSON.stringify(data),
|
|
||||||
headers: { "Content-Type": "application/json", "Authorization": "Bearer " + api_key_novel },
|
|
||||||
signal: controller.signal,
|
|
||||||
};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const url = request.body.streaming ? `${API_NOVELAI}/ai/generate-stream` : `${API_NOVELAI}/ai/generate`;
|
|
||||||
const response = await fetch(url, { method: 'POST', timeout: 0, ...args });
|
|
||||||
|
|
||||||
if (request.body.streaming) {
|
|
||||||
// Pipe remote SSE stream to Express response
|
|
||||||
response.body.pipe(response_generate_novel);
|
|
||||||
|
|
||||||
request.socket.on('close', function () {
|
|
||||||
if (response.body instanceof Readable) response.body.destroy(); // Close the remote stream
|
|
||||||
response_generate_novel.end(); // End the Express response
|
|
||||||
});
|
|
||||||
|
|
||||||
response.body.on('end', function () {
|
|
||||||
console.log("Streaming request finished");
|
|
||||||
response_generate_novel.end();
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
if (!response.ok) {
|
|
||||||
const text = await response.text();
|
|
||||||
let message = text;
|
|
||||||
console.log(`Novel API returned error: ${response.status} ${response.statusText} ${text}`);
|
|
||||||
|
|
||||||
try {
|
|
||||||
const data = JSON.parse(text);
|
|
||||||
message = data.message;
|
|
||||||
}
|
|
||||||
catch {
|
|
||||||
// ignore
|
|
||||||
}
|
|
||||||
|
|
||||||
return response_generate_novel.status(response.status).send({ error: { message } });
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json();
|
|
||||||
console.log(data);
|
|
||||||
return response_generate_novel.send(data);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
return response_generate_novel.send({ error: true });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
app.post("/getallchatsofcharacter", jsonParser, function (request, response) {
|
app.post("/getallchatsofcharacter", jsonParser, function (request, response) {
|
||||||
if (!request.body) return response.sendStatus(400);
|
if (!request.body) return response.sendStatus(400);
|
||||||
|
|
||||||
|
@ -4173,136 +4012,6 @@ app.post('/viewsecrets', jsonParser, async (_, response) => {
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
app.post('/api/novelai/generate-image', jsonParser, async (request, response) => {
|
|
||||||
if (!request.body) {
|
|
||||||
return response.sendStatus(400);
|
|
||||||
}
|
|
||||||
|
|
||||||
const key = readSecret(SECRET_KEYS.NOVEL);
|
|
||||||
|
|
||||||
if (!key) {
|
|
||||||
return response.sendStatus(401);
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
console.log('NAI Diffusion request:', request.body);
|
|
||||||
const generateUrl = `${API_NOVELAI}/ai/generate-image`;
|
|
||||||
const generateResult = await fetch(generateUrl, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: {
|
|
||||||
'Authorization': `Bearer ${key}`,
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
},
|
|
||||||
body: JSON.stringify({
|
|
||||||
action: 'generate',
|
|
||||||
input: request.body.prompt,
|
|
||||||
model: request.body.model ?? 'nai-diffusion',
|
|
||||||
parameters: {
|
|
||||||
negative_prompt: request.body.negative_prompt ?? '',
|
|
||||||
height: request.body.height ?? 512,
|
|
||||||
width: request.body.width ?? 512,
|
|
||||||
scale: request.body.scale ?? 9,
|
|
||||||
seed: Math.floor(Math.random() * 9999999999),
|
|
||||||
sampler: request.body.sampler ?? 'k_dpmpp_2m',
|
|
||||||
steps: request.body.steps ?? 28,
|
|
||||||
n_samples: 1,
|
|
||||||
// NAI handholding for prompts
|
|
||||||
ucPreset: 0,
|
|
||||||
qualityToggle: false,
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!generateResult.ok) {
|
|
||||||
console.log('NovelAI returned an error.', generateResult.statusText);
|
|
||||||
return response.sendStatus(500);
|
|
||||||
}
|
|
||||||
|
|
||||||
const archiveBuffer = await generateResult.arrayBuffer();
|
|
||||||
const imageBuffer = await extractFileFromZipBuffer(archiveBuffer, '.png');
|
|
||||||
const originalBase64 = imageBuffer.toString('base64');
|
|
||||||
|
|
||||||
// No upscaling
|
|
||||||
if (isNaN(request.body.upscale_ratio) || request.body.upscale_ratio <= 1) {
|
|
||||||
return response.send(originalBase64);
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
console.debug('Upscaling image...');
|
|
||||||
const upscaleUrl = `${API_NOVELAI}/ai/upscale`;
|
|
||||||
const upscaleResult = await fetch(upscaleUrl, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: {
|
|
||||||
'Authorization': `Bearer ${key}`,
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
},
|
|
||||||
body: JSON.stringify({
|
|
||||||
image: originalBase64,
|
|
||||||
height: request.body.height,
|
|
||||||
width: request.body.width,
|
|
||||||
scale: request.body.upscale_ratio,
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!upscaleResult.ok) {
|
|
||||||
throw new Error('NovelAI returned an error.');
|
|
||||||
}
|
|
||||||
|
|
||||||
const upscaledArchiveBuffer = await upscaleResult.arrayBuffer();
|
|
||||||
const upscaledImageBuffer = await extractFileFromZipBuffer(upscaledArchiveBuffer, '.png');
|
|
||||||
const upscaledBase64 = upscaledImageBuffer.toString('base64');
|
|
||||||
|
|
||||||
return response.send(upscaledBase64);
|
|
||||||
} catch (error) {
|
|
||||||
console.warn('NovelAI generated an image, but upscaling failed. Returning original image.');
|
|
||||||
return response.send(originalBase64)
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
console.log(error);
|
|
||||||
return response.sendStatus(500);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
app.post('/novel_tts', jsonParser, async (request, response) => {
|
|
||||||
const token = readSecret(SECRET_KEYS.NOVEL);
|
|
||||||
|
|
||||||
if (!token) {
|
|
||||||
return response.sendStatus(401);
|
|
||||||
}
|
|
||||||
|
|
||||||
const text = request.body.text;
|
|
||||||
const voice = request.body.voice;
|
|
||||||
|
|
||||||
if (!text || !voice) {
|
|
||||||
return response.sendStatus(400);
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const url = `${API_NOVELAI}/ai/generate-voice?text=${encodeURIComponent(text)}&voice=-1&seed=${encodeURIComponent(voice)}&opus=false&version=v2`;
|
|
||||||
const result = await fetch(url, {
|
|
||||||
method: 'GET',
|
|
||||||
headers: {
|
|
||||||
'Authorization': `Bearer ${token}`,
|
|
||||||
'Accept': 'audio/mpeg',
|
|
||||||
},
|
|
||||||
timeout: 0,
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!result.ok) {
|
|
||||||
return response.sendStatus(result.status);
|
|
||||||
}
|
|
||||||
|
|
||||||
const chunks = await readAllChunks(result.body);
|
|
||||||
const buffer = Buffer.concat(chunks);
|
|
||||||
response.setHeader('Content-Type', 'audio/mpeg');
|
|
||||||
return response.send(buffer);
|
|
||||||
}
|
|
||||||
catch (error) {
|
|
||||||
console.error(error);
|
|
||||||
return response.sendStatus(500);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
app.post('/delete_sprite', jsonParser, async (request, response) => {
|
app.post('/delete_sprite', jsonParser, async (request, response) => {
|
||||||
const label = request.body.label;
|
const label = request.body.label;
|
||||||
const name = request.body.name;
|
const name = request.body.name;
|
||||||
|
@ -4460,45 +4169,6 @@ app.post('/import_custom', jsonParser, async (request, response) => {
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
/**
|
|
||||||
* Extracts a file with given extension from an ArrayBuffer containing a ZIP archive.
|
|
||||||
* @param {ArrayBuffer} archiveBuffer Buffer containing a ZIP archive
|
|
||||||
* @param {string} fileExtension File extension to look for
|
|
||||||
* @returns {Promise<Buffer>} Buffer containing the extracted file
|
|
||||||
*/
|
|
||||||
async function extractFileFromZipBuffer(archiveBuffer, fileExtension) {
|
|
||||||
return await new Promise((resolve, reject) => yauzl.fromBuffer(Buffer.from(archiveBuffer), { lazyEntries: true }, (err, zipfile) => {
|
|
||||||
if (err) {
|
|
||||||
reject(err);
|
|
||||||
}
|
|
||||||
|
|
||||||
zipfile.readEntry();
|
|
||||||
zipfile.on('entry', (entry) => {
|
|
||||||
if (entry.fileName.endsWith(fileExtension)) {
|
|
||||||
console.log(`Extracting ${entry.fileName}`);
|
|
||||||
zipfile.openReadStream(entry, (err, readStream) => {
|
|
||||||
if (err) {
|
|
||||||
reject(err);
|
|
||||||
} else {
|
|
||||||
const chunks = [];
|
|
||||||
readStream.on('data', (chunk) => {
|
|
||||||
chunks.push(chunk);
|
|
||||||
});
|
|
||||||
|
|
||||||
readStream.on('end', () => {
|
|
||||||
const buffer = Buffer.concat(chunks);
|
|
||||||
resolve(buffer);
|
|
||||||
zipfile.readEntry(); // Continue to the next entry
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
zipfile.readEntry();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
|
|
||||||
async function downloadChubLorebook(id) {
|
async function downloadChubLorebook(id) {
|
||||||
const result = await fetch('https://api.chub.ai/api/lorebooks/download', {
|
const result = await fetch('https://api.chub.ai/api/lorebooks/download', {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
|
@ -4646,78 +4316,6 @@ function importRisuSprites(data) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
async function readAllChunks(readableStream) {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
// Consume the readable stream
|
|
||||||
const chunks = [];
|
|
||||||
readableStream.on('data', (chunk) => {
|
|
||||||
chunks.push(chunk);
|
|
||||||
});
|
|
||||||
|
|
||||||
readableStream.on('end', () => {
|
|
||||||
//console.log('Finished reading the stream.');
|
|
||||||
resolve(chunks);
|
|
||||||
});
|
|
||||||
|
|
||||||
readableStream.on('error', (error) => {
|
|
||||||
console.error('Error while reading the stream:', error);
|
|
||||||
reject();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async function getImageBuffers(zipFilePath) {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
// Check if the zip file exists
|
|
||||||
if (!fs.existsSync(zipFilePath)) {
|
|
||||||
reject(new Error('File not found'));
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const imageBuffers = [];
|
|
||||||
|
|
||||||
yauzl.open(zipFilePath, { lazyEntries: true }, (err, zipfile) => {
|
|
||||||
if (err) {
|
|
||||||
reject(err);
|
|
||||||
} else {
|
|
||||||
zipfile.readEntry();
|
|
||||||
zipfile.on('entry', (entry) => {
|
|
||||||
const mimeType = mime.lookup(entry.fileName);
|
|
||||||
if (mimeType && mimeType.startsWith('image/') && !entry.fileName.startsWith('__MACOSX')) {
|
|
||||||
console.log(`Extracting ${entry.fileName}`);
|
|
||||||
zipfile.openReadStream(entry, (err, readStream) => {
|
|
||||||
if (err) {
|
|
||||||
reject(err);
|
|
||||||
} else {
|
|
||||||
const chunks = [];
|
|
||||||
readStream.on('data', (chunk) => {
|
|
||||||
chunks.push(chunk);
|
|
||||||
});
|
|
||||||
|
|
||||||
readStream.on('end', () => {
|
|
||||||
imageBuffers.push([path.parse(entry.fileName).base, Buffer.concat(chunks)]);
|
|
||||||
zipfile.readEntry(); // Continue to the next entry
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
zipfile.readEntry(); // Continue to the next entry
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
zipfile.on('end', () => {
|
|
||||||
resolve(imageBuffers);
|
|
||||||
});
|
|
||||||
|
|
||||||
zipfile.on('error', (err) => {
|
|
||||||
reject(err);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This function extracts the extension information from the manifest file.
|
* This function extracts the extension information from the manifest file.
|
||||||
* @param {string} extensionPath - The path of the extension folder
|
* @param {string} extensionPath - The path of the extension folder
|
||||||
|
@ -5157,6 +4755,9 @@ app.post('/get_character_assets_list', jsonParser, async (request, response) =>
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// NovelAI generation
|
||||||
|
require('./src/novelai').registerEndpoints(app, jsonParser);
|
||||||
|
|
||||||
// Stable Diffusion generation
|
// Stable Diffusion generation
|
||||||
require('./src/stable-diffusion').registerEndpoints(app, jsonParser);
|
require('./src/stable-diffusion').registerEndpoints(app, jsonParser);
|
||||||
|
|
||||||
|
|
310
src/novelai.js
310
src/novelai.js
|
@ -1,6 +1,14 @@
|
||||||
|
const fetch = require('node-fetch').default;
|
||||||
|
const util = require('util');
|
||||||
|
const { Readable } = require('stream');
|
||||||
|
const { readSecret, SECRET_KEYS } = require('./secrets');
|
||||||
|
const { readAllChunks, extractFileFromZipBuffer } = require('./util');
|
||||||
|
|
||||||
|
const API_NOVELAI = "https://api.novelai.net";
|
||||||
|
|
||||||
// Ban bracket generation, plus defaults
|
// Ban bracket generation, plus defaults
|
||||||
const badWordsList = [
|
const badWordsList = [
|
||||||
[3], [49356], [1431], [31715], [34387], [20765], [30702], [10691], [49333], [1266],
|
[3], [49356], [1431], [31715], [34387], [20765], [30702], [10691], [49333], [1266],
|
||||||
[19438], [43145], [26523], [41471], [2936], [85, 85], [49332], [7286], [1115]
|
[19438], [43145], [26523], [41471], [2936], [85, 85], [49332], [7286], [1115]
|
||||||
]
|
]
|
||||||
|
|
||||||
|
@ -38,7 +46,7 @@ const logitBiasExp = [
|
||||||
]
|
]
|
||||||
|
|
||||||
const hypeBotLogitBiasExp = [
|
const hypeBotLogitBiasExp = [
|
||||||
{ "sequence": [8162], "bias": -0.12, "ensure_sequence_finish": false, "generate_once": false},
|
{ "sequence": [8162], "bias": -0.12, "ensure_sequence_finish": false, "generate_once": false },
|
||||||
{ "sequence": [46256, 224], "bias": -0.12, "ensure_sequence_finish": false, "generate_once": false }
|
{ "sequence": [46256, 224], "bias": -0.12, "ensure_sequence_finish": false, "generate_once": false }
|
||||||
];
|
];
|
||||||
|
|
||||||
|
@ -57,11 +65,297 @@ function getBadWordsList(model) {
|
||||||
return list.slice();
|
return list.slice();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Registers NovelAI API endpoints.
|
||||||
|
* @param {import('express').Express} app - Express app
|
||||||
|
* @param {any} jsonParser - JSON parser middleware
|
||||||
|
*/
|
||||||
|
function registerEndpoints(app, jsonParser) {
|
||||||
|
app.post("/api/novelai/status", jsonParser, async function (req, res) {
|
||||||
|
if (!req.body) return res.sendStatus(400);
|
||||||
|
const api_key_novel = readSecret(SECRET_KEYS.NOVEL);
|
||||||
|
|
||||||
|
if (!api_key_novel) {
|
||||||
|
return res.sendStatus(401);
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(API_NOVELAI + "/user/subscription", {
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'Authorization': "Bearer " + api_key_novel,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (response.ok) {
|
||||||
|
const data = await response.json();
|
||||||
|
return res.send(data);
|
||||||
|
} else if (response.status == 401) {
|
||||||
|
console.log('NovelAI Access Token is incorrect.');
|
||||||
|
return res.send({ error: true });
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
console.log('NovelAI returned an error:', response.statusText);
|
||||||
|
return res.send({ error: true });
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.log(error);
|
||||||
|
return res.send({ error: true });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
app.post("/api/novelai/generate", jsonParser, async function (req, res) {
|
||||||
|
if (!req.body) return res.sendStatus(400);
|
||||||
|
|
||||||
|
const api_key_novel = readSecret(SECRET_KEYS.NOVEL);
|
||||||
|
|
||||||
|
if (!api_key_novel) {
|
||||||
|
return res.sendStatus(401);
|
||||||
|
}
|
||||||
|
|
||||||
|
const controller = new AbortController();
|
||||||
|
req.socket.removeAllListeners('close');
|
||||||
|
req.socket.on('close', function () {
|
||||||
|
controller.abort();
|
||||||
|
});
|
||||||
|
|
||||||
|
const isNewModel = (req.body.model.includes('clio') || req.body.model.includes('kayra'));
|
||||||
|
const badWordsList = getBadWordsList(req.body.model);
|
||||||
|
|
||||||
|
// Add customized bad words for Clio and Kayra
|
||||||
|
if (isNewModel && Array.isArray(req.body.bad_words_ids)) {
|
||||||
|
for (const badWord of req.body.bad_words_ids) {
|
||||||
|
if (Array.isArray(badWord) && badWord.every(x => Number.isInteger(x))) {
|
||||||
|
badWordsList.push(badWord);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add default biases for dinkus and asterism
|
||||||
|
const logit_bias_exp = isNewModel ? logitBiasExp.slice() : [];
|
||||||
|
|
||||||
|
if (Array.isArray(logit_bias_exp) && Array.isArray(req.body.logit_bias_exp)) {
|
||||||
|
logit_bias_exp.push(...req.body.logit_bias_exp);
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = {
|
||||||
|
"input": req.body.input,
|
||||||
|
"model": req.body.model,
|
||||||
|
"parameters": {
|
||||||
|
"use_string": req.body.use_string ?? true,
|
||||||
|
"temperature": req.body.temperature,
|
||||||
|
"max_length": req.body.max_length,
|
||||||
|
"min_length": req.body.min_length,
|
||||||
|
"tail_free_sampling": req.body.tail_free_sampling,
|
||||||
|
"repetition_penalty": req.body.repetition_penalty,
|
||||||
|
"repetition_penalty_range": req.body.repetition_penalty_range,
|
||||||
|
"repetition_penalty_slope": req.body.repetition_penalty_slope,
|
||||||
|
"repetition_penalty_frequency": req.body.repetition_penalty_frequency,
|
||||||
|
"repetition_penalty_presence": req.body.repetition_penalty_presence,
|
||||||
|
"repetition_penalty_whitelist": isNewModel ? repPenaltyAllowList : null,
|
||||||
|
"top_a": req.body.top_a,
|
||||||
|
"top_p": req.body.top_p,
|
||||||
|
"top_k": req.body.top_k,
|
||||||
|
"typical_p": req.body.typical_p,
|
||||||
|
"mirostat_lr": req.body.mirostat_lr,
|
||||||
|
"mirostat_tau": req.body.mirostat_tau,
|
||||||
|
"cfg_scale": req.body.cfg_scale,
|
||||||
|
"cfg_uc": req.body.cfg_uc,
|
||||||
|
"phrase_rep_pen": req.body.phrase_rep_pen,
|
||||||
|
"stop_sequences": req.body.stop_sequences,
|
||||||
|
"bad_words_ids": badWordsList,
|
||||||
|
"logit_bias_exp": logit_bias_exp,
|
||||||
|
"generate_until_sentence": req.body.generate_until_sentence,
|
||||||
|
"use_cache": req.body.use_cache,
|
||||||
|
"return_full_text": req.body.return_full_text,
|
||||||
|
"prefix": req.body.prefix,
|
||||||
|
"order": req.body.order
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
console.log(util.inspect(data, { depth: 4 }))
|
||||||
|
|
||||||
|
const args = {
|
||||||
|
body: JSON.stringify(data),
|
||||||
|
headers: { "Content-Type": "application/json", "Authorization": "Bearer " + api_key_novel },
|
||||||
|
signal: controller.signal,
|
||||||
|
};
|
||||||
|
|
||||||
|
try {
|
||||||
|
const url = req.body.streaming ? `${API_NOVELAI}/ai/generate-stream` : `${API_NOVELAI}/ai/generate`;
|
||||||
|
const response = await fetch(url, { method: 'POST', timeout: 0, ...args });
|
||||||
|
|
||||||
|
if (req.body.streaming) {
|
||||||
|
// Pipe remote SSE stream to Express response
|
||||||
|
response.body.pipe(res);
|
||||||
|
|
||||||
|
req.socket.on('close', function () {
|
||||||
|
if (response.body instanceof Readable) response.body.destroy(); // Close the remote stream
|
||||||
|
res.end(); // End the Express response
|
||||||
|
});
|
||||||
|
|
||||||
|
response.body.on('end', function () {
|
||||||
|
console.log("Streaming request finished");
|
||||||
|
res.end();
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
if (!response.ok) {
|
||||||
|
const text = await response.text();
|
||||||
|
let message = text;
|
||||||
|
console.log(`Novel API returned error: ${response.status} ${response.statusText} ${text}`);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const data = JSON.parse(text);
|
||||||
|
message = data.message;
|
||||||
|
}
|
||||||
|
catch {
|
||||||
|
// ignore
|
||||||
|
}
|
||||||
|
|
||||||
|
return res.status(response.status).send({ error: { message } });
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = await response.json();
|
||||||
|
console.log(data);
|
||||||
|
return res.send(data);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
return res.send({ error: true });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// POST /api/novelai/generate-image
// Proxies a NAI Diffusion image request to NovelAI, optionally upscaling the
// result. Responds with the PNG as a base64 string; 400 on missing body,
// 401 on missing API key, 500 on upstream failure.
app.post('/api/novelai/generate-image', jsonParser, async (request, response) => {
    if (!request.body) {
        return response.sendStatus(400);
    }

    const key = readSecret(SECRET_KEYS.NOVEL);

    if (!key) {
        return response.sendStatus(401);
    }

    try {
        console.log('NAI Diffusion request:', request.body);
        const generateUrl = `${API_NOVELAI}/ai/generate-image`;
        const generateResult = await fetch(generateUrl, {
            method: 'POST',
            headers: {
                'Authorization': `Bearer ${key}`,
                'Content-Type': 'application/json',
            },
            body: JSON.stringify({
                action: 'generate',
                input: request.body.prompt,
                model: request.body.model ?? 'nai-diffusion',
                parameters: {
                    negative_prompt: request.body.negative_prompt ?? '',
                    height: request.body.height ?? 512,
                    width: request.body.width ?? 512,
                    scale: request.body.scale ?? 9,
                    seed: Math.floor(Math.random() * 9999999999),
                    sampler: request.body.sampler ?? 'k_dpmpp_2m',
                    steps: request.body.steps ?? 28,
                    n_samples: 1,
                    // NAI handholding for prompts
                    ucPreset: 0,
                    qualityToggle: false,
                },
            }),
        });

        if (!generateResult.ok) {
            console.log('NovelAI returned an error.', generateResult.statusText);
            return response.sendStatus(500);
        }

        // NovelAI returns a ZIP archive; pull the first PNG out of it.
        const archiveBuffer = await generateResult.arrayBuffer();
        const imageBuffer = await extractFileFromZipBuffer(archiveBuffer, '.png');
        const originalBase64 = imageBuffer.toString('base64');

        // No upscaling requested (or an invalid ratio) — return the image as-is.
        if (isNaN(request.body.upscale_ratio) || request.body.upscale_ratio <= 1) {
            return response.send(originalBase64);
        }

        try {
            console.debug('Upscaling image...');
            const upscaleUrl = `${API_NOVELAI}/ai/upscale`;
            const upscaleResult = await fetch(upscaleUrl, {
                method: 'POST',
                headers: {
                    'Authorization': `Bearer ${key}`,
                    'Content-Type': 'application/json',
                },
                body: JSON.stringify({
                    image: originalBase64,
                    height: request.body.height,
                    width: request.body.width,
                    scale: request.body.upscale_ratio,
                }),
            });

            if (!upscaleResult.ok) {
                throw new Error('NovelAI returned an error.');
            }

            const upscaledArchiveBuffer = await upscaleResult.arrayBuffer();
            const upscaledImageBuffer = await extractFileFromZipBuffer(upscaledArchiveBuffer, '.png');
            const upscaledBase64 = upscaledImageBuffer.toString('base64');

            return response.send(upscaledBase64);
        } catch (error) {
            // Upscaling is best-effort: fall back to the original image, but
            // include the error so the failure reason isn't silently discarded.
            console.warn('NovelAI generated an image, but upscaling failed. Returning original image.', error);
            return response.send(originalBase64);
        }
    } catch (error) {
        console.log(error);
        return response.sendStatus(500);
    }
});
|
||||||
|
|
||||||
|
// POST /api/novelai/generate-voice
// Proxies a NovelAI TTS request and streams back the resulting MPEG audio.
// Responds 401 without an API key, 400 without text/voice, 500 on errors.
app.post('/api/novelai/generate-voice', jsonParser, async (request, response) => {
    const token = readSecret(SECRET_KEYS.NOVEL);

    if (!token) {
        return response.sendStatus(401);
    }

    const { text, voice } = request.body;

    if (!text || !voice) {
        return response.sendStatus(400);
    }

    try {
        // NovelAI's v2 voice endpoint takes the seed as the voice selector.
        const url = `${API_NOVELAI}/ai/generate-voice?text=${encodeURIComponent(text)}&voice=-1&seed=${encodeURIComponent(voice)}&opus=false&version=v2`;
        const result = await fetch(url, {
            method: 'GET',
            headers: {
                'Authorization': `Bearer ${token}`,
                'Accept': 'audio/mpeg',
            },
            timeout: 0,
        });

        if (!result.ok) {
            // Mirror the upstream status code to the client.
            return response.sendStatus(result.status);
        }

        // Buffer the whole audio stream before replying.
        const audioChunks = await readAllChunks(result.body);
        const audioBuffer = Buffer.concat(audioChunks);
        response.setHeader('Content-Type', 'audio/mpeg');
        return response.send(audioBuffer);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});
|
||||||
|
}
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
badWordsList,
|
registerEndpoints,
|
||||||
repPenaltyAllowList,
|
|
||||||
logitBiasExp,
|
|
||||||
hypeBotBadWordsList,
|
|
||||||
hypeBotLogitBiasExp,
|
|
||||||
getBadWordsList,
|
|
||||||
};
|
};
|
||||||
|
|
126
src/util.js
126
src/util.js
|
@ -1,7 +1,10 @@
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
|
const fs = require('fs');
|
||||||
const child_process = require('child_process');
|
const child_process = require('child_process');
|
||||||
const commandExistsSync = require('command-exists').sync;
|
const commandExistsSync = require('command-exists').sync;
|
||||||
const _ = require('lodash');
|
const _ = require('lodash');
|
||||||
|
const yauzl = require('yauzl');
|
||||||
|
const mime = require('mime-types');
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns the config object from the config.conf file.
|
* Returns the config object from the config.conf file.
|
||||||
|
@ -77,10 +80,133 @@ function delay(ms) {
|
||||||
return new Promise(resolve => setTimeout(resolve, ms));
|
return new Promise(resolve => setTimeout(resolve, ms));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
 * Extracts a file with given extension from an ArrayBuffer containing a ZIP archive.
 * Resolves with the contents of the first entry whose name ends with the extension.
 * @param {ArrayBuffer} archiveBuffer Buffer containing a ZIP archive
 * @param {string} fileExtension File extension to look for (e.g. '.png')
 * @returns {Promise<Buffer>} Buffer containing the extracted file
 */
async function extractFileFromZipBuffer(archiveBuffer, fileExtension) {
    return await new Promise((resolve, reject) => yauzl.fromBuffer(Buffer.from(archiveBuffer), { lazyEntries: true }, (err, zipfile) => {
        if (err) {
            // Bail out: without a zipfile handle, calling readEntry() below
            // would throw on undefined (previously missing this return).
            return reject(err);
        }

        let found = false;

        zipfile.on('entry', (entry) => {
            if (!found && entry.fileName.endsWith(fileExtension)) {
                found = true;
                console.log(`Extracting ${entry.fileName}`);
                zipfile.openReadStream(entry, (streamErr, readStream) => {
                    if (streamErr) {
                        reject(streamErr);
                    } else {
                        const chunks = [];
                        readStream.on('data', (chunk) => chunks.push(chunk));
                        readStream.on('end', () => resolve(Buffer.concat(chunks)));
                        readStream.on('error', reject);
                    }
                });
            } else {
                zipfile.readEntry();
            }
        });

        // Previously the promise never settled when no entry matched,
        // leaving callers hanging forever.
        zipfile.on('end', () => {
            if (!found) {
                reject(new Error(`No file with extension ${fileExtension} found in the archive`));
            }
        });

        zipfile.on('error', reject);

        zipfile.readEntry();
    }));
}
|
||||||
|
|
||||||
|
/**
 * Extracts all images from a ZIP archive.
 * @param {string} zipFilePath Path to the ZIP archive
 * @returns {Promise<[string, Buffer][]>} Array of [file name, image buffer] pairs
 */
async function getImageBuffers(zipFilePath) {
    return new Promise((resolve, reject) => {
        // Fail fast when the archive itself is missing.
        if (!fs.existsSync(zipFilePath)) {
            reject(new Error('File not found'));
            return;
        }

        const imageBuffers = [];

        yauzl.open(zipFilePath, { lazyEntries: true }, (err, zipfile) => {
            if (err) {
                reject(err);
                return;
            }

            zipfile.on('entry', (entry) => {
                const mimeType = mime.lookup(entry.fileName);
                const isImage = mimeType && mimeType.startsWith('image/');

                // Skip non-images and macOS resource-fork entries.
                if (!isImage || entry.fileName.startsWith('__MACOSX')) {
                    zipfile.readEntry();
                    return;
                }

                console.log(`Extracting ${entry.fileName}`);
                zipfile.openReadStream(entry, (streamErr, readStream) => {
                    if (streamErr) {
                        reject(streamErr);
                        return;
                    }

                    const chunks = [];
                    readStream.on('data', (chunk) => chunks.push(chunk));
                    readStream.on('end', () => {
                        imageBuffers.push([path.parse(entry.fileName).base, Buffer.concat(chunks)]);
                        zipfile.readEntry(); // Continue to the next entry
                    });
                });
            });

            zipfile.on('end', () => resolve(imageBuffers));
            zipfile.on('error', (zipErr) => reject(zipErr));

            zipfile.readEntry();
        });
    });
}
|
||||||
|
|
||||||
|
/**
 * Gets all chunks of data from the given readable stream.
 * @param {any} readableStream Readable stream to read from
 * @returns {Promise<Buffer[]>} Array of chunks
 */
async function readAllChunks(readableStream) {
    return new Promise((resolve, reject) => {
        // Consume the readable stream
        const chunks = [];
        readableStream.on('data', (chunk) => {
            chunks.push(chunk);
        });

        readableStream.on('end', () => {
            resolve(chunks);
        });

        readableStream.on('error', (error) => {
            console.error('Error while reading the stream:', error);
            // Propagate the actual error instead of rejecting with undefined,
            // so callers can inspect what went wrong.
            reject(error);
        });
    });
}
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
getConfig,
|
getConfig,
|
||||||
getConfigValue,
|
getConfigValue,
|
||||||
getVersion,
|
getVersion,
|
||||||
getBasicAuthHeader,
|
getBasicAuthHeader,
|
||||||
|
extractFileFromZipBuffer,
|
||||||
|
getImageBuffers,
|
||||||
|
readAllChunks,
|
||||||
delay,
|
delay,
|
||||||
};
|
};
|
||||||
|
|
Loading…
Reference in New Issue