Merge branch 'staging' into DarokCx/release
@@ -147,6 +147,11 @@ function getKoboldCppHeaders(directories) {
     }) : {};
 }
 
+/**
+ * Gets the headers for the Featherless API.
+ * @param {import('./users').UserDirectoryList} directories
+ * @returns {object} Headers for the request
+ */
 function getFeatherlessHeaders(directories) {
     const apiKey = readSecret(directories, SECRET_KEYS.FEATHERLESS);
 
@@ -155,6 +160,19 @@ function getFeatherlessHeaders(directories) {
     }) : {};
 }
 
+/**
+ * Gets the headers for the HuggingFace API.
+ * @param {import('./users').UserDirectoryList} directories
+ * @returns {object} Headers for the request
+ */
+function getHuggingFaceHeaders(directories) {
+    const apiKey = readSecret(directories, SECRET_KEYS.HUGGINGFACE);
+
+    return apiKey ? ({
+        'Authorization': `Bearer ${apiKey}`,
+    }) : {};
+}
+
 function getOverrideHeaders(urlHost) {
     const requestOverrides = getConfigValue('requestOverrides', []);
     const overrideHeaders = requestOverrides?.find((e) => e.hosts?.includes(urlHost))?.headers;
@@ -196,6 +214,7 @@ function setAdditionalHeadersByType(requestHeaders, type, server, directories) {
         [TEXTGEN_TYPES.KOBOLDCPP]: getKoboldCppHeaders,
         [TEXTGEN_TYPES.LLAMACPP]: getLlamaCppHeaders,
         [TEXTGEN_TYPES.FEATHERLESS]: getFeatherlessHeaders,
+        [TEXTGEN_TYPES.HUGGINGFACE]: getHuggingFaceHeaders,
     };
 
     const getHeaders = headerGetters[type];
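For orientation, a minimal usage sketch of the dispatch above (illustrative only: the `requestHeaders` object, the server URL, and the `directories` value are made up; the functions are the ones shown in this hunk):

```js
// Hypothetical call site: resolve headers for a HuggingFace endpoint.
// With an api_key_huggingface secret stored, the getter yields
// { Authorization: 'Bearer <key>' }; with no secret, it contributes nothing.
const requestHeaders = { 'Content-Type': 'application/json' };
setAdditionalHeadersByType(requestHeaders, TEXTGEN_TYPES.HUGGINGFACE, 'https://example.endpoints.huggingface.cloud', directories);
```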
@@ -194,9 +194,13 @@ const CHAT_COMPLETION_SOURCES = {
     COHERE: 'cohere',
     PERPLEXITY: 'perplexity',
     GROQ: 'groq',
+    ZEROONEAI: '01ai',
 };
 
-const UPLOADS_PATH = './uploads';
+/**
+ * Path to multer file uploads under the data root.
+ */
+const UPLOADS_DIRECTORY = '_uploads';
 
 // TODO: this is copied from the client code; there should be a way to de-duplicate it eventually
 const TEXTGEN_TYPES = {
@@ -213,6 +217,7 @@ const TEXTGEN_TYPES = {
     DREAMGEN: 'dreamgen',
     OPENROUTER: 'openrouter',
     FEATHERLESS: 'featherless',
+    HUGGINGFACE: 'huggingface',
 };
 
 const INFERMATICAI_KEYS = [
@@ -225,6 +230,17 @@ const INFERMATICAI_KEYS = [
     'repetition_penalty',
     'stream',
     'stop',
+    'presence_penalty',
+    'frequency_penalty',
+    'min_p',
+    'seed',
+    'ignore_eos',
+    'n',
+    'best_of',
+    'min_tokens',
+    'spaces_between_special_tokens',
+    'skip_special_tokens',
+    'logprobs',
 ];
 
 const FEATHERLESS_KEYS = [
@@ -303,6 +319,7 @@ const TOGETHERAI_KEYS = [
 // https://github.com/jmorganca/ollama/blob/main/docs/api.md#request-with-options
 const OLLAMA_KEYS = [
     'num_predict',
+    'num_ctx',
     'stop',
     'temperature',
     'repeat_penalty',
@@ -396,7 +413,7 @@ module.exports = {
     PUBLIC_DIRECTORIES,
     USER_DIRECTORY_TEMPLATE,
     UNSAFE_EXTENSIONS,
-    UPLOADS_PATH,
+    UPLOADS_DIRECTORY,
     GEMINI_SAFETY,
     BISON_SAFETY,
     TEXTGEN_TYPES,
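The `*_KEYS` arrays act as per-backend whitelists of sampler settings that are allowed through to the upstream API, so the new InfermaticAI and Ollama entries widen what survives. A sketch of the filtering pattern under that assumption (the helper is hypothetical, not part of this diff):

```js
// Hypothetical illustration of how a whitelist like OLLAMA_KEYS is applied:
// only recognized keys are copied into the upstream request body.
function pickSupportedKeys(body, allowedKeys) {
    return Object.fromEntries(Object.entries(body).filter(([key]) => allowedKeys.includes(key)));
}

pickSupportedKeys({ temperature: 0.7, num_ctx: 8192, unknown_knob: 1 }, OLLAMA_KEYS);
// -> { temperature: 0.7, num_ctx: 8192 }
```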
@@ -45,7 +45,8 @@ router.post('/caption-image', jsonParser, async (request, response) => {
         });
 
         if (!result.ok) {
-            console.log(`Claude API returned error: ${result.status} ${result.statusText}`);
+            const text = await result.text();
+            console.log(`Claude API returned error: ${result.status} ${result.statusText}`, text);
             return response.status(result.status).send({ error: true });
         }
 
@@ -4,7 +4,7 @@ const fs = require('fs');
 const sanitize = require('sanitize-filename');
 const writeFileAtomicSync = require('write-file-atomic').sync;
 const { jsonParser, urlencodedParser } = require('../express-common');
-const { AVATAR_WIDTH, AVATAR_HEIGHT, UPLOADS_PATH } = require('../constants');
+const { AVATAR_WIDTH, AVATAR_HEIGHT } = require('../constants');
 const { getImages, tryParse } = require('../util');
 
 // image processing related library imports
@@ -39,7 +39,7 @@ router.post('/upload', urlencodedParser, async (request, response) => {
     if (!request.file) return response.sendStatus(400);
 
     try {
-        const pathToUpload = path.join(UPLOADS_PATH, request.file.filename);
+        const pathToUpload = path.join(request.file.destination, request.file.filename);
         const crop = tryParse(request.query.crop);
         let rawImg = await jimp.read(pathToUpload);
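This `UPLOADS_PATH` → `request.file.destination` substitution recurs throughout the diff: instead of a hard-coded global `./uploads`, handlers now trust wherever multer actually wrote the file. A sketch of the wiring this implies (hypothetical; the multer setup itself is not part of this diff):

```js
// Hypothetical multer configuration consistent with the change: uploads land
// under the data root (see UPLOADS_DIRECTORY in the constants hunk above).
const path = require('path');
const multer = require('multer');
const upload = multer({ dest: path.join(dataRoot, '_uploads') }); // dataRoot is assumed
// Handlers then rebuild the on-disk path from what multer reports:
// path.join(request.file.destination, request.file.filename)
```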
@@ -17,6 +17,7 @@ const API_COHERE = 'https://api.cohere.ai/v1';
 const API_PERPLEXITY = 'https://api.perplexity.ai';
 const API_GROQ = 'https://api.groq.com/openai/v1';
 const API_MAKERSUITE = 'https://generativelanguage.googleapis.com';
+const API_01AI = 'https://api.01.ai/v1';
 
 /**
  * Applies a post-processing step to the generated messages.
@@ -119,7 +120,7 @@ async function sendClaudeRequest(request, response) {
         let use_system_prompt = (request.body.model.startsWith('claude-2') || request.body.model.startsWith('claude-3')) && request.body.claude_use_sysprompt;
         let converted_prompt = convertClaudeMessages(request.body.messages, request.body.assistant_prefill, use_system_prompt, request.body.human_sysprompt_message, request.body.char_name, request.body.user_name);
         // Add custom stop sequences
-        const stopSequences = ['\n\nHuman:', '\n\nSystem:', '\n\nAssistant:'];
+        const stopSequences = [];
         if (Array.isArray(request.body.stop)) {
             stopSequences.push(...request.body.stop);
         }
@@ -670,6 +671,10 @@ router.post('/status', jsonParser, async function (request, response_getstatus_openai) {
         api_url = API_COHERE;
         api_key_openai = readSecret(request.user.directories, SECRET_KEYS.COHERE);
         headers = {};
+    } else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.ZEROONEAI) {
+        api_url = API_01AI;
+        api_key_openai = readSecret(request.user.directories, SECRET_KEYS.ZEROONEAI);
+        headers = {};
     } else {
         console.log('This chat completion source is not supported yet.');
         return response_getstatus_openai.status(400).send({ error: true });
@@ -931,6 +936,11 @@ router.post('/generate', jsonParser, function (request, response) {
                 request.body.tool_choice = 'none';
             }
         }
+    } else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.ZEROONEAI) {
+        apiUrl = API_01AI;
+        apiKey = readSecret(request.user.directories, SECRET_KEYS.ZEROONEAI);
+        headers = {};
+        bodyParams = {};
     } else {
         console.log('This chat completion source is not supported yet.');
         return response.status(400).send({ error: true });
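Client-side, the new source becomes reachable through the same status route as the others; a hedged sketch (the `/api/backends/chat-completions` mount prefix and the payload shape are assumptions for illustration):

```js
// Hypothetical connectivity check for the new 01.AI source.
const response = await fetch('/api/backends/chat-completions/status', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ chat_completion_source: '01ai' }),
});
console.log(response.ok ? 'reachable' : 'not configured');
```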
@@ -95,13 +95,14 @@ router.post('/status', jsonParser, async function (request, response) {
 
     setAdditionalHeaders(request, args, baseUrl);
 
+    const apiType = request.body.api_type;
     let url = baseUrl;
     let result = '';
 
     if (request.body.legacy_api) {
         url += '/v1/model';
     } else {
-        switch (request.body.api_type) {
+        switch (apiType) {
             case TEXTGEN_TYPES.OOBA:
             case TEXTGEN_TYPES.VLLM:
             case TEXTGEN_TYPES.APHRODITE:
@@ -129,6 +130,9 @@ router.post('/status', jsonParser, async function (request, response) {
             case TEXTGEN_TYPES.FEATHERLESS:
                 url += '/v1/models';
                 break;
+            case TEXTGEN_TYPES.HUGGINGFACE:
+                url += '/info';
+                break;
         }
     }
 
@@ -138,7 +142,7 @@ router.post('/status', jsonParser, async function (request, response) {
         console.log('Models endpoint is offline.');
         return response.status(400);
     }
-    console.log("url for models", url)
+    console.log('url for models', url);
     let data = await modelsReply.json();
 
     if (request.body.legacy_api) {
@@ -147,14 +151,18 @@ router.post('/status', jsonParser, async function (request, response) {
     }
 
     // Rewrap to OAI-like response
-    if (request.body.api_type === TEXTGEN_TYPES.TOGETHERAI && Array.isArray(data)) {
+    if (apiType === TEXTGEN_TYPES.TOGETHERAI && Array.isArray(data)) {
         data = { data: data.map(x => ({ id: x.name, ...x })) };
     }
 
-    if (request.body.api_type === TEXTGEN_TYPES.OLLAMA && Array.isArray(data.models)) {
+    if (apiType === TEXTGEN_TYPES.OLLAMA && Array.isArray(data.models)) {
         data = { data: data.models.map(x => ({ id: x.name, ...x })) };
     }
 
+    if (apiType === TEXTGEN_TYPES.HUGGINGFACE) {
+        data = { data: [] };
+    }
+
     if (!Array.isArray(data.data)) {
         console.log('Models response is not an array.');
         return response.status(400);
@@ -166,7 +174,7 @@ router.post('/status', jsonParser, async function (request, response) {
     // Set result to the first model ID
     result = modelIds[0] || 'Valid';
 
-    if (request.body.api_type === TEXTGEN_TYPES.OOBA) {
+    if (apiType === TEXTGEN_TYPES.OOBA) {
         try {
             const modelInfoUrl = baseUrl + '/v1/internal/model/info';
             const modelInfoReply = await fetch(modelInfoUrl, args);
@@ -181,7 +189,7 @@ router.post('/status', jsonParser, async function (request, response) {
         } catch (error) {
             console.error(`Failed to get Ooba model info: ${error}`);
         }
-    } else if (request.body.api_type === TEXTGEN_TYPES.TABBY) {
+    } else if (apiType === TEXTGEN_TYPES.TABBY) {
         try {
             const modelInfoUrl = baseUrl + '/v1/model';
             const modelInfoReply = await fetch(modelInfoUrl, args);
@@ -245,6 +253,7 @@ router.post('/generate', jsonParser, async function (request, response) {
         case TEXTGEN_TYPES.KOBOLDCPP:
         case TEXTGEN_TYPES.TOGETHERAI:
         case TEXTGEN_TYPES.INFERMATICAI:
+        case TEXTGEN_TYPES.HUGGINGFACE:
             url += '/v1/completions';
             break;
         case TEXTGEN_TYPES.DREAMGEN:
@@ -349,7 +358,7 @@ router.post('/generate', jsonParser, async function (request, response) {
 
         // Map InfermaticAI response to OAI completions format
         if (apiType === TEXTGEN_TYPES.INFERMATICAI) {
-            data['choices'] = (data?.choices || []).map(choice => ({ text: choice?.message?.content || choice.text }));
+            data['choices'] = (data?.choices || []).map(choice => ({ text: choice?.message?.content || choice.text, logprobs: choice?.logprobs, index: choice?.index }));
         }
 
         return response.send(data);
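The rewrap block above normalizes each backend's model list into a single OpenAI-like `{ data: [{ id, ... }] }` shape before validation; a standalone sketch with made-up inputs:

```js
// Made-up inputs mirroring the two normalizations in the /status handler.
const togetherAiStyle = [{ name: 'meta-llama/Llama-3-8b' }];
const ollamaStyle = { models: [{ name: 'llama3:8b' }] };

const a = { data: togetherAiStyle.map(x => ({ id: x.name, ...x })) };
const b = { data: ollamaStyle.models.map(x => ({ id: x.name, ...x })) };
// Both now expose model IDs under data[].id, which the rest of the handler expects.
```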
@@ -4,7 +4,6 @@ const express = require('express');
 const sanitize = require('sanitize-filename');
 
 const { jsonParser, urlencodedParser } = require('../express-common');
-const { UPLOADS_PATH } = require('../constants');
 const { invalidateThumbnail } = require('./thumbnails');
 const { getImages } = require('../util');
 
@@ -60,7 +59,7 @@ router.post('/rename', jsonParser, function (request, response) {
 router.post('/upload', urlencodedParser, function (request, response) {
     if (!request.body || !request.file) return response.sendStatus(400);
 
-    const img_path = path.join(UPLOADS_PATH, request.file.filename);
+    const img_path = path.join(request.file.destination, request.file.filename);
     const filename = request.file.originalname;
 
     try {
@@ -11,7 +11,7 @@ const mime = require('mime-types');
 
 const jimp = require('jimp');
 
-const { UPLOADS_PATH, AVATAR_WIDTH, AVATAR_HEIGHT } = require('../constants');
+const { AVATAR_WIDTH, AVATAR_HEIGHT } = require('../constants');
 const { jsonParser, urlencodedParser } = require('../express-common');
 const { deepMerge, humanizedISO8601DateTime, tryParse, extractFileFromZipBuffer } = require('../util');
 const { TavernCardValidator } = require('../validator/TavernCardValidator');
@@ -44,7 +44,7 @@ async function readCharacterData(inputFile, inputFormat = 'png') {
 
 /**
  * Writes the character card to the specified image file.
- * @param {string} inputFile - Path to the image file
+ * @param {string|Buffer} inputFile - Path to the image file or image buffer
  * @param {string} data - Character card data
  * @param {string} outputFile - Target image file name
  * @param {import('express').Request} request - Express request obejct
@@ -60,8 +60,20 @@ async function writeCharacterData(inputFile, data, outputFile, request, crop = undefined) {
                 break;
             }
         }
-        // Read the image, resize, and save it as a PNG into the buffer
-        const inputImage = await tryReadImage(inputFile, crop);
+
+        /**
+         * Read the image, resize, and save it as a PNG into the buffer.
+         * @returns {Promise<Buffer>} Image buffer
+         */
+        function getInputImage() {
+            if (Buffer.isBuffer(inputFile)) {
+                return parseImageBuffer(inputFile, crop);
+            }
+
+            return tryReadImage(inputFile, crop);
+        }
+
+        const inputImage = await getInputImage();
 
         // Get the chunks
         const outputImage = characterCardParser.write(inputImage, data);
@@ -84,6 +96,32 @@ async function writeCharacterData(inputFile, data, outputFile, request, crop = undefined) {
  * @property {boolean} want_resize Resize the image to the standard avatar size
  */
 
+/**
+ * Parses an image buffer and applies crop if defined.
+ * @param {Buffer} buffer Buffer of the image
+ * @param {Crop|undefined} [crop] Crop parameters
+ * @returns {Promise<Buffer>} Image buffer
+ */
+async function parseImageBuffer(buffer, crop) {
+    const image = await jimp.read(buffer);
+    let finalWidth = image.bitmap.width, finalHeight = image.bitmap.height;
+
+    // Apply crop if defined
+    if (typeof crop == 'object' && [crop.x, crop.y, crop.width, crop.height].every(x => typeof x === 'number')) {
+        image.crop(crop.x, crop.y, crop.width, crop.height);
+        // Apply standard resize if requested
+        if (crop.want_resize) {
+            finalWidth = AVATAR_WIDTH;
+            finalHeight = AVATAR_HEIGHT;
+        } else {
+            finalWidth = crop.width;
+            finalHeight = crop.height;
+        }
+    }
+
+    return image.cover(finalWidth, finalHeight).getBufferAsync(jimp.MIME_PNG);
+}
+
 /**
  * Reads an image file and applies crop if defined.
  * @param {string} imgPath Path to the image file
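For intuition about the new `parseImageBuffer`: jimp's `cover(w, h)` scales and center-crops to exactly w×h, so the branches above yield either the standard avatar size or a buffer matching the crop box. A toy example (made-up file and sizes, jimp 0.x API):

```js
// Toy sketch of the crop-then-cover flow used above.
const image = await jimp.read('example.png');   // hypothetical input file
image.crop(0, 0, 100, 100);                     // crop box as sent by the client
const png = await image.cover(AVATAR_WIDTH, AVATAR_HEIGHT).getBufferAsync(jimp.MIME_PNG);
```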
@@ -445,6 +483,9 @@ function convertWorldInfoToCharacterBook(name, entries) {
                 automation_id: entry.automationId ?? '',
                 role: entry.role ?? 0,
                 vectorized: entry.vectorized ?? false,
+                sticky: entry.sticky ?? null,
+                cooldown: entry.cooldown ?? null,
+                delay: entry.delay ?? null,
             },
         };
 
@@ -509,11 +550,25 @@ async function importFromCharX(uploadPath, { request }) {
         throw new Error('Invalid CharX card file: missing spec field');
     }
 
+    /** @type {string|Buffer} */
+    let avatar = defaultAvatarPath;
+    const assets = _.get(card, 'data.assets');
+    if (Array.isArray(assets) && assets.length) {
+        for (const asset of assets.filter(x => x.type === 'icon' && typeof x.uri === 'string')) {
+            const pathNoProtocol = String(asset.uri.replace(/^(?:\/\/|[^/]+)*\//, ''));
+            const buffer = await extractFileFromZipBuffer(data, pathNoProtocol);
+            if (buffer) {
+                avatar = buffer;
+                break;
+            }
+        }
+    }
+
     unsetFavFlag(card);
     card['create_date'] = humanizedISO8601DateTime();
     card.name = sanitize(card.name);
     const fileName = getPngName(card.name, request.user.directories);
-    const result = await writeCharacterData(defaultAvatarPath, JSON.stringify(card), fileName, request);
+    const result = await writeCharacterData(avatar, JSON.stringify(card), fileName, request);
     return result ? fileName : '';
 }
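The `pathNoProtocol` regex peels off a scheme/authority-style prefix: the greedy group consumes runs of non-slash characters and `//` pairs, and the trailing `\/` takes one more slash, so the scheme and first path segment are removed. A quick trace with a made-up CharX asset URI:

```js
// Made-up asset URI; trace of the prefix-stripping regex used above.
const uri = 'embeded://assets/icon/main.png';
const pathNoProtocol = String(uri.replace(/^(?:\/\/|[^/]+)*\//, ''));
// The match consumes 'embeded://assets/', leaving 'icon/main.png',
// which is then matched against entry names inside the CharX ZIP.
```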
@@ -675,7 +730,7 @@ router.post('/create', urlencodedParser, async function (request, response) {
         return response.send(avatarName);
     } else {
         const crop = tryParse(request.query.crop);
-        const uploadPath = path.join(UPLOADS_PATH, request.file.filename);
+        const uploadPath = path.join(request.file.destination, request.file.filename);
         await writeCharacterData(uploadPath, char, internalName, request, crop);
         fs.unlinkSync(uploadPath);
         return response.send(avatarName);
@@ -758,7 +813,7 @@ router.post('/edit', urlencodedParser, async function (request, response) {
         await writeCharacterData(avatarPath, char, targetFile, request);
     } else {
         const crop = tryParse(request.query.crop);
-        const newAvatarPath = path.join(UPLOADS_PATH, request.file.filename);
+        const newAvatarPath = path.join(request.file.destination, request.file.filename);
         invalidateThumbnail(request.user.directories, 'avatar', request.body.avatar_url);
         await writeCharacterData(newAvatarPath, char, targetFile, request, crop);
         fs.unlinkSync(newAvatarPath);
@@ -1042,7 +1097,7 @@ function getPreservedName(request) {
 router.post('/import', urlencodedParser, async function (request, response) {
     if (!request.body || !request.file) return response.sendStatus(400);
 
-    const uploadPath = path.join(UPLOADS_PATH, request.file.filename);
+    const uploadPath = path.join(request.file.destination, request.file.filename);
     const format = request.body.file_type;
     const preservedFileName = getPreservedName(request);
@@ -6,7 +6,6 @@ const sanitize = require('sanitize-filename');
 const writeFileAtomicSync = require('write-file-atomic').sync;
 
 const { jsonParser, urlencodedParser } = require('../express-common');
-const { UPLOADS_PATH } = require('../constants');
 const { getConfigValue, humanizedISO8601DateTime, tryParse, generateTimestamp, removeOldBackups } = require('../util');
 
 /**
@@ -323,7 +322,7 @@ router.post('/group/import', urlencodedParser, function (request, response) {
     }
 
     const chatname = humanizedISO8601DateTime();
-    const pathToUpload = path.join(UPLOADS_PATH, filedata.filename);
+    const pathToUpload = path.join(filedata.destination, filedata.filename);
     const pathToNewFile = path.join(request.user.directories.groupChats, `${chatname}.jsonl`);
     fs.copyFileSync(pathToUpload, pathToNewFile);
     fs.unlinkSync(pathToUpload);
@@ -347,9 +346,11 @@ router.post('/import', urlencodedParser, function (request, response) {
     }
 
     try {
-        const data = fs.readFileSync(path.join(UPLOADS_PATH, request.file.filename), 'utf8');
+        const pathToUpload = path.join(request.file.destination, request.file.filename);
+        const data = fs.readFileSync(pathToUpload, 'utf8');
 
         if (format === 'json') {
+            fs.unlinkSync(pathToUpload);
             const jsonData = JSON.parse(data);
             if (jsonData.histories !== undefined) {
                 // CAI Tools format
@@ -388,7 +389,8 @@ router.post('/import', urlencodedParser, function (request, response) {
             if (jsonData.user_name !== undefined || jsonData.name !== undefined) {
                 const fileName = `${characterName} - ${humanizedISO8601DateTime()} imported.jsonl`;
                 const filePath = path.join(request.user.directories.chats, avatarUrl, fileName);
-                fs.copyFileSync(path.join(UPLOADS_PATH, request.file.filename), filePath);
+                fs.copyFileSync(pathToUpload, filePath);
+                fs.unlinkSync(pathToUpload);
                 response.send({ res: true });
             } else {
                 console.log('Incorrect chat format .jsonl');
@@ -191,7 +191,7 @@ router.post('/delete', jsonParser, async (request, response) => {
         return response.status(400).send('Bad Request: extensionName is required in the request body.');
     }
 
-    // Sanatize the extension name to prevent directory traversal
+    // Sanitize the extension name to prevent directory traversal
     const extensionName = sanitize(request.body.extensionName);
 
     try {
@@ -201,7 +201,7 @@ router.post('/delete', jsonParser, async (request, response) => {
             return response.status(404).send(`Directory does not exist at ${extensionPath}`);
         }
 
-        await fs.promises.rmdir(extensionPath, { recursive: true });
+        await fs.promises.rm(extensionPath, { recursive: true });
         console.log(`Extension has been deleted at ${extensionPath}`);
 
         return response.send(`Extension has been deleted at ${extensionPath}`);
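Background for the `rmdir` → `rm` swap: recursive `fs.promises.rmdir` is deprecated in Node.js (DEP0147); `fs.promises.rm(path, { recursive: true })` is the supported replacement, and it also accepts `force: true` when missing paths should not throw.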
@@ -281,6 +281,12 @@ router.post('/generate-image', jsonParser, async (request, response) => {
 
         const archiveBuffer = await generateResult.arrayBuffer();
         const imageBuffer = await extractFileFromZipBuffer(archiveBuffer, '.png');
+
+        if (!imageBuffer) {
+            console.warn('NovelAI generated an image, but the PNG file was not found.');
+            return response.sendStatus(500);
+        }
+
         const originalBase64 = imageBuffer.toString('base64');
 
         // No upscaling
@@ -311,6 +317,11 @@ router.post('/generate-image', jsonParser, async (request, response) => {
 
             const upscaledArchiveBuffer = await upscaleResult.arrayBuffer();
             const upscaledImageBuffer = await extractFileFromZipBuffer(upscaledArchiveBuffer, '.png');
+
+            if (!upscaledImageBuffer) {
+                throw new Error('NovelAI upscaled an image, but the PNG file was not found.');
+            }
+
             const upscaledBase64 = upscaledImageBuffer.toString('base64');
 
             return response.send(upscaledBase64);
@@ -6,6 +6,7 @@ const fs = require('fs');
 const { jsonParser, urlencodedParser } = require('../express-common');
 const { getConfigValue, mergeObjectWithYaml, excludeKeysByYaml, trimV1 } = require('../util');
 const { setAdditionalHeaders } = require('../additional-headers');
+const { OPENROUTER_HEADERS } = require('../constants');
 
 const router = express.Router();
 
@@ -80,7 +81,7 @@ router.post('/caption-image', jsonParser, async (request, response) => {
 
     if (request.body.api === 'openrouter') {
         apiUrl = 'https://openrouter.ai/api/v1/chat/completions';
-        headers['HTTP-Referer'] = request.headers.referer;
+        Object.assign(headers, OPENROUTER_HEADERS);
    }
 
     if (request.body.api === 'openai') {
@@ -22,7 +22,7 @@ const visitHeaders = {
     'Sec-Fetch-User': '?1',
 };
 
-router.post('/search', jsonParser, async (request, response) => {
+router.post('/serpapi', jsonParser, async (request, response) => {
     try {
         const key = readSecret(request.user.directories, SECRET_KEYS.SERPAPI);
 
@@ -34,6 +34,8 @@ router.post('/search', jsonParser, async (request, response) => {
         const { query } = request.body;
         const result = await fetch(`https://serpapi.com/search.json?q=${encodeURIComponent(query)}&api_key=${key}`);
 
+        console.log('SerpApi query', query);
+
         if (!result.ok) {
             const text = await result.text();
             console.log('SerpApi request failed', result.statusText, text);
@@ -134,6 +136,54 @@ router.post('/transcript', jsonParser, async (request, response) => {
     }
 });
 
+router.post('/searxng', jsonParser, async (request, response) => {
+    try {
+        const { baseUrl, query } = request.body;
+
+        if (!baseUrl || !query) {
+            console.log('Missing required parameters for /searxng');
+            return response.sendStatus(400);
+        }
+
+        console.log('SearXNG query', baseUrl, query);
+
+        const mainPageUrl = new URL(baseUrl);
+        const mainPageRequest = await fetch(mainPageUrl, { headers: visitHeaders });
+
+        if (!mainPageRequest.ok) {
+            console.log('SearXNG request failed', mainPageRequest.statusText);
+            return response.sendStatus(500);
+        }
+
+        const mainPageText = await mainPageRequest.text();
+        const clientHref = mainPageText.match(/href="(\/client.+\.css)"/)?.[1];
+
+        if (clientHref) {
+            const clientUrl = new URL(clientHref, baseUrl);
+            await fetch(clientUrl, { headers: visitHeaders });
+        }
+
+        const searchUrl = new URL('/search', baseUrl);
+        const searchParams = new URLSearchParams();
+        searchParams.append('q', query);
+        searchUrl.search = searchParams.toString();
+
+        const searchResult = await fetch(searchUrl, { headers: visitHeaders });
+
+        if (!searchResult.ok) {
+            const text = await searchResult.text();
+            console.log('SearXNG request failed', searchResult.statusText, text);
+            return response.sendStatus(500);
+        }
+
+        const data = await searchResult.text();
+        return response.send(data);
+    } catch (error) {
+        console.log('SearXNG request failed', error);
+        return response.sendStatus(500);
+    }
+});
+
 router.post('/visit', jsonParser, async (request, response) => {
     try {
         const url = request.body.url;
@@ -170,6 +220,8 @@ router.post('/visit', jsonParser, async (request, response) => {
             return response.sendStatus(400);
         }
 
+        console.log('Visiting web URL', url);
+
         const result = await fetch(url, { headers: visitHeaders });
 
         if (!result.ok) {
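Client-side, the new SearXNG proxy takes its instance URL and query in the JSON body; a hedged sketch (the `/api/search` mount prefix is an assumption about how this router is registered):

```js
// Hypothetical client call for the new SearXNG route; the response body is the
// raw HTML of the results page, to be parsed by the caller.
const res = await fetch('/api/search/searxng', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ baseUrl: 'https://searx.example.org', query: 'sillytavern' }),
});
const html = await res.text();
```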
@@ -42,6 +42,8 @@ const SECRET_KEYS = {
     GROQ: 'api_key_groq',
     AZURE_TTS: 'api_key_azure_tts',
     FEATHERLESS: 'api_key_featherless',
+    ZEROONEAI: 'api_key_01ai',
+    HUGGINGFACE: 'api_key_huggingface',
 };
 
 // These are the keys that are safe to expose, even if allowKeysExposure is false
@@ -5,7 +5,6 @@ const express = require('express');
 const mime = require('mime-types');
 const sanitize = require('sanitize-filename');
 const writeFileAtomicSync = require('write-file-atomic').sync;
-const { UPLOADS_PATH } = require('../constants');
 const { getImageBuffers } = require('../util');
 const { jsonParser, urlencodedParser } = require('../express-common');
 
@@ -190,7 +189,7 @@ router.post('/upload-zip', urlencodedParser, async (request, response) => {
         return response.sendStatus(404);
     }
 
-    const spritePackPath = path.join(UPLOADS_PATH, file.filename);
+    const spritePackPath = path.join(file.destination, file.filename);
     const sprites = await getImageBuffers(spritePackPath);
     const files = fs.readdirSync(spritesPath);
 
@@ -248,7 +247,7 @@ router.post('/upload', urlencodedParser, async (request, response) => {
     }
 
     const filename = label + path.parse(file.originalname).ext;
-    const spritePath = path.join(UPLOADS_PATH, file.filename);
+    const spritePath = path.join(file.destination, file.filename);
     const pathToFile = path.join(spritesPath, filename);
     // Copy uploaded file to sprites folder
     fs.cpSync(spritePath, pathToFile);
@@ -2,7 +2,7 @@ const fetch = require('node-fetch').default;
 const https = require('https');
 const express = require('express');
 const { readSecret, SECRET_KEYS } = require('./secrets');
-const { getConfigValue } = require('../util');
+const { getConfigValue, uuidv4 } = require('../util');
 const { jsonParser } = require('../express-common');
 
 const DEEPLX_URL_DEFAULT = 'http://127.0.0.1:1188/translate';
@@ -102,6 +102,54 @@ router.post('/google', jsonParser, async (request, response) => {
     }
 });
 
+router.post('/yandex', jsonParser, async (request, response) => {
+    const chunks = request.body.chunks;
+    const lang = request.body.lang;
+
+    if (!chunks || !lang) {
+        return response.sendStatus(400);
+    }
+
+    // reconstruct original text to log
+    let inputText = '';
+
+    const params = new URLSearchParams();
+    for (const chunk of chunks) {
+        params.append('text', chunk);
+        inputText += chunk;
+    }
+    params.append('lang', lang);
+    const ucid = uuidv4().replaceAll('-', '');
+
+    console.log('Input text: ' + inputText);
+
+    try {
+        const result = await fetch(`https://translate.yandex.net/api/v1/tr.json/translate?ucid=${ucid}&srv=android&format=text`, {
+            method: 'POST',
+            body: params,
+            headers: {
+                'Content-Type': 'application/x-www-form-urlencoded',
+            },
+            timeout: 0,
+        });
+
+        if (!result.ok) {
+            const error = await result.text();
+            console.log('Yandex error: ', result.statusText, error);
+            return response.sendStatus(500);
+        }
+
+        const json = await result.json();
+        const translated = json.text.join();
+        console.log('Translated text: ' + translated);
+
+        return response.send(translated);
+    } catch (error) {
+        console.log('Translation error: ' + error.message);
+        return response.sendStatus(500);
+    }
+});
+
 router.post('/lingva', jsonParser, async (request, response) => {
     try {
         const baseUrl = readSecret(request.user.directories, SECRET_KEYS.LINGVA_URL);
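The Yandex route expects the text pre-split into chunks (each becomes its own `text` form field) plus a combined language pair; a sketch of a matching request (the `/api/translate` mount prefix is an assumption):

```js
// Hypothetical client call for the new Yandex route; the response is plain text.
const res = await fetch('/api/translate/yandex', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ chunks: ['Hello, ', 'world!'], lang: 'en-ru' }),
});
const translated = await res.text();
```

Worth noting for multi-chunk inputs: `json.text.join()` uses JavaScript's default `','` separator, so translated chunks come back comma-joined.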
@@ -1,5 +1,6 @@
 const vectra = require('vectra');
 const path = require('path');
+const fs = require('fs');
 const express = require('express');
 const sanitize = require('sanitize-filename');
 const { jsonParser } = require('../express-common');
@@ -16,6 +17,7 @@ const SOURCES = [
     'cohere',
     'ollama',
     'llamacpp',
+    'vllm',
 ];
 
 /**
@@ -45,6 +47,8 @@ async function getVector(source, sourceSettings, text, isQuery, directories) {
             return require('../vectors/cohere-vectors').getCohereVector(text, isQuery, directories, sourceSettings.model);
         case 'llamacpp':
             return require('../vectors/llamacpp-vectors').getLlamaCppVector(text, sourceSettings.apiUrl, directories);
+        case 'vllm':
+            return require('../vectors/vllm-vectors').getVllmVector(text, sourceSettings.apiUrl, sourceSettings.model, directories);
         case 'ollama':
             return require('../vectors/ollama-vectors').getOllamaVector(text, sourceSettings.apiUrl, sourceSettings.model, sourceSettings.keep, directories);
     }
@@ -91,6 +95,9 @@ async function getBatchVector(source, sourceSettings, texts, isQuery, directories) {
             case 'llamacpp':
                 results.push(...await require('../vectors/llamacpp-vectors').getLlamaCppBatchVector(batch, sourceSettings.apiUrl, directories));
                 break;
+            case 'vllm':
+                results.push(...await require('../vectors/vllm-vectors').getVllmBatchVector(batch, sourceSettings.apiUrl, sourceSettings.model, directories));
+                break;
             case 'ollama':
                 results.push(...await require('../vectors/ollama-vectors').getOllamaBatchVector(batch, sourceSettings.apiUrl, sourceSettings.model, sourceSettings.keep, directories));
                 break;
@@ -278,6 +285,14 @@ function getSourceSettings(source, request) {
         return {
             apiUrl: apiUrl,
         };
+    } else if (source === 'vllm') {
+        const apiUrl = String(request.headers['x-vllm-url']);
+        const model = String(request.headers['x-vllm-model']);
+
+        return {
+            apiUrl: apiUrl,
+            model: model,
+        };
     } else if (source === 'ollama') {
         const apiUrl = String(request.headers['x-ollama-url']);
         const model = String(request.headers['x-ollama-model']);
@@ -426,6 +441,24 @@ router.post('/delete', jsonParser, async (req, res) => {
     }
 });
 
+router.post('/purge-all', jsonParser, async (req, res) => {
+    try {
+        for (const source of SOURCES) {
+            const sourcePath = path.join(req.user.directories.vectors, sanitize(source));
+            if (!fs.existsSync(sourcePath)) {
+                continue;
+            }
+            await fs.promises.rm(sourcePath, { recursive: true });
+            console.log(`Deleted vector source store at ${sourcePath}`);
+        }
+
+        return res.sendStatus(200);
+    } catch (error) {
+        console.error(error);
+        return res.sendStatus(500);
+    }
+});
+
 router.post('/purge', jsonParser, async (req, res) => {
     try {
         if (!req.body.collectionId) {
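The vLLM source is configured entirely via request headers read in `getSourceSettings`; a sketch of a matching client call (the `/api/vector/insert` path, model name, and body shape are illustrative assumptions):

```js
// Hypothetical request showing the x-vllm-* headers consumed above.
await fetch('/api/vector/insert', {
    method: 'POST',
    headers: {
        'Content-Type': 'application/json',
        'x-vllm-url': 'http://localhost:8000',
        'x-vllm-model': 'intfloat/e5-mistral-7b-instruct',
    },
    body: JSON.stringify({ collectionId: 'example', source: 'vllm', items: [] }),
});
```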
@@ -5,7 +5,6 @@ const sanitize = require('sanitize-filename');
 const writeFileAtomicSync = require('write-file-atomic').sync;
 
 const { jsonParser, urlencodedParser } = require('../express-common');
-const { UPLOADS_PATH } = require('../constants');
 
 /**
  * Reads a World Info file and returns its contents
@@ -74,7 +73,7 @@ router.post('/import', urlencodedParser, (request, response) => {
     if (request.body.convertedData) {
         fileContents = request.body.convertedData;
     } else {
-        const pathToUpload = path.join(UPLOADS_PATH, request.file.filename);
+        const pathToUpload = path.join(request.file.destination, request.file.filename);
         fileContents = fs.readFileSync(pathToUpload, 'utf8');
         fs.unlinkSync(pathToUpload);
    }
src/util.js (10 changed lines)
@@ -139,7 +139,7 @@ function getHexString(length) {
  * Extracts a file with given extension from an ArrayBuffer containing a ZIP archive.
  * @param {ArrayBuffer} archiveBuffer Buffer containing a ZIP archive
  * @param {string} fileExtension File extension to look for
- * @returns {Promise<Buffer>} Buffer containing the extracted file
+ * @returns {Promise<Buffer|null>} Buffer containing the extracted file. Null if the file was not found.
  */
 async function extractFileFromZipBuffer(archiveBuffer, fileExtension) {
     return await new Promise((resolve, reject) => yauzl.fromBuffer(Buffer.from(archiveBuffer), { lazyEntries: true }, (err, zipfile) => {
@@ -171,6 +171,7 @@ async function extractFileFromZipBuffer(archiveBuffer, fileExtension) {
                 zipfile.readEntry();
             }
         });
+        zipfile.on('end', () => resolve(null));
     }));
 }
 
@@ -292,7 +293,14 @@ const color = {
     white: (mess) => color.byNum(mess, 37),
 };
 
+/**
+ * Gets a random UUIDv4 string.
+ * @returns {string} A UUIDv4 string
+ */
 function uuidv4() {
+    if ('randomUUID' in crypto) {
+        return crypto.randomUUID();
+    }
     return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
         const r = Math.random() * 16 | 0;
         const v = c === 'x' ? r : (r & 0x3 | 0x8);
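With `zipfile.on('end', () => resolve(null))`, the promise now settles even when no matching entry exists (previously it simply never resolved), which is what the new NovelAI and CharX guards rely on. A minimal consumer sketch:

```js
// Minimal consumer of the now-nullable result (archiveBuffer is assumed).
const imageBuffer = await extractFileFromZipBuffer(archiveBuffer, '.png');
if (!imageBuffer) {
    throw new Error('No PNG entry found in the archive.');
}
```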
src/vectors/vllm-vectors.js (new file, 63 additions)
@@ -0,0 +1,63 @@
+const fetch = require('node-fetch').default;
+const { setAdditionalHeadersByType } = require('../additional-headers');
+const { TEXTGEN_TYPES } = require('../constants');
+
+/**
+ * Gets the vector for the given text from VLLM
+ * @param {string[]} texts - The array of texts to get the vectors for
+ * @param {string} apiUrl - The API URL
+ * @param {string} model - The model to use
+ * @param {import('../users').UserDirectoryList} directories - The directories object for the user
+ * @returns {Promise<number[][]>} - The array of vectors for the texts
+ */
+async function getVllmBatchVector(texts, apiUrl, model, directories) {
+    const url = new URL(apiUrl);
+    url.pathname = '/v1/embeddings';
+
+    const headers = {};
+    setAdditionalHeadersByType(headers, TEXTGEN_TYPES.VLLM, apiUrl, directories);
+
+    const response = await fetch(url, {
+        method: 'POST',
+        headers: {
+            'Content-Type': 'application/json',
+            ...headers,
+        },
+        body: JSON.stringify({ input: texts, model }),
+    });
+
+    if (!response.ok) {
+        const responseText = await response.text();
+        throw new Error(`VLLM: Failed to get vector for text: ${response.statusText} ${responseText}`);
+    }
+
+    const data = await response.json();
+
+    if (!Array.isArray(data?.data)) {
+        throw new Error('API response was not an array');
+    }
+
+    // Sort data by x.index to ensure the order is correct
+    data.data.sort((a, b) => a.index - b.index);
+
+    const vectors = data.data.map(x => x.embedding);
+    return vectors;
+}
+
+/**
+ * Gets the vector for the given text from VLLM
+ * @param {string} text - The text to get the vector for
+ * @param {string} apiUrl - The API URL
+ * @param {string} model - The model to use
+ * @param {import('../users').UserDirectoryList} directories - The directories object for the user
+ * @returns {Promise<number[]>} - The vector for the text
+ */
+async function getVllmVector(text, apiUrl, model, directories) {
+    const vectors = await getVllmBatchVector([text], apiUrl, model, directories);
+    return vectors[0];
+}
+
+module.exports = {
+    getVllmBatchVector,
+    getVllmVector,
+};
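A quick usage sketch of the new module (the URL, model name, and `directories` value are placeholders):

```js
// Placeholder values; getVllmVector wraps the batch call for a single string.
const { getVllmVector } = require('./src/vectors/vllm-vectors');
const vector = await getVllmVector('hello world', 'http://localhost:8000', 'my-embedding-model', directories);
console.log(vector.length); // embedding dimensionality reported by the server
```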