diff --git a/public/script.js b/public/script.js
index 8ee918f39..a17dd748c 100644
--- a/public/script.js
+++ b/public/script.js
@@ -9885,6 +9885,7 @@ jQuery(async function () {
Chub characters (direct link or id)
Example: Anonymous/example-character
Chub lorebooks (direct link or id)
Example: lorebooks/bartleby/example-lorebook
JanitorAI character (direct link or id)
Example: https://janitorai.com/characters/ddd1498a-a370-4136-b138-a8cd9461fdfe_character-aqua-the-useless-goddess
+ Pygmalion.chat character (link)
Example: https://pygmalion.chat/character/a7ca95a1-0c88-4e23-91b3-149db1e78ab9
More coming soon...
`;
const input = await callPopup(html, 'input', '', { okButton: 'Import', rows: 4 });
diff --git a/src/character-card-parser.js b/src/character-card-parser.js
index 53d430b36..9e9cbd1a7 100644
--- a/src/character-card-parser.js
+++ b/src/character-card-parser.js
@@ -1,41 +1,80 @@
const fs = require('fs');
+const encode = require('png-chunks-encode');
const extract = require('png-chunks-extract');
const PNGtext = require('png-chunk-text');
-const parse = async (cardUrl, format) => {
+/**
+ * Writes Character metadata to a PNG image buffer.
+ * @param {Buffer} image PNG image buffer
+ * @param {string} data Character data to write
+ * @returns {Buffer} PNG image buffer with metadata
+ */
+const write = (image, data) => {
+ const chunks = extract(image);
+ const tEXtChunks = chunks.filter(chunk => chunk.name === 'tEXt');
+
+ // Remove all existing tEXt chunks
+ for (let tEXtChunk of tEXtChunks) {
+ chunks.splice(chunks.indexOf(tEXtChunk), 1);
+ }
+ // Add new chunks before the IEND chunk
+ const base64EncodedData = Buffer.from(data, 'utf8').toString('base64');
+ chunks.splice(-1, 0, PNGtext.encode('chara', base64EncodedData));
+ const newBuffer = Buffer.from(encode(chunks));
+ return newBuffer;
+};
+
+/**
+ * Reads Character metadata from a PNG image buffer.
+ * @param {Buffer} image PNG image buffer
+ * @returns {string} Character data
+ */
+const read = (image) => {
+ const chunks = extract(image);
+
+ const textChunks = chunks.filter(function (chunk) {
+ return chunk.name === 'tEXt';
+ }).map(function (chunk) {
+ return PNGtext.decode(chunk.data);
+ });
+
+ if (textChunks.length === 0) {
+ console.error('PNG metadata does not contain any text chunks.');
+ throw new Error('No PNG metadata.');
+ }
+
+ let index = textChunks.findIndex((chunk) => chunk.keyword.toLowerCase() == 'chara');
+
+ if (index === -1) {
+ console.error('PNG metadata does not contain any character data.');
+ throw new Error('No PNG metadata.');
+ }
+
+ return Buffer.from(textChunks[index].text, 'base64').toString('utf8');
+};
+
+/**
+ * Parses a card image and returns the character metadata.
+ * @param {string} cardUrl Path to the card image
+ * @param {string} format File format; only 'png' is currently supported (defaults to 'png')
+ * @returns {string} Character data
+ */
+const parse = (cardUrl, format) => {
let fileFormat = format === undefined ? 'png' : format;
switch (fileFormat) {
case 'png': {
const buffer = fs.readFileSync(cardUrl);
- const chunks = extract(buffer);
-
- const textChunks = chunks.filter(function (chunk) {
- return chunk.name === 'tEXt';
- }).map(function (chunk) {
- return PNGtext.decode(chunk.data);
- });
-
- if (textChunks.length === 0) {
- console.error('PNG metadata does not contain any text chunks.');
- throw new Error('No PNG metadata.');
- }
-
- let index = textChunks.findIndex((chunk) => chunk.keyword.toLowerCase() == 'chara');
-
- if (index === -1) {
- console.error('PNG metadata does not contain any character data.');
- throw new Error('No PNG metadata.');
- }
-
- return Buffer.from(textChunks[index].text, 'base64').toString('utf8');
+ return read(buffer);
}
- default:
- break;
}
+
+ throw new Error('Unsupported format');
};
module.exports = {
- parse: parse,
+ parse,
+ write,
+ read,
};
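
For context, here is a minimal usage sketch of the helpers this module now exports. The file names (card.png, card-updated.png) are placeholders, and the embedded payload is assumed to be JSON, as it is for V2 character cards:

const fs = require('fs');
const characterCardParser = require('./src/character-card-parser.js');

// Read the embedded character data out of an existing card
const json = characterCardParser.parse('./card.png', 'png');
const card = JSON.parse(json);

// Tweak it and write it back into the image's tEXt chunk
card.data = card.data || {};
card.data.description = 'Updated description';

const imageBuffer = fs.readFileSync('./card.png');
const updatedImage = characterCardParser.write(imageBuffer, JSON.stringify(card));
fs.writeFileSync('./card-updated.png', updatedImage);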
diff --git a/src/endpoints/characters.js b/src/endpoints/characters.js
index 0dcefbc97..2c8b36e98 100644
--- a/src/endpoints/characters.js
+++ b/src/endpoints/characters.js
@@ -7,9 +7,6 @@ const writeFileAtomicSync = require('write-file-atomic').sync;
const yaml = require('yaml');
const _ = require('lodash');
-const encode = require('png-chunks-encode');
-const extract = require('png-chunks-extract');
-const PNGtext = require('png-chunk-text');
const jimp = require('jimp');
const { DIRECTORIES, UPLOADS_PATH, AVATAR_WIDTH, AVATAR_HEIGHT } = require('../constants');
@@ -33,7 +30,7 @@ const characterDataCache = new Map();
* @param {string} input_format - 'png'
* @returns {Promise} - Character card data
*/
-async function charaRead(img_url, input_format) {
+async function charaRead(img_url, input_format = 'png') {
const stat = fs.statSync(img_url);
const cacheKey = `${img_url}-${stat.mtimeMs}`;
if (characterDataCache.has(cacheKey)) {
@@ -59,22 +56,12 @@ async function charaWrite(img_url, data, target_img, response = undefined, mes =
}
}
// Read the image, resize, and save it as a PNG into the buffer
- const image = await tryReadImage(img_url, crop);
+ const inputImage = await tryReadImage(img_url, crop);
// Get the chunks
- const chunks = extract(image);
- const tEXtChunks = chunks.filter(chunk => chunk.name === 'tEXt');
+ const outputImage = characterCardParser.write(inputImage, data);
- // Remove all existing tEXt chunks
- for (let tEXtChunk of tEXtChunks) {
- chunks.splice(chunks.indexOf(tEXtChunk), 1);
- }
- // Add new chunks before the IEND chunk
- const base64EncodedData = Buffer.from(data, 'utf8').toString('base64');
- chunks.splice(-1, 0, PNGtext.encode('chara', base64EncodedData));
- //chunks.splice(-1, 0, text.encode('lorem', 'ipsum'));
-
- writeFileAtomicSync(DIRECTORIES.characters + target_img + '.png', Buffer.from(encode(chunks)));
+ writeFileAtomicSync(DIRECTORIES.characters + target_img + '.png', outputImage);
if (response !== undefined) response.send(mes);
return true;
} catch (err) {
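
The refactored charaWrite now boils down to read, resize, embed, save. A reduced sketch of the same pattern for embedding data into an arbitrary avatar follows; the paths and the helper name are placeholders, and the jimp resize step is omitted for brevity:

const fs = require('fs');
const writeFileAtomicSync = require('write-file-atomic').sync;
const characterCardParser = require('../character-card-parser.js');

function embedCharacterData(avatarPath, characterJson, outputPath) {
    // Embed the serialized character into the PNG's tEXt chunk and save atomically
    const inputImage = fs.readFileSync(avatarPath);
    const outputImage = characterCardParser.write(inputImage, characterJson);
    writeFileAtomicSync(outputPath, outputImage);
}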
diff --git a/src/endpoints/content-manager.js b/src/endpoints/content-manager.js
index 191b4f4ce..727715a95 100644
--- a/src/endpoints/content-manager.js
+++ b/src/endpoints/content-manager.js
@@ -10,6 +10,7 @@ const contentLogPath = path.join(contentDirectory, 'content.log');
const contentIndexPath = path.join(contentDirectory, 'index.json');
const { DIRECTORIES } = require('../constants');
const presetFolders = [DIRECTORIES.koboldAI_Settings, DIRECTORIES.openAI_Settings, DIRECTORIES.novelAI_Settings, DIRECTORIES.textGen_Settings];
+const characterCardParser = require('../character-card-parser.js');
/**
* Gets the default presets from the content directory.
@@ -219,6 +220,58 @@ async function downloadChubCharacter(id) {
return { buffer, fileName, fileType };
}
+/**
+ * Downloads a character card from Pygmalion.chat (the Pygsite).
+ * @param {string} id UUID of the character
+ * @returns {Promise<{buffer: Buffer, fileName: string, fileType: string}>}
+ */
+async function downloadPygmalionCharacter(id) {
+ const result = await fetch(`https://server.pygmalion.chat/api/export/character/${id}/v2`, {
+ method: 'POST',
+ });
+
+ if (!result.ok) {
+ const text = await result.text();
+ console.error('Pygsite returned error', result.status, text);
+ throw new Error('Failed to download character');
+ }
+
+ const jsonData = await result.json();
+ const card = jsonData?.card;
+
+ if (!card || typeof card !== 'object') {
+ console.error('Pygsite returned invalid character data', jsonData);
+ throw new Error('Failed to download character');
+ }
+
+ try {
+ const avatarUrl = card?.data?.avatar;
+
+ if (!avatarUrl) {
+ console.error('Pygsite character does not have an avatar', card);
+ throw new Error('Failed to download avatar');
+ }
+
+ // fetch() only rejects on network errors, so check the HTTP status explicitly
+ const avatarResult = await fetch(avatarUrl);
+ if (!avatarResult.ok) throw new Error(`Failed to download avatar: HTTP ${avatarResult.status}`);
+ const avatarBuffer = await avatarResult.buffer();
+
+ const cardBuffer = characterCardParser.write(avatarBuffer, JSON.stringify(card));
+
+ return {
+ buffer: cardBuffer,
+ fileName: `${sanitize(id)}.png`,
+ fileType: 'image/png',
+ };
+ } catch (e) {
+ console.error('Failed to download avatar, using JSON instead', e);
+ return {
+ buffer: Buffer.from(JSON.stringify(jsonData)),
+ fileName: `${sanitize(id)}.json`,
+ fileType: 'application/json',
+ };
+ }
+}
+
/**
*
* @param {String} str
@@ -294,7 +347,7 @@ async function downloadJannyCharacter(uuid) {
* @param {String} url
* @returns {String | null } UUID of the character
*/
-function parseJannyUrl(url) {
+function getUuidFromUrl(url) {
// Extract UUID from URL
const uuidRegex = /[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}/;
const matches = url.match(uuidRegex);
@@ -317,8 +370,18 @@ router.post('/import', jsonParser, async (request, response) => {
let type;
const isJannnyContent = url.includes('janitorai');
- if (isJannnyContent) {
- const uuid = parseJannyUrl(url);
+ const isPygmalionContent = url.includes('pygmalion.chat');
+
+ if (isPygmalionContent) {
+ const uuid = getUuidFromUrl(url);
+ if (!uuid) {
+ return response.sendStatus(404);
+ }
+
+ type = 'character';
+ result = await downloadPygmalionCharacter(uuid);
+ } else if (isJannnyContent) {
+ const uuid = getUuidFromUrl(url);
if (!uuid) {
return response.sendStatus(404);
}
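
For reference, the Pygmalion branch of the /import route works from a UUID pulled out of the pasted URL. Below is a small sketch using the example URL from the popup text; the regex mirrors the one in getUuidFromUrl, and the commented call shows roughly how the result would be consumed:

const uuidRegex = /[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}/;
const url = 'https://pygmalion.chat/character/a7ca95a1-0c88-4e23-91b3-149db1e78ab9';
const uuid = url.match(uuidRegex)?.[0] ?? null; // 'a7ca95a1-0c88-4e23-91b3-149db1e78ab9'

// With a UUID in hand, the route fetches the card: a PNG with embedded metadata,
// or a raw JSON fallback if the avatar cannot be downloaded.
// const { buffer, fileName, fileType } = await downloadPygmalionCharacter(uuid);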
diff --git a/src/util.js b/src/util.js
index 1d437379e..4f05fc0c6 100644
--- a/src/util.js
+++ b/src/util.js
@@ -365,7 +365,7 @@ function getImages(path) {
/**
* Pipe a fetch() response to an Express.js Response, including status code.
* @param {import('node-fetch').Response} from The Fetch API response to pipe from.
- * @param {Express.Response} to The Express response to pipe to.
+ * @param {import('express').Response} to The Express response to pipe to.
*/
function forwardFetchResponse(from, to) {
let statusCode = from.status;
@@ -399,6 +399,64 @@ function forwardFetchResponse(from, to) {
});
}
+/**
+ * Makes an HTTP/2 request to the specified endpoint.
+ *
+ * @deprecated Use `node-fetch` if possible.
+ * @param {string} endpoint URL to make the request to
+ * @param {string} method HTTP method to use
+ * @param {string} body Request body
+ * @param {object} headers Request headers
+ * @returns {Promise<string>} Response body
+ */
+function makeHttp2Request(endpoint, method, body, headers) {
+ return new Promise((resolve, reject) => {
+ try {
+ const http2 = require('http2');
+ const url = new URL(endpoint);
+ const client = http2.connect(url.origin);
+
+ const req = client.request({
+ ':method': method,
+ ':path': url.pathname + url.search, // keep the query string, if any
+ ...headers,
+ });
+ req.setEncoding('utf8');
+
+ req.on('response', (headers) => {
+ const status = Number(headers[':status']);
+
+ if (status < 200 || status >= 300) {
+ reject(new Error(`Request failed with status ${status}`));
+ }
+
+ let data = '';
+
+ req.on('data', (chunk) => {
+ data += chunk;
+ });
+
+ req.on('end', () => {
+ console.log(data);
+ client.close(); // close the HTTP/2 session so it does not keep the process alive
+ resolve(data);
+ });
+ });
+
+ req.on('error', (err) => {
+ client.close();
+ reject(err);
+ });
+
+ if (body) {
+ req.write(body);
+ }
+
+ req.end();
+ } catch (e) {
+ reject(e);
+ }
+ });
+}
+
/**
* Adds YAML-serialized object to the object.
* @param {object} obj Object
@@ -547,4 +605,5 @@ module.exports = {
excludeKeysByYaml,
trimV1,
Cache,
+ makeHttp2Request,
};
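
A minimal usage sketch of makeHttp2Request follows. The endpoint, body, and headers are placeholders rather than anything the project actually calls, and since the helper is marked deprecated it is only meant for hosts that require HTTP/2:

const { makeHttp2Request } = require('./src/util.js');

makeHttp2Request('https://example.com/api/endpoint', 'POST', JSON.stringify({ query: 'test' }), {
    'content-type': 'application/json',
})
    .then((body) => console.log('Response body:', body))
    .catch((err) => console.error('HTTP/2 request failed:', err));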