Phaseout JSON5

Cohee
2023-12-03 14:04:43 +02:00
parent 5fb0807dcc
commit b09ebb240e
3 changed files with 14 additions and 28 deletions
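
For background: JSON5 is a superset of JSON, so json5.parse() also accepts comments, trailing commas, single-quoted strings, and unquoted keys, all of which JSON.parse() rejects with a SyntaxError. The sketch below is illustrative only (the sample values are made up, not taken from the diff) and shows which inputs stop parsing once the calls are switched:

// Strict JSON parses the same way before and after the change.
console.log(JSON.parse('{"name": "Alice", "spec": "chara_card_v2"}'));

// JSON5-only syntax: json5.parse() accepted these, JSON.parse() throws.
const json5OnlyInputs = [
    '{name: "Alice"}',            // unquoted key
    "{'name': 'Alice'}",          // single-quoted strings
    '{"name": "Alice",}',         // trailing comma
    '{"name": "Alice"} // note',  // comment
];

for (const input of json5OnlyInputs) {
    try {
        JSON.parse(input);
        console.log(`Parsed: ${input}`);
    } catch {
        console.log(`SyntaxError from JSON.parse: ${input}`);
    }
}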

package-lock.json (generated)

@@ -28,7 +28,6 @@
 "ip-matching": "^2.1.2",
 "ipaddr.js": "^2.0.1",
 "jimp": "^0.22.10",
-"json5": "^2.2.3",
 "lodash": "^4.17.21",
 "mime-types": "^2.1.35",
 "multer": "^1.4.5-lts.1",
@@ -3225,17 +3224,6 @@
   "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==",
   "dev": true
 },
-"node_modules/json5": {
-  "version": "2.2.3",
-  "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz",
-  "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==",
-  "bin": {
-    "json5": "lib/cli.js"
-  },
-  "engines": {
-    "node": ">=6"
-  }
-},
 "node_modules/jsonfile": {
   "version": "6.1.0",
   "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz",

package.json

@@ -18,7 +18,6 @@
 "ip-matching": "^2.1.2",
 "ipaddr.js": "^2.0.1",
 "jimp": "^0.22.10",
-"json5": "^2.2.3",
 "lodash": "^4.17.21",
 "mime-types": "^2.1.35",
 "multer": "^1.4.5-lts.1",

server.js

@@ -33,7 +33,6 @@ const DeviceDetector = require('device-detector-js');
 const fetch = require('node-fetch').default;
 const ipaddr = require('ipaddr.js');
 const ipMatching = require('ip-matching');
-const json5 = require('json5');
 // image processing related library imports
 const encode = require('png-chunks-encode');
@@ -851,7 +850,7 @@ app.post('/getstatus', jsonParser, async function (request, response) {
 function tryParse(str) {
     try {
-        return json5.parse(str);
+        return JSON.parse(str);
     } catch {
         return undefined;
     }
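
Lifted out of its surrounding handler, the tryParse helper after this change reads as below; the usage lines are an illustrative sketch, not part of the commit:

// Strict JSON parsing with a silent fallback to undefined instead of throwing.
function tryParse(str) {
    try {
        return JSON.parse(str);
    } catch {
        return undefined;
    }
}

// Illustrative usage: valid JSON parses, JSON5-only syntax now yields undefined.
console.log(tryParse('{"result": "ok"}')); // { result: 'ok' }
console.log(tryParse("{result: 'ok',}"));  // undefined (json5.parse used to accept this)
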
@@ -1088,7 +1087,7 @@ app.post('/renamecharacter', jsonParser, async function (request, response) {
 const rawOldData = await charaRead(oldAvatarPath);
 if (rawOldData === undefined) throw new Error('Failed to read character file');
-const oldData = getCharaCardV2(json5.parse(rawOldData));
+const oldData = getCharaCardV2(JSON.parse(rawOldData));
 _.set(oldData, 'data.name', newName);
 _.set(oldData, 'name', newName);
 const newData = JSON.stringify(oldData);
@@ -1374,7 +1373,7 @@ const processCharacter = async (item, i) => {
 const img_data = await charaRead(charactersPath + item);
 if (img_data === undefined) throw new Error('Failed to read character file');
-let jsonObject = getCharaCardV2(json5.parse(img_data));
+let jsonObject = getCharaCardV2(JSON.parse(img_data));
 jsonObject.avatar = item;
 characters[i] = jsonObject;
 characters[i]['json_data'] = img_data;
@@ -1671,7 +1670,7 @@ function readAndParseFromDirectory(directoryPath, fileExtension = '.json') {
 files.forEach(item => {
     try {
         const file = fs.readFileSync(path.join(directoryPath, item), 'utf-8');
-        parsedFiles.push(fileExtension == '.json' ? json5.parse(file) : file);
+        parsedFiles.push(fileExtension == '.json' ? JSON.parse(file) : file);
     }
     catch {
         // skip
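
For readability, here is a self-contained approximation of readAndParseFromDirectory after the change. Only the signature and the forEach body appear in the hunk; the directory listing, the parsedFiles accumulator, and the return value are assumptions made to keep the sketch runnable:

const fs = require('fs');
const path = require('path');

function readAndParseFromDirectory(directoryPath, fileExtension = '.json') {
    const parsedFiles = [];
    let files = [];
    try {
        // Assumption: the real helper enumerates the directory roughly like this.
        files = fs.readdirSync(directoryPath).filter(item => item.endsWith(fileExtension));
    } catch {
        return parsedFiles;
    }

    files.forEach(item => {
        try {
            const file = fs.readFileSync(path.join(directoryPath, item), 'utf-8');
            // Post-change behavior: strict JSON only; malformed or JSON5-style files are skipped.
            parsedFiles.push(fileExtension == '.json' ? JSON.parse(file) : file);
        }
        catch {
            // skip
        }
    });

    return parsedFiles;
}
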
@@ -1698,7 +1697,7 @@ function readPresetsFromDirectory(directoryPath, options = {}) {
 files.forEach(item => {
     try {
         const file = fs.readFileSync(path.join(directoryPath, item), 'utf8');
-        json5.parse(file);
+        JSON.parse(file);
         fileContents.push(file);
         fileNames.push(removeFileExtension ? item.replace(/\.[^/.]+$/, '') : item);
     } catch {
@@ -1893,7 +1892,7 @@ function readWorldInfoFile(worldInfoName) {
 }
 const worldInfoText = fs.readFileSync(pathToWorldInfo, 'utf8');
-const worldInfo = json5.parse(worldInfoText);
+const worldInfo = JSON.parse(worldInfoText);
 return worldInfo;
 }
@@ -2006,7 +2005,7 @@ app.post('/importcharacter', urlencodedParser, async function (request, response
 response.send({ error: true });
 }
-let jsonData = json5.parse(data);
+let jsonData = JSON.parse(data);
 if (jsonData.spec !== undefined) {
     console.log('importing from v2 json');
@@ -2077,7 +2076,7 @@ app.post('/importcharacter', urlencodedParser, async function (request, response
 var img_data = await charaRead(uploadPath, format);
 if (img_data === undefined) throw new Error('Failed to read character data');
-let jsonData = json5.parse(img_data);
+let jsonData = JSON.parse(img_data);
 jsonData.name = sanitize(jsonData.data?.name || jsonData.name);
 png_name = getPngName(jsonData.name);
@@ -2262,7 +2261,7 @@ app.post('/exportcharacter', jsonParser, async function (request, response) {
 try {
     let json = await charaRead(filename);
     if (json === undefined) return response.sendStatus(400);
-    let jsonObject = getCharaCardV2(json5.parse(json));
+    let jsonObject = getCharaCardV2(JSON.parse(json));
     return response.type('json').send(jsonObject);
 }
 catch {
@@ -2311,7 +2310,7 @@ app.post('/importchat', urlencodedParser, function (request, response) {
 const data = fs.readFileSync(path.join(UPLOADS_PATH, filedata.filename), 'utf8');
 if (format === 'json') {
-    const jsonData = json5.parse(data);
+    const jsonData = JSON.parse(data);
     if (jsonData.histories !== undefined) {
         //console.log('/importchat confirms JSON histories are defined');
         const chat = {
@@ -2399,7 +2398,7 @@ app.post('/importchat', urlencodedParser, function (request, response) {
 if (format === 'jsonl') {
     const line = data.split('\n')[0];
-    let jsonData = json5.parse(line);
+    let jsonData = JSON.parse(line);
     if (jsonData.user_name !== undefined || jsonData.name !== undefined) {
         fs.copyFileSync(path.join(UPLOADS_PATH, filedata.filename), (`${chatsPath + avatar_url}/${ch_name} - ${humanizedISO8601DateTime()}.jsonl`));
@@ -2431,7 +2430,7 @@ app.post('/importworldinfo', urlencodedParser, (request, response) => {
 }
 try {
-    const worldContent = json5.parse(fileContents);
+    const worldContent = JSON.parse(fileContents);
     if (!('entries' in worldContent)) {
         throw new Error('File must contain a world info entries list');
     }
@@ -2597,7 +2596,7 @@ app.post('/getgroups', jsonParser, (_, response) => {
 try {
     const filePath = path.join(DIRECTORIES.groups, file);
     const fileContents = fs.readFileSync(filePath, 'utf8');
-    const group = json5.parse(fileContents);
+    const group = JSON.parse(fileContents);
     const groupStat = fs.statSync(filePath);
     group['date_added'] = groupStat.birthtimeMs;
     group['create_date'] = humanizedISO8601DateTime(groupStat.birthtimeMs);
@@ -2735,7 +2734,7 @@ app.post('/deletegroup', jsonParser, async (request, response) => {
 try {
     // Delete group chats
-    const group = json5.parse(fs.readFileSync(pathToGroup, 'utf8'));
+    const group = JSON.parse(fs.readFileSync(pathToGroup, 'utf8'));
     if (group && Array.isArray(group.chats)) {
         for (const chat of group.chats) {