Mirror of https://github.com/SillyTavern/SillyTavern.git (synced 2025-06-05 21:59:27 +02:00)
Node: Migrate to ES Modules
@@ -16,6 +16,9 @@ module.exports = {
         env: {
             node: true,
         },
+        parserOptions: {
+            sourceType: 'module',
+        },
     },
     {
         files: ['src/**/*.mjs'],
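For context, a rough sketch of the shape of an override like the one above in a legacy ESLint config; the surrounding keys and file globs here are illustrative, not copied from the repository. Note that once package.json declares "type": "module", a legacy config written with module.exports has to live in a .cjs file (or move to the flat config format), because ESLint loads it through require().

    // .eslintrc.cjs-style override (illustrative): parserOptions.sourceType = 'module'
    // tells ESLint to parse the matched files as ES modules, so top-level
    // import/export statements are accepted instead of being reported as errors.
    module.exports = {
        overrides: [
            {
                files: ['src/**/*.js'],
                env: {
                    node: true,
                },
                parserOptions: {
                    sourceType: 'module',
                    ecmaVersion: 2023,
                },
            },
        ],
    };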
@@ -2,7 +2,7 @@
     "compilerOptions": {
         "module": "ESNext",
         "target": "ESNext",
-        "moduleResolution": "node",
+        "moduleResolution": "Node",
         "strictNullChecks": true,
         "strictFunctionTypes": true,
         "checkJs": true,
package-lock.json (generated): 438 changed lines
@@ -25,8 +25,8 @@
                "express": "^4.21.0",
                "form-data": "^4.0.0",
                "google-translate-api-browser": "^3.0.1",
-                "he": "^1.2.0",
                "helmet": "^7.1.0",
+                "html-entities": "^2.5.2",
                "iconv-lite": "^0.6.3",
                "ip-matching": "^2.1.2",
                "ipaddr.js": "^2.0.1",
@@ -34,7 +34,7 @@
                "lodash": "^4.17.21",
                "mime-types": "^2.1.35",
                "multer": "^1.4.5-lts.1",
-                "node-fetch": "^2.6.11",
+                "node-fetch": "^3.3.2",
                "node-persist": "^4.0.1",
                "open": "^8.4.2",
                "png-chunk-text": "^1.0.0",
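The he package is dropped in favor of html-entities, a swap that appears again in package.json below. As a rough sketch (not taken from the repository's code), call sites that decode or encode HTML entities would change along these lines:

    // Before (CommonJS + he):
    // const he = require('he');
    // const text = he.decode('&copy; 2024 &lt;SillyTavern&gt;');

    // After (ESM + html-entities), using its named exports:
    import { decode, encode } from 'html-entities';

    const text = decode('&copy; 2024 &lt;SillyTavern&gt;');   // "© 2024 <SillyTavern>"
    const safe = encode('<b>bold</b>');                       // "&lt;b&gt;bold&lt;/b&gt;"
    console.log(text, safe);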
@@ -59,9 +59,28 @@
                "sillytavern": "server.js"
            },
            "devDependencies": {
+                "@types/archiver": "^6.0.2",
+                "@types/command-exists": "^1.2.3",
+                "@types/compression": "^1.7.5",
+                "@types/cookie-parser": "^1.4.7",
+                "@types/cookie-session": "^2.0.49",
+                "@types/cors": "^2.8.17",
                "@types/dompurify": "^3.0.5",
+                "@types/express": "^5.0.0",
                "@types/jquery": "^3.5.29",
+                "@types/lodash": "^4.17.10",
+                "@types/mime-types": "^2.1.4",
+                "@types/multer": "^1.4.12",
+                "@types/node-fetch": "^2.6.11",
+                "@types/node-persist": "^3.1.8",
+                "@types/png-chunk-text": "^1.0.3",
+                "@types/png-chunks-encode": "^1.0.2",
+                "@types/png-chunks-extract": "^1.0.2",
+                "@types/response-time": "^2.3.8",
                "@types/toastr": "^2.1.43",
+                "@types/write-file-atomic": "^4.0.3",
+                "@types/yargs": "^17.0.33",
+                "@types/yauzl": "^2.10.3",
                "eslint": "^8.57.0"
            },
            "engines": {
@ -949,6 +968,27 @@
|
|||||||
"integrity": "sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA==",
|
"integrity": "sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/@types/archiver": {
|
||||||
|
"version": "6.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/archiver/-/archiver-6.0.2.tgz",
|
||||||
|
"integrity": "sha512-KmROQqbQzKGuaAbmK+ZcytkJ51+YqDa7NmbXjmtC5YBLSyQYo21YaUnQ3HbaPFKL1ooo6RQ6OPYPIDyxfpDDXw==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/readdir-glob": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@types/body-parser": {
|
||||||
|
"version": "1.19.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.5.tgz",
|
||||||
|
"integrity": "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/connect": "*",
|
||||||
|
"@types/node": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@types/cacheable-request": {
|
"node_modules/@types/cacheable-request": {
|
||||||
"version": "6.0.3",
|
"version": "6.0.3",
|
||||||
"resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.3.tgz",
|
"resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.3.tgz",
|
||||||
@ -961,6 +1001,64 @@
|
|||||||
"@types/responselike": "^1.0.0"
|
"@types/responselike": "^1.0.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@types/command-exists": {
|
||||||
|
"version": "1.2.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/command-exists/-/command-exists-1.2.3.tgz",
|
||||||
|
"integrity": "sha512-PpbaE2XWLaWYboXD6k70TcXO/OdOyyRFq5TVpmlUELNxdkkmXU9fkImNosmXU1DtsNrqdUgWd/nJQYXgwmtdXQ==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/@types/compression": {
|
||||||
|
"version": "1.7.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/compression/-/compression-1.7.5.tgz",
|
||||||
|
"integrity": "sha512-AAQvK5pxMpaT+nDvhHrsBhLSYG5yQdtkaJE1WYieSNY2mVFKAgmU4ks65rkZD5oqnGCFLyQpUr1CqI4DmUMyDg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/express": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@types/connect": {
|
||||||
|
"version": "3.4.38",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz",
|
||||||
|
"integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/node": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@types/cookie-parser": {
|
||||||
|
"version": "1.4.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/cookie-parser/-/cookie-parser-1.4.7.tgz",
|
||||||
|
"integrity": "sha512-Fvuyi354Z+uayxzIGCwYTayFKocfV7TuDYZClCdIP9ckhvAu/ixDtCB6qx2TT0FKjPLf1f3P/J1rgf6lPs64mw==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/express": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@types/cookie-session": {
|
||||||
|
"version": "2.0.49",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/cookie-session/-/cookie-session-2.0.49.tgz",
|
||||||
|
"integrity": "sha512-4E/bBjlqLhU5l4iGPR+NkVJH593hpNsT4dC3DJDr+ODm6Qpe13kZQVkezRIb+TYDXaBMemS3yLQ+0leba3jlkQ==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/express": "*",
|
||||||
|
"@types/keygrip": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@types/cors": {
|
||||||
|
"version": "2.8.17",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.17.tgz",
|
||||||
|
"integrity": "sha512-8CGDvrBj1zgo2qE+oS3pOCyYNqCPryMWY2bGfwA0dcfopWGgxs+78df0Rs3rc9THP4JkOhLsAa+15VdpAqkcUA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/node": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@types/dompurify": {
|
"node_modules/@types/dompurify": {
|
||||||
"version": "3.0.5",
|
"version": "3.0.5",
|
||||||
"resolved": "https://registry.npmjs.org/@types/dompurify/-/dompurify-3.0.5.tgz",
|
"resolved": "https://registry.npmjs.org/@types/dompurify/-/dompurify-3.0.5.tgz",
|
||||||
@ -971,12 +1069,45 @@
|
|||||||
"@types/trusted-types": "*"
|
"@types/trusted-types": "*"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@types/express": {
|
||||||
|
"version": "5.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/express/-/express-5.0.0.tgz",
|
||||||
|
"integrity": "sha512-DvZriSMehGHL1ZNLzi6MidnsDhUZM/x2pRdDIKdwbUNqqwHxMlRdkxtn6/EPKyqKpHqTl/4nRZsRNLpZxZRpPQ==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/body-parser": "*",
|
||||||
|
"@types/express-serve-static-core": "^5.0.0",
|
||||||
|
"@types/qs": "*",
|
||||||
|
"@types/serve-static": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@types/express-serve-static-core": {
|
||||||
|
"version": "5.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-5.0.0.tgz",
|
||||||
|
"integrity": "sha512-AbXMTZGt40T+KON9/Fdxx0B2WK5hsgxcfXJLr5bFpZ7b4JCex2WyQPTEKdXqfHiY5nKKBScZ7yCoO6Pvgxfvnw==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/node": "*",
|
||||||
|
"@types/qs": "*",
|
||||||
|
"@types/range-parser": "*",
|
||||||
|
"@types/send": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@types/http-cache-semantics": {
|
"node_modules/@types/http-cache-semantics": {
|
||||||
"version": "4.0.2",
|
"version": "4.0.2",
|
||||||
"resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.2.tgz",
|
"resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.2.tgz",
|
||||||
"integrity": "sha512-FD+nQWA2zJjh4L9+pFXqWOi0Hs1ryBCfI+985NjluQ1p8EYtoLvjLOKidXBtZ4/IcxDX4o8/E8qDS3540tNliw==",
|
"integrity": "sha512-FD+nQWA2zJjh4L9+pFXqWOi0Hs1ryBCfI+985NjluQ1p8EYtoLvjLOKidXBtZ4/IcxDX4o8/E8qDS3540tNliw==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/@types/http-errors": {
|
||||||
|
"version": "2.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz",
|
||||||
|
"integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/@types/jquery": {
|
"node_modules/@types/jquery": {
|
||||||
"version": "3.5.31",
|
"version": "3.5.31",
|
||||||
"resolved": "https://registry.npmjs.org/@types/jquery/-/jquery-3.5.31.tgz",
|
"resolved": "https://registry.npmjs.org/@types/jquery/-/jquery-3.5.31.tgz",
|
||||||
@ -987,6 +1118,13 @@
|
|||||||
"@types/sizzle": "*"
|
"@types/sizzle": "*"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@types/keygrip": {
|
||||||
|
"version": "1.0.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/keygrip/-/keygrip-1.0.6.tgz",
|
||||||
|
"integrity": "sha512-lZuNAY9xeJt7Bx4t4dx0rYCDqGPW8RXhQZK1td7d4H6E9zYbLoOtjBvfwdTKpsyxQI/2jv+armjX/RW+ZNpXOQ==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/@types/keyv": {
|
"node_modules/@types/keyv": {
|
||||||
"version": "3.1.4",
|
"version": "3.1.4",
|
||||||
"resolved": "https://registry.npmjs.org/@types/keyv/-/keyv-3.1.4.tgz",
|
"resolved": "https://registry.npmjs.org/@types/keyv/-/keyv-3.1.4.tgz",
|
||||||
@ -996,12 +1134,43 @@
|
|||||||
"@types/node": "*"
|
"@types/node": "*"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@types/lodash": {
|
||||||
|
"version": "4.17.10",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.10.tgz",
|
||||||
|
"integrity": "sha512-YpS0zzoduEhuOWjAotS6A5AVCva7X4lVlYLF0FYHAY9sdraBfnatttHItlWeZdGhuEkf+OzMNg2ZYAx8t+52uQ==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/@types/long": {
|
"node_modules/@types/long": {
|
||||||
"version": "4.0.2",
|
"version": "4.0.2",
|
||||||
"resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz",
|
"resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz",
|
||||||
"integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==",
|
"integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/@types/mime": {
|
||||||
|
"version": "1.3.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz",
|
||||||
|
"integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/@types/mime-types": {
|
||||||
|
"version": "2.1.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/mime-types/-/mime-types-2.1.4.tgz",
|
||||||
|
"integrity": "sha512-lfU4b34HOri+kAY5UheuFMWPDOI+OPceBSHZKp69gEyTL/mmJ4cnU6Y/rlme3UL3GyOn6Y42hyIEw0/q8sWx5w==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/@types/multer": {
|
||||||
|
"version": "1.4.12",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/multer/-/multer-1.4.12.tgz",
|
||||||
|
"integrity": "sha512-pQ2hoqvXiJt2FP9WQVLPRO+AmiIm/ZYkavPlIQnx282u4ZrVdztx0pkh3jjpQt0Kz+YI0YhSG264y08UJKoUQg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/express": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@types/node": {
|
"node_modules/@types/node": {
|
||||||
"version": "16.9.1",
|
"version": "16.9.1",
|
||||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.9.1.tgz",
|
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.9.1.tgz",
|
||||||
@ -1009,15 +1178,81 @@
|
|||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/@types/node-fetch": {
|
"node_modules/@types/node-fetch": {
|
||||||
"version": "2.6.9",
|
"version": "2.6.11",
|
||||||
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.9.tgz",
|
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.11.tgz",
|
||||||
"integrity": "sha512-bQVlnMLFJ2d35DkPNjEPmd9ueO/rh5EiaZt2bhqiSarPjZIuIV6bPQVqcrEyvNo+AfTrRGVazle1tl597w3gfA==",
|
"integrity": "sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@types/node": "*",
|
"@types/node": "*",
|
||||||
"form-data": "^4.0.0"
|
"form-data": "^4.0.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@types/node-persist": {
|
||||||
|
"version": "3.1.8",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/node-persist/-/node-persist-3.1.8.tgz",
|
||||||
|
"integrity": "sha512-QLidg6/SadZYPrTKxtxL1A85XBoQlG40bhoMdhu6DH6+eNCMr2j+RGfFZ9I9+IY8W/PDwQonJ+iBWD62jZjMfg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/node": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@types/png-chunk-text": {
|
||||||
|
"version": "1.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/png-chunk-text/-/png-chunk-text-1.0.3.tgz",
|
||||||
|
"integrity": "sha512-7keEFz73uNJ9Ar1XMCNnHEXT9pICJnouMQCCYgBEmHMgdkXaQzSTmSvr6tUDSqgdEgmlRAxZd97wprgliyZoCg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/@types/png-chunks-encode": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/png-chunks-encode/-/png-chunks-encode-1.0.2.tgz",
|
||||||
|
"integrity": "sha512-Dxn0aXEcSg1wVeHjvNlygm/+fKBDzWMCdxJYhjGUTeefFW/jYxWcrg+W7ppLBfH44iJMqeVBHtHBwtYQUeYvgw==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/@types/png-chunks-extract": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/png-chunks-extract/-/png-chunks-extract-1.0.2.tgz",
|
||||||
|
"integrity": "sha512-z6djfFIbrrddtunoMJBOPlyZrnmeuG1kkvHUNi2QfpOb+JMMLuLliHHTmMyRi7k7LiTAut0HbdGCF6ibDtQAHQ==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/@types/qs": {
|
||||||
|
"version": "6.9.16",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.16.tgz",
|
||||||
|
"integrity": "sha512-7i+zxXdPD0T4cKDuxCUXJ4wHcsJLwENa6Z3dCu8cfCK743OGy5Nu1RmAGqDPsoTDINVEcdXKRvR/zre+P2Ku1A==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/@types/range-parser": {
|
||||||
|
"version": "1.2.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz",
|
||||||
|
"integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/@types/readdir-glob": {
|
||||||
|
"version": "1.1.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/readdir-glob/-/readdir-glob-1.1.5.tgz",
|
||||||
|
"integrity": "sha512-raiuEPUYqXu+nvtY2Pe8s8FEmZ3x5yAH4VkLdihcPdalvsHltomrRC9BzuStrJ9yk06470hS0Crw0f1pXqD+Hg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/node": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@types/response-time": {
|
||||||
|
"version": "2.3.8",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/response-time/-/response-time-2.3.8.tgz",
|
||||||
|
"integrity": "sha512-7qGaNYvdxc0zRab8oHpYx7AW17qj+G0xuag1eCrw3M2VWPJQ/HyKaaghWygiaOUl0y9x7QGQwppDpqLJ5V9pzw==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/express": "*",
|
||||||
|
"@types/node": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@types/responselike": {
|
"node_modules/@types/responselike": {
|
||||||
"version": "1.0.1",
|
"version": "1.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.1.tgz",
|
||||||
@ -1027,6 +1262,29 @@
|
|||||||
"@types/node": "*"
|
"@types/node": "*"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@types/send": {
|
||||||
|
"version": "0.17.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz",
|
||||||
|
"integrity": "sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/mime": "^1",
|
||||||
|
"@types/node": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@types/serve-static": {
|
||||||
|
"version": "1.15.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.7.tgz",
|
||||||
|
"integrity": "sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/http-errors": "*",
|
||||||
|
"@types/node": "*",
|
||||||
|
"@types/send": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@types/sizzle": {
|
"node_modules/@types/sizzle": {
|
||||||
"version": "2.3.8",
|
"version": "2.3.8",
|
||||||
"resolved": "https://registry.npmjs.org/@types/sizzle/-/sizzle-2.3.8.tgz",
|
"resolved": "https://registry.npmjs.org/@types/sizzle/-/sizzle-2.3.8.tgz",
|
||||||
@ -1050,6 +1308,43 @@
|
|||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/@types/write-file-atomic": {
|
||||||
|
"version": "4.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/write-file-atomic/-/write-file-atomic-4.0.3.tgz",
|
||||||
|
"integrity": "sha512-qdo+vZRchyJIHNeuI1nrpsLw+hnkgqP/8mlaN6Wle/NKhydHmUN9l4p3ZE8yP90AJNJW4uB8HQhedb4f1vNayQ==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/node": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@types/yargs": {
|
||||||
|
"version": "17.0.33",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz",
|
||||||
|
"integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/yargs-parser": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@types/yargs-parser": {
|
||||||
|
"version": "21.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz",
|
||||||
|
"integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/@types/yauzl": {
|
||||||
|
"version": "2.10.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.10.3.tgz",
|
||||||
|
"integrity": "sha512-oJoftv0LSuaDZE3Le4DbKX+KS9G36NzOeSap90UIK0yMA/NhKJhqlSGtNDORNRaIbQfzjXDrQa0ytJ6mNRGz/Q==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/node": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@ungap/structured-clone": {
|
"node_modules/@ungap/structured-clone": {
|
||||||
"version": "1.2.0",
|
"version": "1.2.0",
|
||||||
"resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz",
|
"resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz",
|
||||||
@ -3009,6 +3304,29 @@
|
|||||||
"pend": "~1.2.0"
|
"pend": "~1.2.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/fetch-blob": {
|
||||||
|
"version": "3.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz",
|
||||||
|
"integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==",
|
||||||
|
"funding": [
|
||||||
|
{
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/jimmywarting"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "paypal",
|
||||||
|
"url": "https://paypal.me/jimmywarting"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"node-domexception": "^1.0.0",
|
||||||
|
"web-streams-polyfill": "^3.0.3"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "^12.20 || >= 14.13"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/file-entry-cache": {
|
"node_modules/file-entry-cache": {
|
||||||
"version": "6.0.1",
|
"version": "6.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
|
||||||
@ -3178,6 +3496,18 @@
|
|||||||
"node": ">= 14"
|
"node": ">= 14"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/formdata-polyfill": {
|
||||||
|
"version": "4.0.10",
|
||||||
|
"resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz",
|
||||||
|
"integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"fetch-blob": "^3.1.2"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12.20.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/forwarded": {
|
"node_modules/forwarded": {
|
||||||
"version": "0.2.0",
|
"version": "0.2.0",
|
||||||
"resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",
|
"resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",
|
||||||
@ -3489,14 +3819,6 @@
|
|||||||
"node": ">= 0.4"
|
"node": ">= 0.4"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/he": {
|
|
||||||
"version": "1.2.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz",
|
|
||||||
"integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==",
|
|
||||||
"bin": {
|
|
||||||
"he": "bin/he"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/helmet": {
|
"node_modules/helmet": {
|
||||||
"version": "7.1.0",
|
"version": "7.1.0",
|
||||||
"resolved": "https://registry.npmjs.org/helmet/-/helmet-7.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/helmet/-/helmet-7.1.0.tgz",
|
||||||
@ -3505,6 +3827,22 @@
|
|||||||
"node": ">=16.0.0"
|
"node": ">=16.0.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/html-entities": {
|
||||||
|
"version": "2.5.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/html-entities/-/html-entities-2.5.2.tgz",
|
||||||
|
"integrity": "sha512-K//PSRMQk4FZ78Kyau+mZurHn3FH0Vwr+H36eE0rPbeYkRRi9YxceYPhuN60UwWorxyKHhqoAJl2OFKa4BVtaA==",
|
||||||
|
"funding": [
|
||||||
|
{
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/mdevils"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "patreon",
|
||||||
|
"url": "https://patreon.com/mdevils"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/htmlparser2": {
|
"node_modules/htmlparser2": {
|
||||||
"version": "8.0.2",
|
"version": "8.0.2",
|
||||||
"resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz",
|
"resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz",
|
||||||
@ -3876,6 +4214,26 @@
|
|||||||
"whatwg-fetch": "^3.4.1"
|
"whatwg-fetch": "^3.4.1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/isomorphic-fetch/node_modules/node-fetch": {
|
||||||
|
"version": "2.7.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
|
||||||
|
"integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"whatwg-url": "^5.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "4.x || >=6.0.0"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"encoding": "^0.1.0"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"encoding": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/jackspeak": {
|
"node_modules/jackspeak": {
|
||||||
"version": "2.3.6",
|
"version": "2.3.6",
|
||||||
"resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz",
|
"resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz",
|
||||||
@@ -4355,23 +4713,30 @@
            }
        },
        "node_modules/node-fetch": {
-            "version": "2.6.12",
-            "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.12.tgz",
-            "integrity": "sha512-C/fGU2E8ToujUivIO0H+tpQ6HWo4eEmchoPIoXtxCrVghxdKq+QOHqEZW7tuP3KlV3bC8FRMO5nMCC7Zm1VP6g==",
+            "version": "3.3.2",
+            "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz",
+            "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==",
            "license": "MIT",
            "dependencies": {
-                "whatwg-url": "^5.0.0"
+                "data-uri-to-buffer": "^4.0.0",
+                "fetch-blob": "^3.1.4",
+                "formdata-polyfill": "^4.0.10"
            },
            "engines": {
-                "node": "4.x || >=6.0.0"
+                "node": "^12.20.0 || ^14.13.1 || >=16.0.0"
            },
-            "peerDependencies": {
-                "encoding": "^0.1.0"
-            },
-            "peerDependenciesMeta": {
-                "encoding": {
-                    "optional": true
-                }
+            "funding": {
+                "type": "opencollective",
+                "url": "https://opencollective.com/node-fetch"
+            }
+        },
+        "node_modules/node-fetch/node_modules/data-uri-to-buffer": {
+            "version": "4.0.1",
+            "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz",
+            "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==",
+            "license": "MIT",
+            "engines": {
+                "node": ">= 12"
            }
        },
        "node_modules/node-persist": {
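node-fetch v3 is ESM-only, which is presumably why the major-version bump rides along with this migration: it cannot be loaded with require() at all. A minimal usage sketch (the URL is a placeholder):

    // node-fetch v3 must be loaded with import (or dynamic import()), not require().
    import fetch from 'node-fetch';

    const response = await fetch('https://example.com/api/ping');  // placeholder URL
    if (!response.ok) {
        throw new Error(`Request failed: ${response.status}`);
    }
    const data = await response.json();
    console.log(data);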
@ -4546,6 +4911,26 @@
|
|||||||
"undici-types": "~5.26.4"
|
"undici-types": "~5.26.4"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/openai/node_modules/node-fetch": {
|
||||||
|
"version": "2.7.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
|
||||||
|
"integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"whatwg-url": "^5.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "4.x || >=6.0.0"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"encoding": "^0.1.0"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"encoding": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/optionator": {
|
"node_modules/optionator": {
|
||||||
"version": "0.9.3",
|
"version": "0.9.3",
|
||||||
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz",
|
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz",
|
||||||
@@ -5808,6 +6193,7 @@
            "version": "5.0.0",
            "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.0.0.tgz",
            "integrity": "sha512-tk2G5R2KRwBd+ZN0zaEXpmzdKyOYksXwywulIX95MBODjSzMIuQnQ3m8JxgbhnL1LeVo7lqQKsYa1O3Htl7K5g==",
+            "license": "MIT",
            "dependencies": {
                "punycode": "^2.3.1"
            },
@@ -6012,6 +6398,7 @@
            "version": "7.0.0",
            "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz",
            "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==",
+            "license": "BSD-2-Clause",
            "engines": {
                "node": ">=12"
            }
@@ -6026,6 +6413,7 @@
            "version": "14.0.0",
            "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.0.0.tgz",
            "integrity": "sha512-1lfMEm2IEr7RIV+f4lUNPOqfFL+pO+Xw3fJSqmjX9AbXcXcYOkCe1P6+9VBZB6n94af16NfZf+sSk0JCBZC9aw==",
+            "license": "MIT",
            "dependencies": {
                "tr46": "^5.0.0",
                "webidl-conversions": "^7.0.0"
package.json: 25 changed lines
@@ -15,8 +15,8 @@
        "express": "^4.21.0",
        "form-data": "^4.0.0",
        "google-translate-api-browser": "^3.0.1",
-        "he": "^1.2.0",
        "helmet": "^7.1.0",
+        "html-entities": "^2.5.2",
        "iconv-lite": "^0.6.3",
        "ip-matching": "^2.1.2",
        "ipaddr.js": "^2.0.1",
@@ -24,7 +24,7 @@
        "lodash": "^4.17.21",
        "mime-types": "^2.1.35",
        "multer": "^1.4.5-lts.1",
-        "node-fetch": "^2.6.11",
+        "node-fetch": "^3.3.2",
        "node-persist": "^4.0.1",
        "open": "^8.4.2",
        "png-chunk-text": "^1.0.0",
@@ -60,7 +60,7 @@
        }
    },
    "name": "sillytavern",
-    "type": "commonjs",
+    "type": "module",
    "license": "AGPL-3.0",
    "repository": {
        "type": "git",
@@ -85,9 +85,28 @@
    },
    "main": "server.js",
    "devDependencies": {
+        "@types/archiver": "^6.0.2",
+        "@types/command-exists": "^1.2.3",
+        "@types/compression": "^1.7.5",
+        "@types/cookie-parser": "^1.4.7",
+        "@types/cookie-session": "^2.0.49",
+        "@types/cors": "^2.8.17",
        "@types/dompurify": "^3.0.5",
+        "@types/express": "^5.0.0",
        "@types/jquery": "^3.5.29",
+        "@types/lodash": "^4.17.10",
+        "@types/mime-types": "^2.1.4",
+        "@types/multer": "^1.4.12",
+        "@types/node-fetch": "^2.6.11",
+        "@types/node-persist": "^3.1.8",
+        "@types/png-chunk-text": "^1.0.3",
+        "@types/png-chunks-encode": "^1.0.2",
+        "@types/png-chunks-extract": "^1.0.2",
+        "@types/response-time": "^2.3.8",
        "@types/toastr": "^2.1.43",
+        "@types/write-file-atomic": "^4.0.3",
+        "@types/yargs": "^17.0.33",
+        "@types/yauzl": "^2.10.3",
        "eslint": "^8.57.0"
    }
}
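Flipping "type" from "commonjs" to "module" is the switch that drives the rest of the commit: it makes Node treat every .js file in the package as an ES module. A small sketch of the practical difference:

    // With "type": "module", plain .js files use import/export at the top level:
    import { join } from 'node:path';
    console.log(join('data', 'default-user'));

    // The CommonJS conveniences are gone in this mode:
    // require, module.exports, __dirname and __filename are all undefined here,
    // and any file that must stay CommonJS has to be renamed to .cjs to keep working.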
@@ -3,11 +3,12 @@
 // 1. node plugins.js update
 // 2. node plugins.js install <plugin-git-url>
 // More operations coming soon.
-const { default: git } = require('simple-git');
-const fs = require('fs');
-const path = require('path');
-const { color } = require('./src/util');
+import { default as git } from 'simple-git';
+import * as fs from 'node:fs';
+import * as path from 'node:path';
+import { color } from './src/util.js';
 
+const __dirname = import.meta.dirname;
 process.chdir(__dirname);
 const pluginsPath = './plugins';
 
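The added `const __dirname = import.meta.dirname;` line recreates a CommonJS global that simply does not exist inside an ES module. import.meta.dirname is only available on fairly recent Node releases (roughly 20.11+), so a sketch of the commonly used fallback for older runtimes, under that assumption:

    import { fileURLToPath } from 'node:url';
    import * as path from 'node:path';

    // ESM replacement for the CommonJS __filename/__dirname globals.
    const __filename = fileURLToPath(import.meta.url);
    const __dirname = import.meta.dirname ?? path.dirname(__filename);

    console.log('This module lives in:', __dirname);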
@@ -1,11 +1,12 @@
 /**
  * Scripts to be done before starting the server for the first time.
  */
-const fs = require('fs');
-const path = require('path');
-const crypto = require('crypto');
-const yaml = require('yaml');
-const _ = require('lodash');
+import * as fs from 'node:fs';
+import * as path from 'node:path';
+import * as crypto from 'node:crypto';
+import * as yaml from 'yaml';
+import _ from 'lodash';
+import { createRequire } from 'node:module';
 
 /**
  * Colorizes console output.
@@ -59,6 +60,7 @@ function convertConfig() {
 
     try {
         console.log(color.blue('Converting config.conf to config.yaml. Your old config.conf will be renamed to config.conf.bak'));
+        const require = createRequire(import.meta.url);
         const config = require(path.join(process.cwd(), './config.conf'));
         fs.copyFileSync('./config.conf', './config.conf.bak');
         fs.rmSync('./config.conf');
@@ -75,7 +77,7 @@ function convertConfig() {
  * Compares the current config.yaml with the default config.yaml and adds any missing values.
  */
 function addMissingConfigValues() {
     try {
         const defaultConfig = yaml.parse(fs.readFileSync(path.join(process.cwd(), './default/config.yaml'), 'utf8'));
         let config = yaml.parse(fs.readFileSync(path.join(process.cwd(), './config.yaml'), 'utf8'));
 
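Because require() is not defined in an ES module, the converted post-install script pulls in createRequire so it can still load the legacy config.conf (a CommonJS-style file) during conversion. A compact sketch of the pattern:

    import { createRequire } from 'node:module';

    // createRequire() builds a CommonJS-style require() bound to this module's URL,
    // which is how an ES module can still load CommonJS-flavoured files.
    const require = createRequire(import.meta.url);
    console.log(typeof require.resolve);   // 'function'

    // Mirroring the converted script (placeholder path, must exist on disk to succeed):
    // const config = require('./config.conf');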
@@ -1,7 +1,7 @@
-const yaml = require('yaml');
-const fs = require('fs');
-const storage = require('node-persist');
-const users = require('./src/users');
+import * as fs from 'node:fs';
+import yaml from 'yaml';
+import storage from 'node-persist';
+import * as users from './src/users';
 
 const userAccount = process.argv[2];
 const userPassword = process.argv[3];
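The converted requires above illustrate the two interop shapes used throughout this commit: packages that ship a usable default export (yaml, node-persist, lodash) are imported as defaults, while modules exposing several named exports are imported as namespaces. A hedged sketch of the distinction:

    // Default import: the module's default export becomes the local binding.
    import yaml from 'yaml';

    // Namespace import: every named export is collected onto one object.
    import * as path from 'node:path';

    console.log(typeof yaml.parse);     // 'function' (a method on the default export)
    console.log(path.join('a', 'b'));   // a named export accessed via the namespace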
server.js: 286 changed lines
@@ -1,32 +1,33 @@
 #!/usr/bin/env node
 
 // native node modules
-const fs = require('fs');
-const http = require('http');
-const https = require('https');
-const path = require('path');
-const util = require('util');
+import * as fs from 'node:fs';
+import * as http from 'node:http';
+import * as https from 'node:https';
+import * as path from 'node:path';
+import * as util from 'node:util';
+import * as net from 'node:net';
+import * as dns from 'node:dns';
 
 // cli/fs related library imports
-const open = require('open');
-const yargs = require('yargs/yargs');
-const { hideBin } = require('yargs/helpers');
+import * as open from 'open';
+import yargs from 'yargs/yargs';
+import { hideBin } from 'yargs/helpers';
 
 // express/server related library imports
-const cors = require('cors');
-const doubleCsrf = require('csrf-csrf').doubleCsrf;
-const express = require('express');
-const compression = require('compression');
-const cookieParser = require('cookie-parser');
-const cookieSession = require('cookie-session');
-const multer = require('multer');
-const responseTime = require('response-time');
-const helmet = require('helmet').default;
+import cors from 'cors';
+import { doubleCsrf } from 'csrf-csrf';
+import express from 'express';
+import compression from 'compression';
+import cookieParser from 'cookie-parser';
+import cookieSession from 'cookie-session';
+import multer from 'multer';
+import responseTime from 'response-time';
+import helmet from 'helmet';
+import bodyParser from 'body-parser';
 
 // net related library imports
-const net = require('net');
-const dns = require('dns');
-const fetch = require('node-fetch').default;
+import fetch from 'node-fetch';
 
 // Unrestrict console logs display limit
 util.inspect.defaultOptions.maxArrayLength = null;
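The built-in imports now use the node: scheme. Functionally it is the same core module either way, but the prefix makes it explicit that the specifier is a Node built-in and can never be shadowed by a package of the same name in node_modules. A tiny sketch:

    // Both forms load the same core module; the node: form is unambiguous.
    import * as fs from 'node:fs';
    import { readFileSync } from 'node:fs';

    console.log(typeof fs.readFileSync === 'function' && readFileSync === fs.readFileSync);  // true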
@@ -34,20 +35,67 @@ util.inspect.defaultOptions.maxStringLength = null;
 util.inspect.defaultOptions.depth = 4;
 
 // local library imports
-const userModule = require('./src/users');
-const basicAuthMiddleware = require('./src/middleware/basicAuth');
-const whitelistMiddleware = require('./src/middleware/whitelist');
-const initRequestProxy = require('./src/request-proxy');
-const contentManager = require('./src/endpoints/content-manager');
-const {
+import * as loader from './src/plugin-loader.js';
+import * as userModule from './src/users.js';
+import basicAuthMiddleware from './src/middleware/basicAuth.js';
+import whitelistMiddleware from './src/middleware/whitelist.js';
+import multerMonkeyPatch from './src/middleware/multerMonkeyPatch.js';
+import initRequestProxy from './src/request-proxy.js';
+import * as contentManager from './src/endpoints/content-manager.js';
+import {
     getVersion,
     getConfigValue,
     color,
     forwardFetchResponse,
     removeColorFormatting,
     getSeparator,
-} = require('./src/util');
-const { ensureThumbnailCache } = require('./src/endpoints/thumbnails');
+} from './src/util.js';
+import { UPLOADS_DIRECTORY } from './src/constants.js';
+import { ensureThumbnailCache } from './src/endpoints/thumbnails.js';
 
+// Routers
+import { router as usersPublicRouter } from './src/endpoints/users-public.js';
+import { router as usersPrivateRouter } from './src/endpoints/users-private.js';
+import { router as usersAdminRouter } from './src/endpoints/users-admin.js';
+import { router as movingUIRouter } from './src/endpoints/moving-ui.js';
+import { router as imagesRouter } from './src/endpoints/images.js';
+import { router as quickRepliesRouter } from './src/endpoints/quick-replies.js';
+import { router as avatarsRouter } from './src/endpoints/avatars.js';
+import { router as themesRouter } from './src/endpoints/themes.js';
+import { router as openAiRouter } from './src/endpoints/openai.js';
+import { router as googleRouter } from './src/endpoints/google.js';
+import { router as anthropicRouter } from './src/endpoints/anthropic.js';
+import { router as tokenizersRouter } from './src/endpoints/tokenizers.js';
+import { router as presetsRouter } from './src/endpoints/presets.js';
+import { router as secretsRouter } from './src/endpoints/secrets.js';
+import { router as thumbnailRouter } from './src/endpoints/thumbnails.js';
+import { router as novelAiRouter } from './src/endpoints/novelai.js';
+import { router as extensionsRouter } from './src/endpoints/extensions.js';
+import { router as assetsRouter } from './src/endpoints/assets.js';
+import { router as filesRouter } from './src/endpoints/files.js';
+import { router as charactersRouter } from './src/endpoints/characters.js';
+import { router as chatsRouter } from './src/endpoints/chats.js';
+import { router as groupsRouter } from './src/endpoints/groups.js';
+import { router as worldInfoRouter } from './src/endpoints/worldinfo.js';
+import { router as statsRouter, init as statsInit, onExit as statsOnExit } from './src/endpoints/stats.js';
+import { router as backgroundsRouter } from './src/endpoints/backgrounds.js';
+import { router as spritesRouter } from './src/endpoints/sprites.js';
+import { router as contentManagerRouter } from './src/endpoints/content-manager.js';
+import { router as settingsRouter, init as settingsInit } from './src/endpoints/settings.js';
+import { router as stableDiffusionRouter } from './src/endpoints/stable-diffusion.js';
+import { router as hordeRouter } from './src/endpoints/horde.js';
+import { router as vectorsRouter } from './src/endpoints/vectors.js';
+import { router as translateRouter } from './src/endpoints/translate.js';
+import { router as classifyRouter } from './src/endpoints/classify.js';
+import { router as captionRouter } from './src/endpoints/caption.js';
+import { router as searchRouter } from './src/endpoints/search.js';
+import { router as openRouterRouter } from './src/endpoints/openrouter.js';
+import { router as chatCompletionsRouter } from './src/endpoints/backends/chat-completions.js';
+import { router as koboldRouter } from './src/endpoints/backends/kobold.js';
+import { router as textCompletionsRouter } from './src/endpoints/backends/text-completions.js';
+import { router as scaleAltRouter } from './src/endpoints/backends/scale-alt.js';
+import { router as speechRouter } from './src/endpoints/speech.js';
+import { router as azureRouter } from './src/endpoints/azure.js';
+
 // Work around a node v20.0.0, v20.1.0, and v20.2.0 bug. The issue was fixed in v20.3.0.
 // https://github.com/nodejs/node/issues/47822#issuecomment-1564708870
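Each `import { router as ... }` line above assumes the endpoint modules now expose their Express router as a named export instead of attaching it to module.exports. A minimal sketch of what such an endpoint module might look like after the migration (the route and handlers are illustrative, not taken from the repository):

    import express from 'express';

    export const router = express.Router();

    // Illustrative endpoint; real modules register their actual handlers here.
    router.get('/ping', (request, response) => {
        response.sendStatus(204);
    });

    // Modules that need setup or teardown can export those as well, matching the
    // `import { router, init, onExit }` pattern used for stats and settings above.
    export async function init() { /* load persisted state */ }
    export async function onExit() { /* flush state to disk */ }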
@@ -166,8 +214,8 @@ const cliArguments = yargs(hideBin(process.argv))
 }).parseSync();
 
 // change all relative paths
-console.log(`Node version: ${process.version}. Running in ${process.env.NODE_ENV} environment.`);
-const serverDirectory = __dirname;
+const serverDirectory = import.meta.dirname;
+console.log(`Node version: ${process.version}. Running in ${process.env.NODE_ENV} environment. Server directory: ${serverDirectory}`);
 process.chdir(serverDirectory);
 
 const app = express();
@@ -188,7 +236,7 @@ const basicAuthMode = cliArguments.basicAuthMode ?? getConfigValue('basicAuthMod
 const perUserBasicAuth = getConfigValue('perUserBasicAuth', DEFAULT_PER_USER_BASIC_AUTH);
 const enableAccounts = getConfigValue('enableUserAccounts', DEFAULT_ACCOUNTS);
 
-const uploadsPath = path.join(dataRoot, require('./src/constants').UPLOADS_DIRECTORY);
+const uploadsPath = path.join(dataRoot, UPLOADS_DIRECTORY);
 
 const enableIPv6 = cliArguments.enableIPv6 ?? getConfigValue('protocol.ipv6', DEFAULT_ENABLE_IPV6);
 const enableIPv4 = cliArguments.enableIPv4 ?? getConfigValue('protocol.ipv4', DEFAULT_ENABLE_IPV4);
@@ -232,7 +280,6 @@ if (listen && basicAuthMode) app.use(basicAuthMiddleware);
 app.use(whitelistMiddleware(enableWhitelist, listen));
 
 if (enableCorsProxy) {
-    const bodyParser = require('body-parser');
     app.use(bodyParser.json({
         limit: '200mb',
     }));
@@ -379,7 +426,7 @@ app.get('/login', async (request, response) => {
 app.use(express.static(process.cwd() + '/public', {}));
 
 // Public API
-app.use('/api/users', require('./src/endpoints/users-public').router);
+app.use('/api/users', usersPublicRouter);
 
 // Everything below this line requires authentication
 app.use(userModule.requireLoginMiddleware);
@@ -387,14 +434,15 @@ app.get('/api/ping', (_, response) => response.sendStatus(204));
 
 // File uploads
 app.use(multer({ dest: uploadsPath, limits: { fieldSize: 10 * 1024 * 1024 } }).single('avatar'));
-app.use(require('./src/middleware/multerMonkeyPatch'));
+app.use(multerMonkeyPatch);
 
 // User data mount
 app.use('/', userModule.router);
 // Private endpoints
-app.use('/api/users', require('./src/endpoints/users-private').router);
+
+app.use('/api/users', usersPrivateRouter);
 // Admin endpoints
-app.use('/api/users', require('./src/endpoints/users-admin').router);
+app.use('/api/users', usersAdminRouter);
 
 app.get('/version', async function (_, response) {
     const data = await getVersion();
@ -509,126 +557,45 @@ redirect('/api/serpapi/search', '/api/search/serpapi');
|
|||||||
redirect('/api/serpapi/visit', '/api/search/visit');
|
redirect('/api/serpapi/visit', '/api/search/visit');
|
||||||
redirect('/api/serpapi/transcript', '/api/search/transcript');
|
redirect('/api/serpapi/transcript', '/api/search/transcript');
|
||||||
|
|
||||||
// Moving UI
|
app.use('/api/moving-ui', movingUIRouter);
|
||||||
app.use('/api/moving-ui', require('./src/endpoints/moving-ui').router);
|
app.use('/api/images', imagesRouter);
|
||||||
|
app.use('/api/quick-replies', quickRepliesRouter);
|
||||||
// Image management
|
app.use('/api/avatars', avatarsRouter);
|
||||||
app.use('/api/images', require('./src/endpoints/images').router);
|
app.use('/api/themes', themesRouter);
|
||||||
|
app.use('/api/openai', openAiRouter);
|
||||||
// Quick reply management
|
app.use('/api/google', googleRouter);
|
||||||
app.use('/api/quick-replies', require('./src/endpoints/quick-replies').router);
|
app.use('/api/anthropic', anthropicRouter);
|
||||||
|
app.use('/api/tokenizers', tokenizersRouter);
|
||||||
// Avatar management
|
app.use('/api/presets', presetsRouter);
|
||||||
app.use('/api/avatars', require('./src/endpoints/avatars').router);
|
app.use('/api/secrets', secretsRouter);
|
||||||
|
app.use('/thumbnail', thumbnailRouter);
|
||||||
// Theme management
|
app.use('/api/novelai', novelAiRouter);
|
||||||
app.use('/api/themes', require('./src/endpoints/themes').router);
|
app.use('/api/extensions', extensionsRouter);
|
||||||
|
app.use('/api/assets', assetsRouter);
|
||||||
// OpenAI API
|
app.use('/api/files', filesRouter);
|
||||||
app.use('/api/openai', require('./src/endpoints/openai').router);
|
app.use('/api/characters', charactersRouter);
|
||||||
|
app.use('/api/chats', chatsRouter);
|
||||||
//Google API
|
app.use('/api/groups', groupsRouter);
|
||||||
app.use('/api/google', require('./src/endpoints/google').router);
|
app.use('/api/worldinfo', worldInfoRouter);
|
||||||
|
app.use('/api/stats', statsRouter);
|
||||||
//Anthropic API
|
app.use('/api/backgrounds', backgroundsRouter);
|
||||||
app.use('/api/anthropic', require('./src/endpoints/anthropic').router);
|
app.use('/api/sprites', spritesRouter);
|
||||||
|
app.use('/api/content', contentManagerRouter);
|
||||||
// Tokenizers
|
app.use('/api/settings', settingsRouter);
|
||||||
app.use('/api/tokenizers', require('./src/endpoints/tokenizers').router);
|
app.use('/api/sd', stableDiffusionRouter);
|
||||||
|
app.use('/api/horde', hordeRouter);
|
||||||
// Preset management
|
app.use('/api/vector', vectorsRouter);
|
||||||
app.use('/api/presets', require('./src/endpoints/presets').router);
|
app.use('/api/translate', translateRouter);
|
||||||
|
app.use('/api/extra/classify', classifyRouter);
|
||||||
// Secrets managemenet
|
app.use('/api/extra/caption', captionRouter);
|
||||||
app.use('/api/secrets', require('./src/endpoints/secrets').router);
|
app.use('/api/search', searchRouter);
|
||||||
|
app.use('/api/backends/text-completions', textCompletionsRouter);
|
||||||
// Thumbnail generation. These URLs are saved in chat, so this route cannot be renamed!
|
app.use('/api/openrouter', openRouterRouter);
|
||||||
app.use('/thumbnail', require('./src/endpoints/thumbnails').router);
|
app.use('/api/backends/kobold', koboldRouter);
|
||||||
|
app.use('/api/backends/chat-completions', chatCompletionsRouter);
|
||||||
// NovelAI generation
|
app.use('/api/backends/scale-alt', scaleAltRouter);
|
||||||
app.use('/api/novelai', require('./src/endpoints/novelai').router);
|
app.use('/api/speech', speechRouter);
|
||||||
|
app.use('/api/azure', azureRouter);
|
||||||
// Third-party extensions
|
|
||||||
app.use('/api/extensions', require('./src/endpoints/extensions').router);
|
|
||||||
|
|
||||||
// Asset management
|
|
||||||
app.use('/api/assets', require('./src/endpoints/assets').router);
|
|
||||||
|
|
||||||
// File management
|
|
||||||
app.use('/api/files', require('./src/endpoints/files').router);
|
|
||||||
|
|
||||||
// Character management
|
|
||||||
app.use('/api/characters', require('./src/endpoints/characters').router);
|
|
||||||
|
|
||||||
// Chat management
|
|
||||||
app.use('/api/chats', require('./src/endpoints/chats').router);
|
|
||||||
|
|
||||||
// Group management
|
|
||||||
app.use('/api/groups', require('./src/endpoints/groups').router);
|
|
||||||
|
|
||||||
// World info management
|
|
||||||
app.use('/api/worldinfo', require('./src/endpoints/worldinfo').router);
|
|
||||||
|
|
||||||
// Stats calculation
|
|
||||||
const statsEndpoint = require('./src/endpoints/stats');
|
|
||||||
app.use('/api/stats', statsEndpoint.router);
|
|
||||||
|
|
||||||
// Background management
|
|
||||||
app.use('/api/backgrounds', require('./src/endpoints/backgrounds').router);
|
|
||||||
|
|
||||||
// Character sprite management
|
|
||||||
app.use('/api/sprites', require('./src/endpoints/sprites').router);
|
|
||||||
|
|
||||||
// Custom content management
|
|
||||||
app.use('/api/content', require('./src/endpoints/content-manager').router);
|
|
||||||
|
|
||||||
// Settings load/store
|
|
||||||
const settingsEndpoint = require('./src/endpoints/settings');
|
|
||||||
app.use('/api/settings', settingsEndpoint.router);
|
|
||||||
|
|
||||||
// Stable Diffusion generation
|
|
||||||
app.use('/api/sd', require('./src/endpoints/stable-diffusion').router);
|
|
||||||
|
|
||||||
// LLM and SD Horde generation
|
|
||||||
app.use('/api/horde', require('./src/endpoints/horde').router);
|
|
||||||
|
|
||||||
// Vector storage DB
|
|
||||||
app.use('/api/vector', require('./src/endpoints/vectors').router);
|
|
||||||
|
|
||||||
// Chat translation
|
|
||||||
app.use('/api/translate', require('./src/endpoints/translate').router);
|
|
||||||
|
|
||||||
// Emotion classification
|
|
||||||
app.use('/api/extra/classify', require('./src/endpoints/classify').router);
|
|
||||||
|
|
||||||
// Image captioning
|
|
||||||
app.use('/api/extra/caption', require('./src/endpoints/caption').router);
|
|
||||||
|
|
||||||
// Web search and scraping
|
|
||||||
app.use('/api/search', require('./src/endpoints/search').router);
|
|
||||||
|
|
||||||
// The different text generation APIs
|
|
||||||
|
|
||||||
// Ooba/OpenAI text completions
|
|
||||||
app.use('/api/backends/text-completions', require('./src/endpoints/backends/text-completions').router);
|
|
||||||
|
|
||||||
// OpenRouter
|
|
||||||
app.use('/api/openrouter', require('./src/endpoints/openrouter').router);
|
|
||||||
|
|
||||||
// KoboldAI
|
|
||||||
app.use('/api/backends/kobold', require('./src/endpoints/backends/kobold').router);
|
|
||||||
|
|
||||||
// OpenAI chat completions
|
|
||||||
app.use('/api/backends/chat-completions', require('./src/endpoints/backends/chat-completions').router);
|
|
||||||
|
|
||||||
// Scale (alt method)
|
|
||||||
app.use('/api/backends/scale-alt', require('./src/endpoints/backends/scale-alt').router);
|
|
||||||
|
|
||||||
// Speech (text-to-speech and speech-to-text)
|
|
||||||
app.use('/api/speech', require('./src/endpoints/speech').router);
|
|
||||||
|
|
||||||
// Azure TTS
|
|
||||||
app.use('/api/azure', require('./src/endpoints/azure').router);
|
|
||||||
|
|
||||||
const tavernUrlV6 = new URL(
|
const tavernUrlV6 = new URL(
|
||||||
(cliArguments.ssl ? 'https://' : 'http://') +
|
(cliArguments.ssl ? 'https://' : 'http://') +
|
||||||
@@ -663,8 +630,8 @@ const preSetupTasks = async function () {
     await ensureThumbnailCache();
     cleanUploads();
 
-    await settingsEndpoint.init();
-    await statsEndpoint.init();
+    await settingsInit();
+    await statsInit();
 
     const cleanupPlugins = await loadPlugins();
     const consoleTitle = process.title;
@ -673,7 +640,7 @@ const preSetupTasks = async function () {
|
|||||||
const exitProcess = async () => {
|
const exitProcess = async () => {
|
||||||
if (isExiting) return;
|
if (isExiting) return;
|
||||||
isExiting = true;
|
isExiting = true;
|
||||||
statsEndpoint.onExit();
|
await statsOnExit();
|
||||||
if (typeof cleanupPlugins === 'function') {
|
if (typeof cleanupPlugins === 'function') {
|
||||||
await cleanupPlugins();
|
await cleanupPlugins();
|
||||||
}
|
}
|
||||||
@ -776,7 +743,6 @@ const postSetupTasks = async function (v6Failed, v4Failed) {
|
|||||||
async function loadPlugins() {
|
async function loadPlugins() {
|
||||||
try {
|
try {
|
||||||
const pluginDirectory = path.join(serverDirectory, 'plugins');
|
const pluginDirectory = path.join(serverDirectory, 'plugins');
|
||||||
const loader = require('./src/plugin-loader');
|
|
||||||
const cleanupPlugins = await loader.loadPlugins(app, pluginDirectory);
|
const cleanupPlugins = await loader.loadPlugins(app, pluginDirectory);
|
||||||
return cleanupPlugins;
|
return cleanupPlugins;
|
||||||
} catch {
|
} catch {
|
||||||
|
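Only the removed require()-based wiring is visible in the route-registration hunk above; under ES modules these registrations presumably become static imports plus the same app.use() calls. A minimal sketch, assuming illustrative *Router aliases that are not taken from this diff:

    // Hypothetical ESM wiring for two of the routes removed above.
    import { router as filesRouter } from './src/endpoints/files.js';
    import { router as charactersRouter } from './src/endpoints/characters.js';

    // The '/api/...' paths stay the same; only the module loading changes.
    app.use('/api/files', filesRouter);
    app.use('/api/characters', charactersRouter);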
@@ -1,6 +1,6 @@
-const { TEXTGEN_TYPES, OPENROUTER_HEADERS, FEATHERLESS_HEADERS } = require('./constants');
-const { SECRET_KEYS, readSecret } = require('./endpoints/secrets');
-const { getConfigValue } = require('./util');
+import { TEXTGEN_TYPES, OPENROUTER_HEADERS, FEATHERLESS_HEADERS } from './constants.js';
+import { SECRET_KEYS, readSecret } from './endpoints/secrets.js';
+import { getConfigValue } from './util.js';

 /**
  * Gets the headers for the Mancer API.
@@ -172,7 +172,7 @@ function getHuggingFaceHeaders(directories) {
     }) : {};
 }

-function getOverrideHeaders(urlHost) {
+export function getOverrideHeaders(urlHost) {
     const requestOverrides = getConfigValue('requestOverrides', []);
     const overrideHeaders = requestOverrides?.find((e) => e.hosts?.includes(urlHost))?.headers;
     if (overrideHeaders && urlHost) {
@@ -188,7 +188,7 @@ function getOverrideHeaders(urlHost) {
  * @param {object} args New request arguments
  * @param {string|null} server API server for new request
  */
-function setAdditionalHeaders(request, args, server) {
+export function setAdditionalHeaders(request, args, server) {
     setAdditionalHeadersByType(args.headers, request.body.api_type, server, request.user.directories);
 }

@@ -199,7 +199,7 @@ function setAdditionalHeaders(request, args, server) {
  * @param {string|null} server API server for new request
  * @param {import('./users').UserDirectoryList} directories User directories
  */
-function setAdditionalHeadersByType(requestHeaders, type, server, directories) {
+export function setAdditionalHeadersByType(requestHeaders, type, server, directories) {
     const headerGetters = {
         [TEXTGEN_TYPES.MANCER]: getMancerHeaders,
         [TEXTGEN_TYPES.VLLM]: getVllmHeaders,
@@ -234,9 +234,3 @@ function setAdditionalHeadersByType(requestHeaders, type, server, directories) {

     Object.assign(requestHeaders, headers);
 }
-
-module.exports = {
-    getOverrideHeaders,
-    setAdditionalHeaders,
-    setAdditionalHeadersByType,
-};
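The hunks above show the export pattern this commit applies throughout src/: the module.exports object at the bottom of a file is dropped and the export keyword is placed directly on each declaration. Because Node's ES module resolution needs explicit file extensions on relative specifiers, relative imports also gain a .js suffix. A minimal before/after sketch of the caller side, using a function that exists in this file:

    // CommonJS consumer (before):
    const { setAdditionalHeaders } = require('../additional-headers');

    // ESM consumer (after); note the mandatory '.js' extension:
    import { setAdditionalHeaders } from '../additional-headers.js';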
@@ -1,8 +1,8 @@
-const fs = require('fs');
+import fs from 'node:fs';

-const encode = require('png-chunks-encode');
-const extract = require('png-chunks-extract');
-const PNGtext = require('png-chunk-text');
+import encode from 'png-chunks-encode';
+import extract from 'png-chunks-extract';
+import PNGtext from 'png-chunk-text';

 /**
  * Writes Character metadata to a PNG image buffer.
@@ -11,7 +11,7 @@ const PNGtext = require('png-chunk-text');
  * @param {string} data Character data to write
  * @returns {Buffer} PNG image buffer with metadata
  */
-const write = (image, data) => {
+export const write = (image, data) => {
     const chunks = extract(image);
     const tEXtChunks = chunks.filter(chunk => chunk.name === 'tEXt');

@@ -36,7 +36,9 @@ const write = (image, data) => {

         const base64EncodedData = Buffer.from(JSON.stringify(v3Data), 'utf8').toString('base64');
         chunks.splice(-1, 0, PNGtext.encode('ccv3', base64EncodedData));
-    } catch (error) { }
+    } catch (error) {
+        // Ignore errors when adding v3 chunk
+    }

     const newBuffer = Buffer.from(encode(chunks));
     return newBuffer;
@@ -48,7 +50,7 @@ const write = (image, data) => {
  * @param {Buffer} image PNG image buffer
  * @returns {string} Character data
  */
-const read = (image) => {
+export const read = (image) => {
     const chunks = extract(image);

     const textChunks = chunks.filter((chunk) => chunk.name === 'tEXt').map((chunk) => PNGtext.decode(chunk.data));
@@ -80,7 +82,7 @@ const read = (image) => {
  * @param {string} format File format
  * @returns {string} Character data
  */
-const parse = (cardUrl, format) => {
+export const parse = (cardUrl, format) => {
     let fileFormat = format === undefined ? 'png' : format;

     switch (fileFormat) {
@@ -93,8 +95,3 @@ const parse = (cardUrl, format) => {
         throw new Error('Unsupported format');
 };

-module.exports = {
-    parse,
-    write,
-    read,
-};
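With parse, write, and read exposed as named exports, consumers switch from require() to either a namespace import (the form used later in this commit) or named imports. A brief usage sketch; the cardPath variable is illustrative:

    import * as characterCardParser from '../character-card-parser.js';

    // Same call shape as before the migration:
    const data = characterCardParser.parse(cardPath, 'png');

    // Equivalent named-import form:
    // import { parse } from '../character-card-parser.js';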
@@ -1,18 +1,18 @@
-const PUBLIC_DIRECTORIES = {
+export const PUBLIC_DIRECTORIES = {
     images: 'public/img/',
     backups: 'backups/',
     sounds: 'public/sounds',
     extensions: 'public/scripts/extensions',
 };

-const SETTINGS_FILE = 'settings.json';
+export const SETTINGS_FILE = 'settings.json';

 /**
  * @type {import('./users').UserDirectoryList}
  * @readonly
  * @enum {string}
  */
-const USER_DIRECTORY_TEMPLATE = Object.freeze({
+export const USER_DIRECTORY_TEMPLATE = Object.freeze({
     root: '',
     thumbnails: 'thumbnails',
     thumbnailsBg: 'thumbnails/bg',
@@ -48,7 +48,7 @@ const USER_DIRECTORY_TEMPLATE = Object.freeze({
  * @type {import('./users').User}
  * @readonly
  */
-const DEFAULT_USER = Object.freeze({
+export const DEFAULT_USER = Object.freeze({
     handle: 'default-user',
     name: 'User',
     created: Date.now(),
@@ -58,7 +58,7 @@ const DEFAULT_USER = Object.freeze({
     salt: '',
 });

-const UNSAFE_EXTENSIONS = [
+export const UNSAFE_EXTENSIONS = [
     '.php',
     '.exe',
     '.com',
@@ -135,7 +135,7 @@ const UNSAFE_EXTENSIONS = [
     '.ws',
 ];

-const GEMINI_SAFETY = [
+export const GEMINI_SAFETY = [
     {
         category: 'HARM_CATEGORY_HARASSMENT',
         threshold: 'BLOCK_NONE',
@@ -158,7 +158,7 @@ const GEMINI_SAFETY = [
     },
 ];

-const BISON_SAFETY = [
+export const BISON_SAFETY = [
     {
         category: 'HARM_CATEGORY_DEROGATORY',
         threshold: 'BLOCK_NONE',
@@ -185,7 +185,7 @@ const BISON_SAFETY = [
     },
 ];

-const CHAT_COMPLETION_SOURCES = {
+export const CHAT_COMPLETION_SOURCES = {
     OPENAI: 'openai',
     WINDOWAI: 'windowai',
     CLAUDE: 'claude',
@@ -205,10 +205,10 @@ const CHAT_COMPLETION_SOURCES = {
 /**
  * Path to multer file uploads under the data root.
  */
-const UPLOADS_DIRECTORY = '_uploads';
+export const UPLOADS_DIRECTORY = '_uploads';

 // TODO: this is copied from the client code; there should be a way to de-duplicate it eventually
-const TEXTGEN_TYPES = {
+export const TEXTGEN_TYPES = {
     OOBA: 'ooba',
     MANCER: 'mancer',
     VLLM: 'vllm',
@@ -225,7 +225,7 @@ const TEXTGEN_TYPES = {
     HUGGINGFACE: 'huggingface',
 };

-const INFERMATICAI_KEYS = [
+export const INFERMATICAI_KEYS = [
     'model',
     'prompt',
     'max_tokens',
@@ -248,7 +248,7 @@ const INFERMATICAI_KEYS = [
     'logprobs',
 ];

-const FEATHERLESS_KEYS = [
+export const FEATHERLESS_KEYS = [
     'model',
     'prompt',
     'best_of',
@@ -290,9 +290,8 @@ const FEATHERLESS_KEYS = [
     'guided_whitespace_pattern',
 ];

-
 // https://dreamgen.com/docs/api#openai-text
-const DREAMGEN_KEYS = [
+export const DREAMGEN_KEYS = [
     'model',
     'prompt',
     'max_tokens',
@@ -309,7 +308,7 @@ const DREAMGEN_KEYS = [
 ];

 // https://docs.together.ai/reference/completions
-const TOGETHERAI_KEYS = [
+export const TOGETHERAI_KEYS = [
     'model',
     'prompt',
     'max_tokens',
@@ -325,7 +324,7 @@ const TOGETHERAI_KEYS = [
 ];

 // https://github.com/jmorganca/ollama/blob/main/docs/api.md#request-with-options
-const OLLAMA_KEYS = [
+export const OLLAMA_KEYS = [
     'num_predict',
     'num_ctx',
     'stop',
@@ -345,20 +344,20 @@ const OLLAMA_KEYS = [
     'min_p',
 ];

-const AVATAR_WIDTH = 512;
-const AVATAR_HEIGHT = 768;
+export const AVATAR_WIDTH = 512;
+export const AVATAR_HEIGHT = 768;

-const OPENROUTER_HEADERS = {
+export const OPENROUTER_HEADERS = {
     'HTTP-Referer': 'https://sillytavern.app',
     'X-Title': 'SillyTavern',
 };

-const FEATHERLESS_HEADERS = {
+export const FEATHERLESS_HEADERS = {
     'HTTP-Referer': 'https://sillytavern.app',
     'X-Title': 'SillyTavern',
 };

-const OPENROUTER_KEYS = [
+export const OPENROUTER_KEYS = [
     'max_tokens',
     'temperature',
     'top_k',
@@ -378,7 +377,7 @@ const OPENROUTER_KEYS = [
 ];

 // https://github.com/vllm-project/vllm/blob/0f8a91401c89ac0a8018def3756829611b57727f/vllm/entrypoints/openai/protocol.py#L220
-const VLLM_KEYS = [
+export const VLLM_KEYS = [
     'model',
     'prompt',
     'best_of',
@@ -419,27 +418,3 @@ const VLLM_KEYS = [
     'guided_decoding_backend',
     'guided_whitespace_pattern',
 ];
-
-module.exports = {
-    DEFAULT_USER,
-    SETTINGS_FILE,
-    PUBLIC_DIRECTORIES,
-    USER_DIRECTORY_TEMPLATE,
-    UNSAFE_EXTENSIONS,
-    UPLOADS_DIRECTORY,
-    GEMINI_SAFETY,
-    BISON_SAFETY,
-    TEXTGEN_TYPES,
-    CHAT_COMPLETION_SOURCES,
-    AVATAR_WIDTH,
-    AVATAR_HEIGHT,
-    TOGETHERAI_KEYS,
-    OLLAMA_KEYS,
-    INFERMATICAI_KEYS,
-    DREAMGEN_KEYS,
-    OPENROUTER_HEADERS,
-    OPENROUTER_KEYS,
-    VLLM_KEYS,
-    FEATHERLESS_KEYS,
-    FEATHERLESS_HEADERS,
-};
@@ -1,9 +1,10 @@
-const { readSecret, SECRET_KEYS } = require('./secrets');
-const fetch = require('node-fetch').default;
-const express = require('express');
-const { jsonParser } = require('../express-common');
+import fetch from 'node-fetch';
+import express from 'express';

-const router = express.Router();
+import { readSecret, SECRET_KEYS } from './secrets.js';
+import { jsonParser } from '../express-common.js';
+
+export const router = express.Router();

 router.post('/caption-image', jsonParser, async (request, response) => {
     try {
@@ -64,5 +65,3 @@ router.post('/caption-image', jsonParser, async (request, response) => {
         response.status(500).send('Internal server error');
     }
 });
-
-module.exports = { router };
@@ -1,13 +1,15 @@
-const path = require('path');
-const fs = require('fs');
-const mime = require('mime-types');
-const express = require('express');
-const sanitize = require('sanitize-filename');
-const fetch = require('node-fetch').default;
-const { finished } = require('stream/promises');
-const { UNSAFE_EXTENSIONS } = require('../constants');
-const { jsonParser } = require('../express-common');
-const { clientRelativePath } = require('../util');
+import * as path from 'node:path';
+import * as fs from 'node:fs';
+import { finished } from 'node:stream/promises';
+
+import mime from 'mime-types';
+import express from 'express';
+import sanitize from 'sanitize-filename';
+import fetch from 'node-fetch';
+
+import { UNSAFE_EXTENSIONS } from '../constants.js';
+import { jsonParser } from '../express-common.js';
+import { clientRelativePath } from '../util.js';

 const VALID_CATEGORIES = ['bgm', 'ambient', 'blip', 'live2d', 'vrm', 'character', 'temp'];

@@ -16,7 +18,7 @@ const VALID_CATEGORIES = ['bgm', 'ambient', 'blip', 'live2d', 'vrm', 'character'
  * @param {string} inputFilename Input filename
  * @returns {{error: boolean, message?: string}} Whether validation failed, and why if so
  */
-function validateAssetFileName(inputFilename) {
+export function validateAssetFileName(inputFilename) {
     if (!/^[a-zA-Z0-9_\-.]+$/.test(inputFilename)) {
         return {
             error: true,
@@ -93,7 +95,7 @@ function ensureFoldersExist(directories) {
     }
 }

-const router = express.Router();
+export const router = express.Router();

 /**
  * HTTP POST handler function to retrieve name of all files of a given folder path.
@@ -366,5 +368,3 @@ router.post('/character', jsonParser, async (request, response) => {
         return response.sendStatus(500);
     }
 });
-
-module.exports = { router, validateAssetFileName };
@@ -1,16 +1,16 @@
-const express = require('express');
-const path = require('path');
-const fs = require('fs');
-const sanitize = require('sanitize-filename');
-const writeFileAtomicSync = require('write-file-atomic').sync;
-const { jsonParser, urlencodedParser } = require('../express-common');
-const { AVATAR_WIDTH, AVATAR_HEIGHT } = require('../constants');
-const { getImages, tryParse } = require('../util');
+import * as path from 'node:path';
+import * as fs from 'node:fs';

-// image processing related library imports
-const jimp = require('jimp');
+import express from 'express';
+import sanitize from 'sanitize-filename';
+import jimp from 'jimp';
+import { sync as writeFileAtomicSync } from 'write-file-atomic';

-const router = express.Router();
+import { jsonParser, urlencodedParser } from '../express-common.js';
+import { AVATAR_WIDTH, AVATAR_HEIGHT } from '../constants.js';
+import { getImages, tryParse } from '../util.js';
+
+export const router = express.Router();

 router.post('/get', jsonParser, function (request, response) {
     var images = getImages(request.user.directories.avatars);
@@ -58,5 +58,3 @@ router.post('/upload', urlencodedParser, async (request, response) => {
         return response.status(400).send('Is not a valid image');
     }
 });
-
-module.exports = { router };
@@ -1,9 +1,10 @@
-const { readSecret, SECRET_KEYS } = require('./secrets');
-const fetch = require('node-fetch').default;
-const express = require('express');
-const { jsonParser } = require('../express-common');
+import fetch from 'node-fetch';
+import { Router } from 'express';

-const router = express.Router();
+import { readSecret, SECRET_KEYS } from './secrets.js';
+import { jsonParser } from '../express-common.js';
+
+export const router = Router();

 router.post('/list', jsonParser, async (req, res) => {
     try {
@@ -86,7 +87,3 @@ router.post('/generate', jsonParser, async (req, res) => {
         return res.sendStatus(500);
     }
 });
-
-module.exports = {
-    router,
-};
@@ -1,13 +1,40 @@
-const express = require('express');
-const fetch = require('node-fetch').default;
+import express from 'express';
+import fetch from 'node-fetch';

-const { jsonParser } = require('../../express-common');
-const { CHAT_COMPLETION_SOURCES, GEMINI_SAFETY, BISON_SAFETY, OPENROUTER_HEADERS } = require('../../constants');
-const { forwardFetchResponse, getConfigValue, tryParse, uuidv4, mergeObjectWithYaml, excludeKeysByYaml, color } = require('../../util');
-const { convertClaudeMessages, convertGooglePrompt, convertTextCompletionPrompt, convertCohereMessages, convertMistralMessages, convertAI21Messages, mergeMessages } = require('../../prompt-converters');
+import { jsonParser } from '../../express-common.js';
+import {
+    CHAT_COMPLETION_SOURCES,
+    GEMINI_SAFETY,
+    BISON_SAFETY,
+    OPENROUTER_HEADERS,
+} from '../../constants.js';
+import {
+    forwardFetchResponse,
+    getConfigValue,
+    tryParse,
+    uuidv4,
+    mergeObjectWithYaml,
+    excludeKeysByYaml,
+    color,
+} from '../../util.js';
+import {
+    convertClaudeMessages,
+    convertGooglePrompt,
+    convertTextCompletionPrompt,
+    convertCohereMessages,
+    convertMistralMessages,
+    convertAI21Messages,
+    mergeMessages,
+} from '../../prompt-converters.js';

-const { readSecret, SECRET_KEYS } = require('../secrets');
-const { getTokenizerModel, getSentencepiceTokenizer, getTiktokenTokenizer, sentencepieceTokenizers, TEXT_COMPLETION_MODELS } = require('../tokenizers');
+import { readSecret, SECRET_KEYS } from '../secrets.js';
+import {
+    getTokenizerModel,
+    getSentencepiceTokenizer,
+    getTiktokenTokenizer,
+    sentencepieceTokenizers,
+    TEXT_COMPLETION_MODELS,
+} from '../tokenizers.js';

 const API_OPENAI = 'https://api.openai.com/v1';
 const API_CLAUDE = 'https://api.anthropic.com/v1';
@@ -41,43 +68,6 @@ function postProcessPrompt(messages, type, charName, userName) {
     }
 }

-/**
- * Ollama strikes back. Special boy #2's steaming routine.
- * Wrap this abomination into proper SSE stream, again.
- * @param {Response} jsonStream JSON stream
- * @param {import('express').Request} request Express request
- * @param {import('express').Response} response Express response
- * @returns {Promise<any>} Nothing valuable
- */
-async function parseCohereStream(jsonStream, request, response) {
-    try {
-        const stream = new CohereStream({ stream: jsonStream.body, eventShape: { type: 'json', messageTerminator: '\n' } });
-
-        for await (const json of stream.iterMessages()) {
-            if (json.message) {
-                const message = json.message || 'Unknown error';
-                const chunk = { error: { message: message } };
-                response.write(`data: ${JSON.stringify(chunk)}\n\n`);
-            } else if (json.event_type === 'text-generation') {
-                const text = json.text || '';
-                const chunk = { choices: [{ text }] };
-                response.write(`data: ${JSON.stringify(chunk)}\n\n`);
-            }
-        }
-
-        console.log('Streaming request finished');
-        response.write('data: [DONE]\n\n');
-        response.end();
-    } catch (error) {
-        console.log('Error forwarding streaming response:', error);
-        if (!response.headersSent) {
-            return response.status(500).send({ error: true });
-        } else {
-            return response.end();
-        }
-    }
-}
-
 /**
  * Sends a request to Claude API.
  * @param {express.Request} request Express request
@@ -626,7 +616,7 @@ async function sendCohereRequest(request, response) {
     }
 }

-const router = express.Router();
+export const router = express.Router();

 router.post('/status', jsonParser, async function (request, response_getstatus_openai) {
     if (!request.body) return response_getstatus_openai.sendStatus(400);
@@ -1069,6 +1059,3 @@ router.post('/generate', jsonParser, function (request, response) {
     }
 });

-module.exports = {
-    router,
-};
@@ -1,13 +1,13 @@
-const express = require('express');
-const fetch = require('node-fetch').default;
-const fs = require('fs');
+import * as fs from 'node:fs';
+import express from 'express';
+import fetch from 'node-fetch';

-const { jsonParser, urlencodedParser } = require('../../express-common');
-const { forwardFetchResponse, delay } = require('../../util');
-const { getOverrideHeaders, setAdditionalHeaders, setAdditionalHeadersByType } = require('../../additional-headers');
-const { TEXTGEN_TYPES } = require('../../constants');
+import { jsonParser, urlencodedParser } from '../../express-common.js';
+import { forwardFetchResponse, delay } from '../../util.js';
+import { getOverrideHeaders, setAdditionalHeaders, setAdditionalHeadersByType } from '../../additional-headers.js';
+import { TEXTGEN_TYPES } from '../../constants.js';

-const router = express.Router();
+export const router = express.Router();

 router.post('/generate', jsonParser, async function (request, response_generate) {
     if (!request.body) return response_generate.sendStatus(400);
@@ -237,5 +237,3 @@ router.post('/transcribe-audio', urlencodedParser, async function (request, resp
         response.status(500).send('Internal server error');
     }
 });
-
-module.exports = { router };
@@ -1,11 +1,10 @@
-const express = require('express');
-const fetch = require('node-fetch').default;
+import express from 'express';
+import fetch from 'node-fetch';

-const { jsonParser } = require('../../express-common');
+import { jsonParser } from '../../express-common.js';
+import { readSecret, SECRET_KEYS } from '../secrets.js';

-const { readSecret, SECRET_KEYS } = require('../secrets');
-
-const router = express.Router();
+export const router = express.Router();

 router.post('/generate', jsonParser, async function (request, response) {
     if (!request.body) return response.sendStatus(400);
@@ -97,5 +96,3 @@ router.post('/generate', jsonParser, async function (request, response) {
         return response.sendStatus(500);
     }
 });
-
-module.exports = { router };
@@ -1,14 +1,23 @@
-const express = require('express');
-const fetch = require('node-fetch').default;
-const _ = require('lodash');
-const Readable = require('stream').Readable;
+import { Readable } from 'node:stream';
+import fetch from 'node-fetch';
+import express from 'express';
+import _ from 'lodash';

-const { jsonParser } = require('../../express-common');
-const { TEXTGEN_TYPES, TOGETHERAI_KEYS, OLLAMA_KEYS, INFERMATICAI_KEYS, OPENROUTER_KEYS, VLLM_KEYS, DREAMGEN_KEYS, FEATHERLESS_KEYS } = require('../../constants');
-const { forwardFetchResponse, trimV1, getConfigValue } = require('../../util');
-const { setAdditionalHeaders } = require('../../additional-headers');
+import { jsonParser } from '../../express-common.js';
+import {
+    TEXTGEN_TYPES,
+    TOGETHERAI_KEYS,
+    OLLAMA_KEYS,
+    INFERMATICAI_KEYS,
+    OPENROUTER_KEYS,
+    VLLM_KEYS,
+    DREAMGEN_KEYS,
+    FEATHERLESS_KEYS,
+} from '../../constants.js';
+import { forwardFetchResponse, trimV1, getConfigValue } from '../../util.js';
+import { setAdditionalHeaders } from '../../additional-headers.js';

-const router = express.Router();
+export const router = express.Router();

 /**
  * Special boy's steaming routine. Wrap this abomination into proper SSE stream.
@@ -641,5 +650,3 @@ tabby.post('/download', jsonParser, async function (request, response) {
 router.use('/ollama', ollama);
 router.use('/llamacpp', llamacpp);
 router.use('/tabby', tabby);
-
-module.exports = { router };
@@ -1,13 +1,14 @@
-const fs = require('fs');
-const path = require('path');
-const express = require('express');
-const sanitize = require('sanitize-filename');
+import * as fs from 'node:fs';
+import * as path from 'node:path';

-const { jsonParser, urlencodedParser } = require('../express-common');
-const { invalidateThumbnail } = require('./thumbnails');
-const { getImages } = require('../util');
+import express from 'express';
+import sanitize from 'sanitize-filename';

-const router = express.Router();
+import { jsonParser, urlencodedParser } from '../express-common.js';
+import { invalidateThumbnail } from './thumbnails.js';
+import { getImages } from '../util.js';
+
+export const router = express.Router();

 router.post('/all', jsonParser, function (request, response) {
     var images = getImages(request.user.directories.backgrounds);
@@ -72,5 +73,3 @@ router.post('/upload', urlencodedParser, function (request, response) {
         response.sendStatus(500);
     }
 });
-
-module.exports = { router };
@@ -1,23 +1,23 @@
-const express = require('express');
-const { jsonParser } = require('../express-common');
+import express from 'express';
+import { jsonParser } from '../express-common.js';
+import { getPipeline, getRawImage } from '../transformers.mjs';

 const TASK = 'image-to-text';

-const router = express.Router();
+export const router = express.Router();

 router.post('/', jsonParser, async (req, res) => {
     try {
         const { image } = req.body;

-        const module = await import('../transformers.mjs');
-        const rawImage = await module.default.getRawImage(image);
+        const rawImage = await getRawImage(image);

         if (!rawImage) {
             console.log('Failed to parse captioned image');
             return res.sendStatus(400);
         }

-        const pipe = await module.default.getPipeline(TASK);
+        const pipe = await getPipeline(TASK);
         const result = await pipe(rawImage);
         const text = result[0].generated_text;
         console.log('Image caption:', text);
@@ -28,5 +28,3 @@ router.post('/', jsonParser, async (req, res) => {
         return res.sendStatus(500);
     }
 });
-
-module.exports = { router };
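This hunk also drops the dynamic-import workaround: a CommonJS file cannot require() an .mjs module, so the old code loaded transformers.mjs with await import() and went through its default export. An ES module caller can use a static named import instead. Both forms, as they appear in the hunk above:

    // Before (CommonJS caller):
    const module = await import('../transformers.mjs');
    const pipe = await module.default.getPipeline(TASK);

    // After (ESM caller):
    import { getPipeline } from '../transformers.mjs';
    const pipe = await getPipeline(TASK);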
@@ -1,24 +1,24 @@
-const path = require('path');
-const fs = require('fs');
-const fsPromises = require('fs').promises;
-const readline = require('readline');
-const express = require('express');
-const sanitize = require('sanitize-filename');
-const writeFileAtomicSync = require('write-file-atomic').sync;
-const yaml = require('yaml');
-const _ = require('lodash');
-const mime = require('mime-types');
+import * as path from 'node:path';
+import * as fs from 'node:fs';
+import { promises as fsPromises } from 'node:fs';
+import * as readline from 'node:readline';

-const jimp = require('jimp');
+import express from 'express';
+import sanitize from 'sanitize-filename';
+import { sync as writeFileAtomicSync } from 'write-file-atomic';
+import yaml from 'yaml';
+import _ from 'lodash';
+import mime from 'mime-types';
+import jimp from 'jimp';

-const { AVATAR_WIDTH, AVATAR_HEIGHT } = require('../constants');
-const { jsonParser, urlencodedParser } = require('../express-common');
-const { deepMerge, humanizedISO8601DateTime, tryParse, extractFileFromZipBuffer } = require('../util');
-const { TavernCardValidator } = require('../validator/TavernCardValidator');
-const characterCardParser = require('../character-card-parser.js');
-const { readWorldInfoFile } = require('./worldinfo');
-const { invalidateThumbnail } = require('./thumbnails');
-const { importRisuSprites } = require('./sprites');
+import { AVATAR_WIDTH, AVATAR_HEIGHT } from '../constants.js';
+import { jsonParser, urlencodedParser } from '../express-common.js';
+import { deepMerge, humanizedISO8601DateTime, tryParse, extractFileFromZipBuffer } from '../util.js';
+import { TavernCardValidator } from '../validator/TavernCardValidator.js';
+import * as characterCardParser from '../character-card-parser.js';
+import { readWorldInfoFile } from './worldinfo.js';
+import { invalidateThumbnail } from './thumbnails.js';
+import { importRisuSprites } from './sprites.js';

 const defaultAvatarPath = './public/img/ai4.png';

 // KV-store for parsed character data
@@ -715,7 +715,7 @@ async function importFromPng(uploadPath, { request }, preservedFileName) {
     return '';
 }

-const router = express.Router();
+export const router = express.Router();

 router.post('/create', urlencodedParser, async function (request, response) {
     try {
@@ -1231,5 +1231,3 @@ router.post('/export', jsonParser, async function (request, response) {
         response.sendStatus(500);
     }
 });
-
-module.exports = { router };
@@ -1,13 +1,14 @@
-const fs = require('fs');
-const path = require('path');
-const readline = require('readline');
-const express = require('express');
-const sanitize = require('sanitize-filename');
-const writeFileAtomicSync = require('write-file-atomic').sync;
-const _ = require('lodash');
+import * as fs from 'node:fs';
+import * as path from 'node:path';
+import * as readline from 'node:readline';

-const { jsonParser, urlencodedParser } = require('../express-common');
-const { getConfigValue, humanizedISO8601DateTime, tryParse, generateTimestamp, removeOldBackups } = require('../util');
+import express from 'express';
+import sanitize from 'sanitize-filename';
+import { sync as writeFileAtomicSync } from 'write-file-atomic';
+import _ from 'lodash';
+
+import { jsonParser, urlencodedParser } from '../express-common.js';
+import { getConfigValue, humanizedISO8601DateTime, tryParse, generateTimestamp, removeOldBackups } from '../util.js';

 /**
  * Saves a chat to the backups directory.
@@ -188,7 +189,7 @@ function flattenChubChat(userName, characterName, lines) {
     return (lines ?? []).map(convert).join('\n');
 }

-const router = express.Router();
+export const router = express.Router();

 router.post('/save', jsonParser, function (request, response) {
     try {
@@ -514,5 +515,3 @@ router.post('/group/save', jsonParser, (request, response) => {
     getBackupFunction(request.user.profile.handle)(request.user.directories.backups, String(id), jsonlData);
     return response.send({ ok: true });
 });
-
-module.exports = { router };
@@ -1,9 +1,11 @@
-const express = require('express');
-const { jsonParser } = require('../express-common');
+import express from 'express';
+
+import { getPipeline } from '../transformers.mjs';
+import { jsonParser } from '../express-common.js';

 const TASK = 'text-classification';

-const router = express.Router();
+export const router = express.Router();

 /**
  * @type {Map<string, object>} Cache for classification results
@@ -12,8 +14,7 @@ const cacheObject = new Map();

 router.post('/labels', jsonParser, async (req, res) => {
     try {
-        const module = await import('../transformers.mjs');
-        const pipe = await module.default.getPipeline(TASK);
+        const pipe = await getPipeline(TASK);
         const result = Object.keys(pipe.model.config.label2id);
         return res.json({ labels: result });
     } catch (error) {
@@ -35,8 +36,7 @@ router.post('/', jsonParser, async (req, res) => {
         if (cacheObject.has(text)) {
             return cacheObject.get(text);
         } else {
-            const module = await import('../transformers.mjs');
-            const pipe = await module.default.getPipeline(TASK);
+            const pipe = await getPipeline(TASK);
             const result = await pipe(text, { topk: 5 });
             result.sort((a, b) => b.score - a.score);
             cacheObject.set(text, result);
@@ -54,5 +54,3 @@ router.post('/', jsonParser, async (req, res) => {
         return res.sendStatus(500);
     }
 });
-
-module.exports = { router };
@@ -1,16 +1,18 @@
-const fs = require('fs');
-const path = require('path');
-const express = require('express');
-const fetch = require('node-fetch').default;
-const sanitize = require('sanitize-filename');
-const { getConfigValue, color } = require('../util');
-const { jsonParser } = require('../express-common');
-const writeFileAtomicSync = require('write-file-atomic').sync;
+import * as fs from 'node:fs';
+import * as path from 'node:path';
+
+import express from 'express';
+import fetch from 'node-fetch';
+import sanitize from 'sanitize-filename';
+
+import { getConfigValue, color } from '../util.js';
+import { jsonParser } from '../express-common.js';
+import { sync as writeFileAtomicSync } from 'write-file-atomic';
 const contentDirectory = path.join(process.cwd(), 'default/content');
 const scaffoldDirectory = path.join(process.cwd(), 'default/scaffold');
 const contentIndexPath = path.join(contentDirectory, 'index.json');
 const scaffoldIndexPath = path.join(scaffoldDirectory, 'index.json');
-const characterCardParser = require('../character-card-parser.js');
+import * as characterCardParser from '../character-card-parser.js';

 const WHITELIST_GENERIC_URL_DOWNLOAD_SOURCES = getConfigValue('whitelistImportDomains', []);

@@ -26,7 +28,7 @@ const WHITELIST_GENERIC_URL_DOWNLOAD_SOURCES = getConfigValue('whitelistImportDo
  * @typedef {string} ContentType
  * @enum {string}
  */
-const CONTENT_TYPES = {
+export const CONTENT_TYPES = {
     SETTINGS: 'settings',
     CHARACTER: 'character',
     SPRITES: 'sprites',
@@ -51,7 +53,7 @@ const CONTENT_TYPES = {
  * @param {import('../users').UserDirectoryList} directories User directories
  * @returns {object[]} Array of default presets
 */
-function getDefaultPresets(directories) {
+export function getDefaultPresets(directories) {
     try {
         const contentIndex = getContentIndex();
         const presets = [];
@@ -76,7 +78,7 @@ function getDefaultPresets(directories) {
  * @param {string} filename Name of the file to get
  * @returns {object | null} JSON object or null if the file doesn't exist
 */
-function getDefaultPresetFile(filename) {
+export function getDefaultPresetFile(filename) {
     try {
         const contentPath = path.join(contentDirectory, filename);

@@ -158,7 +160,7 @@ async function seedContentForUser(contentIndex, directories, forceCategories) {
  * @param {string[]} forceCategories List of categories to force check (even if content check is skipped)
  * @returns {Promise<void>}
 */
-async function checkForNewContent(directoriesList, forceCategories = []) {
+export async function checkForNewContent(directoriesList, forceCategories = []) {
     try {
         const contentCheckSkip = getConfigValue('skipContentCheck', false);
         if (contentCheckSkip && forceCategories?.length === 0) {
@@ -224,7 +226,7 @@ function getContentIndex() {
 * @param {'json'|'string'|'raw'} format Format of content
 * @returns {string[]|Buffer[]} Array of content
 */
-function getContentOfType(type, format) {
+export function getContentOfType(type, format) {
     const contentIndex = getContentIndex();
     const indexItems = contentIndex.filter((item) => item.type === type && item.folder);
     const files = [];
@@ -618,7 +620,7 @@ function isHostWhitelisted(host) {
     return WHITELIST_GENERIC_URL_DOWNLOAD_SOURCES.includes(host);
 }

-const router = express.Router();
+export const router = express.Router();

 router.post('/importURL', jsonParser, async (request, response) => {
     if (!request.body.url) {
@@ -753,12 +755,3 @@ router.post('/importUUID', jsonParser, async (request, response) => {
         return response.sendStatus(500);
     }
 });
-
-module.exports = {
-    CONTENT_TYPES,
-    checkForNewContent,
-    getDefaultPresets,
-    getDefaultPresetFile,
-    getContentOfType,
-    router,
-};
@@ -1,10 +1,12 @@
-const path = require('path');
-const fs = require('fs');
-const express = require('express');
-const { default: simpleGit } = require('simple-git');
-const sanitize = require('sanitize-filename');
-const { PUBLIC_DIRECTORIES } = require('../constants');
-const { jsonParser } = require('../express-common');
+import * as path from 'node:path';
+import * as fs from 'node:fs';
+
+import express from 'express';
+import sanitize from 'sanitize-filename';
+import { default as simpleGit } from 'simple-git';
+
+import { PUBLIC_DIRECTORIES } from '../constants.js';
+import { jsonParser } from '../express-common.js';

 /**
  * This function extracts the extension information from the manifest file.
@@ -47,7 +49,7 @@ async function checkIfRepoIsUpToDate(extensionPath) {
     };
 }

-const router = express.Router();
+export const router = express.Router();

 /**
  * HTTP POST handler function to clone a git repository from a provided URL, read the extension manifest,
@@ -240,5 +242,3 @@ router.get('/discover', jsonParser, function (request, response) {

     return response.send(extensions);
 });
-
-module.exports = { router };
@@ -1,12 +1,15 @@
-const path = require('path');
-const fs = require('fs');
-const writeFileSyncAtomic = require('write-file-atomic').sync;
-const express = require('express');
-const sanitize = require('sanitize-filename');
-const router = express.Router();
-const { validateAssetFileName } = require('./assets');
-const { jsonParser } = require('../express-common');
-const { clientRelativePath } = require('../util');
+import * as path from 'node:path';
+import * as fs from 'node:fs';
+
+import express from 'express';
+import sanitize from 'sanitize-filename';
+import { sync as writeFileSyncAtomic } from 'write-file-atomic';
+
+import { validateAssetFileName } from './assets.js';
+import { jsonParser } from '../express-common.js';
+import { clientRelativePath } from '../util.js';
+
+export const router = express.Router();

 router.post('/sanitize-filename', jsonParser, async (request, response) => {
     try {
@@ -97,5 +100,3 @@ router.post('/verify', jsonParser, async (request, response) => {
         return response.sendStatus(500);
     }
 });
-
-module.exports = { router };
@@ -1,12 +1,13 @@
-const { readSecret, SECRET_KEYS } = require('./secrets');
-const fetch = require('node-fetch').default;
-const express = require('express');
-const { jsonParser } = require('../express-common');
-const { GEMINI_SAFETY } = require('../constants');
+import fetch from 'node-fetch';
+import express from 'express';
+
+import { readSecret, SECRET_KEYS } from './secrets.js';
+import { jsonParser } from '../express-common.js';
+import { GEMINI_SAFETY } from '../constants.js';

 const API_MAKERSUITE = 'https://generativelanguage.googleapis.com';

-const router = express.Router();
+export const router = express.Router();

 router.post('/caption-image', jsonParser, async (request, response) => {
     try {
@@ -67,5 +68,3 @@ router.post('/caption-image', jsonParser, async (request, response) => {
         response.status(500).send('Internal server error');
     }
 });
-
-module.exports = { router };
@@ -1,13 +1,14 @@
-const fs = require('fs');
-const path = require('path');
-const express = require('express');
-const sanitize = require('sanitize-filename');
-const writeFileAtomicSync = require('write-file-atomic').sync;
+import * as fs from 'node:fs';
+import * as path from 'node:path';

-const { jsonParser } = require('../express-common');
-const { humanizedISO8601DateTime } = require('../util');
+import express from 'express';
+import sanitize from 'sanitize-filename';
+import { sync as writeFileAtomicSync } from 'write-file-atomic';

-const router = express.Router();
+import { jsonParser } from '../express-common.js';
+import { humanizedISO8601DateTime } from '../util.js';
+
+export const router = express.Router();

 router.post('/all', jsonParser, (request, response) => {
     const groups = [];
@@ -131,5 +132,3 @@ router.post('/delete', jsonParser, async (request, response) => {

     return response.send({ ok: true });
 });
-
-module.exports = { router };
@@ -1,14 +1,14 @@
-const fetch = require('node-fetch').default;
-const express = require('express');
-const { AIHorde, ModelGenerationInputStableSamplers, ModelInterrogationFormTypes, HordeAsyncRequestStates } = require('@zeldafan0225/ai_horde');
-const { getVersion, delay, Cache } = require('../util');
-const { readSecret, SECRET_KEYS } = require('./secrets');
-const { jsonParser } = require('../express-common');
+import fetch from 'node-fetch';
+import express from 'express';
+import { AIHorde, ModelGenerationInputStableSamplers, ModelInterrogationFormTypes, HordeAsyncRequestStates } from '@zeldafan0225/ai_horde';
+import { getVersion, delay, Cache } from '../util.js';
+import { readSecret, SECRET_KEYS } from './secrets.js';
+import { jsonParser } from '../express-common.js';

 const ANONYMOUS_KEY = '0000000000';
 const HORDE_TEXT_MODEL_METADATA_URL = 'https://raw.githubusercontent.com/db0/AI-Horde-text-model-reference/main/db.json';
 const cache = new Cache(60 * 1000);
-const router = express.Router();
+export const router = express.Router();

 /**
  * Returns the AIHorde client agent.
@@ -403,5 +403,3 @@ router.post('/generate-image', jsonParser, async (request, response) => {
         return response.sendStatus(500);
     }
 });
-
-module.exports = { router };
@@ -1,10 +1,11 @@
-const fs = require('fs');
-const path = require('path');
-const express = require('express');
-const sanitize = require('sanitize-filename');
+import * as fs from 'node:fs';
+import * as path from 'node:path';

-const { jsonParser } = require('../express-common');
-const { clientRelativePath, removeFileExtension, getImages } = require('../util');
+import express from 'express';
+import sanitize from 'sanitize-filename';
+
+import { jsonParser } from '../express-common.js';
+import { clientRelativePath, removeFileExtension, getImages } from '../util.js';

 /**
  * Ensure the directory for the provided file path exists.
@@ -21,7 +22,7 @@ function ensureDirectoryExistence(filePath) {
     fs.mkdirSync(dirname);
 }

-const router = express.Router();
+export const router = express.Router();

 /**
  * Endpoint to handle image uploads.
@@ -89,5 +90,3 @@ router.post('/list/:folder', (request, response) => {
         return response.status(500).send({ error: 'Unable to retrieve files' });
     }
 });
-
-module.exports = { router };
@@ -1,11 +1,11 @@
-const path = require('path');
+import * as path from 'node:path';
-const express = require('express');
+import express from 'express';
-const sanitize = require('sanitize-filename');
+import sanitize from 'sanitize-filename';
-const writeFileAtomicSync = require('write-file-atomic').sync;
+import { sync as writeFileAtomicSync } from 'write-file-atomic';

-const { jsonParser } = require('../express-common');
+import { jsonParser } from '../express-common.js';

-const router = express.Router();
+export const router = express.Router();

 router.post('/save', jsonParser, (request, response) => {
 if (!request.body || !request.body.name) {
@@ -17,5 +17,3 @@ router.post('/save', jsonParser, (request, response) => {

 return response.sendStatus(200);
 });
-
-module.exports = { router };
@@ -1,9 +1,11 @@
-const fetch = require('node-fetch').default;
+import * as util from 'node:util';
-const express = require('express');
-const util = require('util');
+import fetch from 'node-fetch';
-const { readSecret, SECRET_KEYS } = require('./secrets');
+import express from 'express';
-const { readAllChunks, extractFileFromZipBuffer, forwardFetchResponse } = require('../util');
-const { jsonParser } = require('../express-common');
+import { readSecret, SECRET_KEYS } from './secrets.js';
+import { readAllChunks, extractFileFromZipBuffer, forwardFetchResponse } from '../util.js';
+import { jsonParser } from '../express-common.js';

 const API_NOVELAI = 'https://api.novelai.net';
 const TEXT_NOVELAI = 'https://text.novelai.net';
@@ -110,7 +112,7 @@ function getRepPenaltyWhitelist(model) {
 return null;
 }

-const router = express.Router();
+export const router = express.Router();

 router.post('/status', jsonParser, async function (req, res) {
 if (!req.body) return res.sendStatus(400);
@@ -432,5 +434,3 @@ router.post('/generate-voice', jsonParser, async (request, response) => {
 return response.sendStatus(500);
 }
 });
-
-module.exports = { router };
@@ -1,14 +1,16 @@
-const { readSecret, SECRET_KEYS } = require('./secrets');
+import * as fs from 'node:fs';
-const fetch = require('node-fetch').default;
-const express = require('express');
-const FormData = require('form-data');
-const fs = require('fs');
-const { jsonParser, urlencodedParser } = require('../express-common');
-const { getConfigValue, mergeObjectWithYaml, excludeKeysByYaml, trimV1 } = require('../util');
-const { setAdditionalHeaders } = require('../additional-headers');
-const { OPENROUTER_HEADERS } = require('../constants');

-const router = express.Router();
+import fetch from 'node-fetch';
+import FormData from 'form-data';
+import express from 'express';

+import { jsonParser, urlencodedParser } from '../express-common.js';
+import { getConfigValue, mergeObjectWithYaml, excludeKeysByYaml, trimV1 } from '../util.js';
+import { setAdditionalHeaders } from '../additional-headers.js';
+import { readSecret, SECRET_KEYS } from './secrets.js';
+import { OPENROUTER_HEADERS } from '../constants.js';

+export const router = express.Router();

 router.post('/caption-image', jsonParser, async (request, response) => {
 try {
@@ -341,5 +343,3 @@ custom.post('/generate-voice', jsonParser, async (request, response) => {
 });

 router.use('/custom', custom);
-
-module.exports = { router };
@@ -1,7 +1,7 @@
-const express = require('express');
+import express from 'express';
-const { jsonParser } = require('../express-common');
+import { jsonParser } from '../express-common.js';

-const router = express.Router();
+export const router = express.Router();
 const API_OPENROUTER = 'https://openrouter.ai/api/v1';

 router.post('/models/multimodal', jsonParser, async (_req, res) => {
@@ -28,5 +28,3 @@ router.post('/models/multimodal', jsonParser, async (_req, res) => {
 return res.sendStatus(500);
 }
 });
-
-module.exports = { router };
@@ -1,10 +1,12 @@
-const fs = require('fs');
+import * as fs from 'node:fs';
-const path = require('path');
+import * as path from 'node:path';
-const express = require('express');
-const sanitize = require('sanitize-filename');
+import express from 'express';
-const writeFileAtomicSync = require('write-file-atomic').sync;
+import sanitize from 'sanitize-filename';
-const { getDefaultPresetFile, getDefaultPresets } = require('./content-manager');
+import { sync as writeFileAtomicSync } from 'write-file-atomic';
-const { jsonParser } = require('../express-common');
+import { getDefaultPresetFile, getDefaultPresets } from './content-manager.js';
+import { jsonParser } from '../express-common.js';

 /**
 * Gets the folder and extension for the preset settings based on the API source ID.
@@ -34,7 +36,7 @@ function getPresetSettingsByAPI(apiId, directories) {
 }
 }

-const router = express.Router();
+export const router = express.Router();

 router.post('/save', jsonParser, function (request, response) {
 const name = sanitize(request.body.name);
@@ -127,5 +129,3 @@ router.post('/delete-openai', jsonParser, function (request, response) {

 return response.send({ error: true });
 });
-
-module.exports = { router };
@@ -1,12 +1,13 @@
-const fs = require('fs');
+import * as fs from 'node:fs';
-const path = require('path');
+import * as path from 'node:path';
-const express = require('express');
-const sanitize = require('sanitize-filename');
-const writeFileAtomicSync = require('write-file-atomic').sync;

-const { jsonParser } = require('../express-common');
+import express from 'express';
+import sanitize from 'sanitize-filename';
+import { sync as writeFileAtomicSync } from 'write-file-atomic';

-const router = express.Router();
+import { jsonParser } from '../express-common.js';

+export const router = express.Router();

 router.post('/save', jsonParser, (request, response) => {
 if (!request.body || !request.body.name) {
@@ -31,5 +32,3 @@ router.post('/delete', jsonParser, (request, response) => {

 return response.sendStatus(200);
 });
-
-module.exports = { router };
@@ -1,9 +1,11 @@
-const fetch = require('node-fetch').default;
+import fetch from 'node-fetch';
-const express = require('express');
+import express from 'express';
-const { readSecret, SECRET_KEYS } = require('./secrets');
-const { jsonParser } = require('../express-common');

-const router = express.Router();
+import { decode } from 'html-entities';
+import { readSecret, SECRET_KEYS } from './secrets.js';
+import { jsonParser } from '../express-common.js';

+export const router = express.Router();

 // Cosplay as Chrome
 const visitHeaders = {
@@ -29,7 +31,6 @@ const visitHeaders = {
 * @returns {Promise<string>} Transcript text
 */
 async function extractTranscript(videoPageBody, lang) {
-const he = require('he');
 const RE_XML_TRANSCRIPT = /<text start="([^"]*)" dur="([^"]*)">([^<]*)<\/text>/g;
 const splittedHTML = videoPageBody.split('"captions":');

@@ -84,7 +85,7 @@ async function extractTranscript(videoPageBody, lang) {
 lang: lang ?? captions.captionTracks[0].languageCode,
 }));
 // The text is double-encoded
-const transcriptText = transcript.map((line) => he.decode(he.decode(line.text))).join(' ');
+const transcriptText = transcript.map((line) => decode(decode(line.text))).join(' ');
 return transcriptText;
 }

@@ -263,5 +264,3 @@ router.post('/visit', jsonParser, async (request, response) => {
 return response.sendStatus(500);
 }
 });
-
-module.exports = { router };
@@ -1,12 +1,13 @@
-const fs = require('fs');
+import * as fs from 'node:fs';
-const path = require('path');
+import * as path from 'node:path';
-const express = require('express');
-const { getConfigValue } = require('../util');
-const writeFileAtomicSync = require('write-file-atomic').sync;
-const { jsonParser } = require('../express-common');

-const SECRETS_FILE = 'secrets.json';
+import express from 'express';
-const SECRET_KEYS = {
+import { sync as writeFileAtomicSync } from 'write-file-atomic';
+import { getConfigValue } from '../util.js';
+import { jsonParser } from '../express-common.js';

+export const SECRETS_FILE = 'secrets.json';
+export const SECRET_KEYS = {
 HORDE: 'api_key_horde',
 MANCER: 'api_key_mancer',
 VLLM: 'api_key_vllm',
@@ -62,7 +63,7 @@ const EXPORTABLE_KEYS = [
 * @param {string} key Secret key
 * @param {string} value Secret value
 */
-function writeSecret(directories, key, value) {
+export function writeSecret(directories, key, value) {
 const filePath = path.join(directories.root, SECRETS_FILE);

 if (!fs.existsSync(filePath)) {
@@ -82,7 +83,7 @@ function writeSecret(directories, key, value) {
 * @param {string} key Secret key
 * @returns
 */
-function deleteSecret(directories, key) {
+export function deleteSecret(directories, key) {
 const filePath = path.join(directories.root, SECRETS_FILE);

 if (!fs.existsSync(filePath)) {
@@ -101,7 +102,7 @@ function deleteSecret(directories, key) {
 * @param {string} key Secret key
 * @returns {string} Secret value
 */
-function readSecret(directories, key) {
+export function readSecret(directories, key) {
 const filePath = path.join(directories.root, SECRETS_FILE);

 if (!fs.existsSync(filePath)) {
@@ -118,7 +119,7 @@ function readSecret(directories, key) {
 * @param {import('../users').UserDirectoryList} directories User directories
 * @returns {object} Secret state
 */
-function readSecretState(directories) {
+export function readSecretState(directories) {
 const filePath = path.join(directories.root, SECRETS_FILE);

 if (!fs.existsSync(filePath)) {
@@ -141,7 +142,7 @@ function readSecretState(directories) {
 * @param {import('../users').UserDirectoryList} directories User directories
 * @returns {Record<string, string> | undefined} Secrets
 */
-function getAllSecrets(directories) {
+export function getAllSecrets(directories) {
 const filePath = path.join(directories.root, SECRETS_FILE);

 if (!fs.existsSync(filePath)) {
@@ -154,7 +155,7 @@ function getAllSecrets(directories) {
 return secrets;
 }

-const router = express.Router();
+export const router = express.Router();

 router.post('/write', jsonParser, (request, response) => {
 const key = request.body.key;
@@ -218,13 +219,3 @@ router.post('/find', jsonParser, (request, response) => {
 return response.sendStatus(500);
 }
 });
-
-module.exports = {
-writeSecret,
-readSecret,
-deleteSecret,
-readSecretState,
-getAllSecrets,
-SECRET_KEYS,
-router,
-};
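A recurring effect of this hunk, and of several that follow, is that the aggregate export object at the bottom of a file disappears: each declaration is exported where it is defined, so nothing is left to collect at the end. A minimal before/after sketch of that pattern (illustrative placeholder bodies, not code from this commit):

// Before (CommonJS): exports collected at the bottom of the file.
// function readSecret(directories, key) { /* ... */ }
// module.exports = { readSecret, SECRET_KEYS, router };

// After (ESM): declarations carry the export keyword inline,
// so the trailing module.exports block is simply deleted.
export const SECRET_KEYS = {
    HORDE: 'api_key_horde', // key shown in the hunk above
};

export function readSecret(directories, key) {
    // placeholder body for illustration only
    return undefined;
}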
@@ -1,12 +1,14 @@
-const fs = require('fs');
+import * as fs from 'node:fs';
-const path = require('path');
+import * as path from 'node:path';
-const express = require('express');
-const _ = require('lodash');
+import express from 'express';
-const writeFileAtomicSync = require('write-file-atomic').sync;
+import _ from 'lodash';
-const { SETTINGS_FILE } = require('../constants');
+import { sync as writeFileAtomicSync } from 'write-file-atomic';
-const { getConfigValue, generateTimestamp, removeOldBackups } = require('../util');
-const { jsonParser } = require('../express-common');
+import { SETTINGS_FILE } from '../constants.js';
-const { getAllUserHandles, getUserDirectories } = require('../users');
+import { getConfigValue, generateTimestamp, removeOldBackups } from '../util.js';
+import { jsonParser } from '../express-common.js';
+import { getAllUserHandles, getUserDirectories } from '../users.js';

 const ENABLE_EXTENSIONS = getConfigValue('enableExtensions', true);
 const ENABLE_EXTENSIONS_AUTO_UPDATE = getConfigValue('enableExtensionsAutoUpdate', true);
@@ -190,7 +192,7 @@ function getLatestBackup(handle) {
 return path.join(userDirectories.backups, latestBackup);
 }

-const router = express.Router();
+export const router = express.Router();

 router.post('/save', jsonParser, function (request, response) {
 try {
@@ -357,8 +359,6 @@ router.post('/restore-snapshot', jsonParser, async (request, response) => {
 /**
 * Initializes the settings endpoint
 */
-async function init() {
+export async function init() {
 await backupSettings();
 }
-
-module.exports = { router, init };
@@ -1,7 +1,9 @@
-const express = require('express');
+import express from 'express';
-const { jsonParser } = require('../express-common');
+import wavefile from 'wavefile';
+import { jsonParser } from '../express-common.js';
+import { getPipeline } from '../transformers.mjs';

-const router = express.Router();
+export const router = express.Router();

 /**
 * Gets the audio data from a base64-encoded audio file.
@@ -9,7 +11,6 @@ const router = express.Router();
 * @returns {Float64Array} Audio data
 */
 function getWaveFile(audio) {
-const wavefile = require('wavefile');
 const wav = new wavefile.WaveFile();
 wav.fromDataURI(audio);
 wav.toBitDepth('32f');
@@ -36,8 +37,7 @@ router.post('/recognize', jsonParser, async (req, res) => {
 try {
 const TASK = 'automatic-speech-recognition';
 const { model, audio, lang } = req.body;
-const module = await import('../transformers.mjs');
+const pipe = await getPipeline(TASK, model);
-const pipe = await module.default.getPipeline(TASK, model);
 const wav = getWaveFile(audio);
 const start = performance.now();
 const result = await pipe(wav, { language: lang || null, task: 'transcribe' });
@@ -54,11 +54,9 @@ router.post('/recognize', jsonParser, async (req, res) => {

 router.post('/synthesize', jsonParser, async (req, res) => {
 try {
-const wavefile = require('wavefile');
 const TASK = 'text-to-speech';
 const { text, model, speaker } = req.body;
-const module = await import('../transformers.mjs');
+const pipe = await getPipeline(TASK, model);
-const pipe = await module.default.getPipeline(TASK, model);
 const speaker_embeddings = speaker
 ? new Float32Array(new Uint8Array(Buffer.from(speaker.startsWith('data:') ? speaker.split(',')[1] : speaker, 'base64')).buffer)
 : null;
@@ -78,5 +76,3 @@ router.post('/synthesize', jsonParser, async (req, res) => {
 return res.sendStatus(500);
 }
 });
-
-module.exports = { router };
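This hunk also drops the dynamic-import workaround that CommonJS files needed to reach the ESM-only transformers module: once the consumer is itself an ES module, the pipeline factory can be a plain top-level named import. A hedged sketch of the change (the wrapper function below is illustrative, not part of the commit):

// Before: pulled in lazily and unwrapped from the default export.
// const module = await import('../transformers.mjs');
// const pipe = await module.default.getPipeline(TASK, model);

// After: one static named import, usable directly in the handlers.
import { getPipeline } from '../transformers.mjs';

async function transcribe(model, wav, lang) {
    // illustrative wrapper around the same call shape as the route handler
    const pipe = await getPipeline('automatic-speech-recognition', model);
    return pipe(wav, { language: lang || null, task: 'transcribe' });
}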
@@ -1,12 +1,13 @@
+import * as fs from 'node:fs';
+import * as path from 'node:path';

-const fs = require('fs');
+import express from 'express';
-const path = require('path');
+import mime from 'mime-types';
-const express = require('express');
+import sanitize from 'sanitize-filename';
-const mime = require('mime-types');
+import { sync as writeFileAtomicSync } from 'write-file-atomic';
-const sanitize = require('sanitize-filename');
-const writeFileAtomicSync = require('write-file-atomic').sync;
+import { getImageBuffers } from '../util.js';
-const { getImageBuffers } = require('../util');
+import { jsonParser, urlencodedParser } from '../express-common.js';
-const { jsonParser, urlencodedParser } = require('../express-common');

 /**
 * Gets the path to the sprites folder for the provided character name
@@ -45,7 +46,7 @@ function getSpritesPath(directories, name, isSubfolder) {
 * @param {object} data RisuAI character data
 * @returns {void}
 */
-function importRisuSprites(directories, data) {
+export function importRisuSprites(directories, data) {
 try {
 const name = data?.data?.name;
 const risuData = data?.data?.extensions?.risuai;
@@ -106,7 +107,7 @@ function importRisuSprites(directories, data) {
 }
 }

-const router = express.Router();
+export const router = express.Router();

 router.get('/get', jsonParser, function (request, response) {
 const name = String(request.query.name);
@@ -259,8 +260,3 @@ router.post('/upload', urlencodedParser, async (request, response) => {
 return response.sendStatus(500);
 }
 });
-
-module.exports = {
-router,
-importRisuSprites,
-};
@@ -1,13 +1,15 @@
-const express = require('express');
+import * as fs from 'node:fs';
-const fetch = require('node-fetch').default;
+import * as path from 'node:path';
-const sanitize = require('sanitize-filename');
-const { getBasicAuthHeader, delay, getHexString } = require('../util.js');
+import express from 'express';
-const fs = require('fs');
+import fetch from 'node-fetch';
-const path = require('path');
+import sanitize from 'sanitize-filename';
-const writeFileAtomicSync = require('write-file-atomic').sync;
+import { sync as writeFileAtomicSync } from 'write-file-atomic';
-const { jsonParser } = require('../express-common');
+import FormData from 'form-data';
-const { readSecret, SECRET_KEYS } = require('./secrets.js');
-const FormData = require('form-data');
+import { getBasicAuthHeader, delay, getHexString } from '../util.js';
+import { jsonParser } from '../express-common.js';
+import { readSecret, SECRET_KEYS } from './secrets.js';

 /**
 * Gets the comfy workflows.
@@ -21,7 +23,7 @@ function getComfyWorkflows(directories) {
 .sort(Intl.Collator().compare);
 }

-const router = express.Router();
+export const router = express.Router();

 router.post('/ping', jsonParser, async (request, response) => {
 try {
@@ -993,5 +995,3 @@ router.use('/pollinations', pollinations);
 router.use('/stability', stability);
 router.use('/blockentropy', blockentropy);
 router.use('/huggingface', huggingface);
-
-module.exports = { router };
@@ -1,14 +1,15 @@
-const fs = require('fs');
+import * as fs from 'node:fs';
-const path = require('path');
+import * as path from 'node:path';
-const express = require('express');
+import * as crypto from 'node:crypto';
-const writeFileAtomic = require('write-file-atomic');
-const crypto = require('crypto');
+import express from 'express';
+import writeFileAtomic from 'write-file-atomic';

 const readFile = fs.promises.readFile;
 const readdir = fs.promises.readdir;

-const { jsonParser } = require('../express-common');
+import { jsonParser } from '../express-common.js';
-const { getAllUserHandles, getUserDirectories } = require('../users');
+import { getAllUserHandles, getUserDirectories } from '../users.js';

 const STATS_FILE = 'stats.json';

@@ -146,7 +147,7 @@ async function collectAndCreateStats(chatsPath, charactersPath) {
 * @param {string} chatsPath Path to the directory containing the chat files.
 * @param {string} charactersPath Path to the directory containing the character files.
 */
-async function recreateStats(handle, chatsPath, charactersPath) {
+export async function recreateStats(handle, chatsPath, charactersPath) {
 console.log('Collecting and creating stats for user:', handle);
 const stats = await collectAndCreateStats(chatsPath, charactersPath);
 STATS.set(handle, stats);
@@ -157,7 +158,7 @@ async function recreateStats(handle, chatsPath, charactersPath) {
 * Loads the stats file into memory. If the file doesn't exist or is invalid,
 * initializes stats by collecting and creating them for each character.
 */
-async function init() {
+export async function init() {
 try {
 const userHandles = await getAllUserHandles();
 for (const handle of userHandles) {
@@ -209,7 +210,7 @@ async function saveStatsToFile() {
 * Attempts to save charStats to a file and then terminates the process.
 * If an error occurs during the file write, it logs the error before exiting.
 */
-async function onExit() {
+export async function onExit() {
 try {
 await saveStatsToFile();
 } catch (err) {
@@ -434,7 +435,7 @@ function calculateTotalGenTimeAndWordCount(
 };
 }

-const router = express.Router();
+export const router = express.Router();

 /**
 * Handle a POST request to get the stats object
@@ -465,10 +466,3 @@ router.post('/update', jsonParser, function (request, response) {
 setCharStats(request.user.profile.handle, request.body);
 return response.sendStatus(200);
 });
-
-module.exports = {
-router,
-recreateStats,
-init,
-onExit,
-};
@@ -1,11 +1,13 @@
-const express = require('express');
+import * as path from 'node:path';
-const path = require('path');
+import * as fs from 'node:fs';
-const fs = require('fs');
-const sanitize = require('sanitize-filename');
-const writeFileAtomicSync = require('write-file-atomic').sync;
-const { jsonParser } = require('../express-common');

-const router = express.Router();
+import express from 'express';
+import sanitize from 'sanitize-filename';
+import { sync as writeFileAtomicSync } from 'write-file-atomic';

+import { jsonParser } from '../express-common.js';

+export const router = express.Router();

 router.post('/save', jsonParser, (request, response) => {
 if (!request.body || !request.body.name) {
@@ -36,5 +38,3 @@ router.post('/delete', jsonParser, function (request, response) {
 return response.sendStatus(500);
 }
 });
-
-module.exports = { router };
@@ -1,14 +1,16 @@
-const fs = require('fs');
+import * as fs from 'node:fs';
-const fsPromises = require('fs').promises;
+import * as fsPromises from 'node:fs/promises';
-const path = require('path');
+import * as path from 'node:path';
-const mime = require('mime-types');
-const express = require('express');
+import mime from 'mime-types';
-const sanitize = require('sanitize-filename');
+import express from 'express';
-const jimp = require('jimp');
+import sanitize from 'sanitize-filename';
-const writeFileAtomicSync = require('write-file-atomic').sync;
+import jimp from 'jimp';
-const { getAllUserHandles, getUserDirectories } = require('../users');
+import { sync as writeFileAtomicSync } from 'write-file-atomic';
-const { getConfigValue } = require('../util');
-const { jsonParser } = require('../express-common');
+import { getAllUserHandles, getUserDirectories } from '../users.js';
+import { getConfigValue } from '../util.js';
+import { jsonParser } from '../express-common.js';

 const thumbnailsDisabled = getConfigValue('disableThumbnails', false);
 const quality = getConfigValue('thumbnailsQuality', 95);
@@ -62,7 +64,7 @@ function getOriginalFolder(directories, type) {
 * @param {'bg' | 'avatar'} type Type of the thumbnail
 * @param {string} file Name of the file
 */
-function invalidateThumbnail(directories, type, file) {
+export function invalidateThumbnail(directories, type, file) {
 const folder = getThumbnailFolder(directories, type);
 if (folder === undefined) throw new Error('Invalid thumbnail type');

@@ -141,7 +143,7 @@ async function generateThumbnail(directories, type, file) {
 * Ensures that the thumbnail cache for backgrounds is valid.
 * @returns {Promise<void>} Promise that resolves when the cache is validated
 */
-async function ensureThumbnailCache() {
+export async function ensureThumbnailCache() {
 const userHandles = await getAllUserHandles();
 for (const handle of userHandles) {
 const directories = getUserDirectories(handle);
@@ -166,7 +168,7 @@ async function ensureThumbnailCache() {
 }
 }

-const router = express.Router();
+export const router = express.Router();

 // Important: This route must be mounted as '/thumbnail'. It is used in the client code and saved to chat files.
 router.get('/', jsonParser, async function (request, response) {
@@ -227,9 +229,3 @@ router.get('/', jsonParser, async function (request, response) {
 return response.sendStatus(500);
 }
 });
-
-module.exports = {
-invalidateThumbnail,
-ensureThumbnailCache,
-router,
-};
@@ -1,15 +1,18 @@
-const fs = require('fs');
+import * as fs from 'node:fs';
-const path = require('path');
+import * as path from 'node:path';
-const express = require('express');
-const { SentencePieceProcessor } = require('@agnai/sentencepiece-js');
+import express from 'express';
-const tiktoken = require('tiktoken');
+import { sync as writeFileAtomicSync } from 'write-file-atomic';
-const { Tokenizer } = require('@agnai/web-tokenizers');
-const { convertClaudePrompt } = require('../prompt-converters');
+import { Tokenizer } from '@agnai/web-tokenizers';
-const { TEXTGEN_TYPES } = require('../constants');
+import { SentencePieceProcessor } from '@agnai/sentencepiece-js';
-const { jsonParser } = require('../express-common');
+import tiktoken from 'tiktoken';
-const { setAdditionalHeaders } = require('../additional-headers');
-const { getConfigValue, isValidUrl } = require('../util');
+import { convertClaudePrompt } from '../prompt-converters.js';
-const writeFileAtomicSync = require('write-file-atomic').sync;
+import { TEXTGEN_TYPES } from '../constants.js';
+import { jsonParser } from '../express-common.js';
+import { setAdditionalHeaders } from '../additional-headers.js';
+import { getConfigValue, isValidUrl } from '../util.js';

 /**
 * @typedef { (req: import('express').Request, res: import('express').Response) => Promise<any> } TokenizationHandler
@@ -23,7 +26,7 @@ const tokenizersCache = {};
 /**
 * @type {string[]}
 */
-const TEXT_COMPLETION_MODELS = [
+export const TEXT_COMPLETION_MODELS = [
 'gpt-3.5-turbo-instruct',
 'gpt-3.5-turbo-instruct-0914',
 'text-davinci-003',
@@ -223,7 +226,7 @@ const commandTokenizer = new WebTokenizer('https://github.com/SillyTavern/SillyT
 const qwen2Tokenizer = new WebTokenizer('https://github.com/SillyTavern/SillyTavern-Tokenizers/raw/main/qwen2.json', 'src/tokenizers/llama3.json');
 const nemoTokenizer = new WebTokenizer('https://github.com/SillyTavern/SillyTavern-Tokenizers/raw/main/nemo.json', 'src/tokenizers/llama3.json');

-const sentencepieceTokenizers = [
+export const sentencepieceTokenizers = [
 'llama',
 'nerdstash',
 'nerdstash_v2',
@@ -238,7 +241,7 @@ const sentencepieceTokenizers = [
 * @param {string} model Sentencepiece model name
 * @returns {SentencePieceTokenizer|null} Sentencepiece tokenizer
 */
-function getSentencepiceTokenizer(model) {
+export function getSentencepiceTokenizer(model) {
 if (model.includes('llama')) {
 return spp_llama;
 }
@@ -350,7 +353,7 @@ function getWebTokenizersChunks(tokenizer, ids) {
 * @param {string} requestModel Models to use for tokenization
 * @returns {string} Tokenizer model to use
 */
-function getTokenizerModel(requestModel) {
+export function getTokenizerModel(requestModel) {
 if (requestModel.includes('o1-preview') || requestModel.includes('o1-mini')) {
 return 'gpt-4o';
 }
@@ -427,7 +430,7 @@ function getTokenizerModel(requestModel) {
 return 'gpt-3.5-turbo';
 }

-function getTiktokenTokenizer(model) {
+export function getTiktokenTokenizer(model) {
 if (tokenizersCache[model]) {
 return tokenizersCache[model];
 }
@@ -444,7 +447,7 @@ function getTiktokenTokenizer(model) {
 * @param {object[]} messages Array of messages
 * @returns {number} Number of tokens
 */
-function countWebTokenizerTokens(tokenizer, messages) {
+export function countWebTokenizerTokens(tokenizer, messages) {
 // Should be fine if we use the old conversion method instead of the messages API one i think?
 const convertedPrompt = convertClaudePrompt(messages, false, '', false, false, '', false);

@@ -636,7 +639,7 @@ function createWebTokenizerDecodingHandler(tokenizer) {
 };
 }

-const router = express.Router();
+export const router = express.Router();

 router.post('/llama/encode', jsonParser, createSentencepieceEncodingHandler(spp_llama));
 router.post('/nerdstash/encode', jsonParser, createSentencepieceEncodingHandler(spp_nerd));
@@ -1002,13 +1005,3 @@ router.post('/remote/textgenerationwebui/encode', jsonParser, async function (re
 return response.send({ error: true });
 }
 });
-
-module.exports = {
-TEXT_COMPLETION_MODELS,
-getTokenizerModel,
-getTiktokenTokenizer,
-countWebTokenizerTokens,
-getSentencepiceTokenizer,
-sentencepieceTokenizers,
-router,
-};
@@ -1,10 +1,16 @@
-const fetch = require('node-fetch').default;
+import * as https from 'node:https';
-const https = require('https');
+import { createRequire } from 'node:module';
-const express = require('express');
+import fetch from 'node-fetch';
-const iconv = require('iconv-lite');
+import express from 'express';
-const { readSecret, SECRET_KEYS } = require('./secrets');
+import iconv from 'iconv-lite';
-const { getConfigValue, uuidv4 } = require('../util');
+import bingTranslateApi from 'bing-translate-api';
-const { jsonParser } = require('../express-common');

+const require = createRequire(import.meta.url);
+const { generateRequestUrl, normaliseResponse } = require('google-translate-api-browser');

+import { readSecret, SECRET_KEYS } from './secrets.js';
+import { getConfigValue, uuidv4 } from '../util.js';
+import { jsonParser } from '../express-common.js';

 const DEEPLX_URL_DEFAULT = 'http://127.0.0.1:1188/translate';
 const ONERING_URL_DEFAULT = 'http://127.0.0.1:4990/translate';
@@ -23,7 +29,7 @@ function decodeBuffer(buffer) {
 }
 }

-const router = express.Router();
+export const router = express.Router();

 router.post('/libre', jsonParser, async (request, response) => {
 const key = readSecret(request.user.directories, SECRET_KEYS.LIBRE);
@@ -82,7 +88,6 @@ router.post('/libre', jsonParser, async (request, response) => {

 router.post('/google', jsonParser, async (request, response) => {
 try {
-const { generateRequestUrl, normaliseResponse } = require('google-translate-api-browser');
 const text = request.body.text;
 const lang = request.body.lang;

@@ -386,7 +391,6 @@ router.post('/deeplx', jsonParser, async (request, response) => {
 });

 router.post('/bing', jsonParser, async (request, response) => {
-const bingTranslateApi = require('bing-translate-api');
 const text = request.body.text;
 let lang = request.body.lang;

@@ -408,5 +412,3 @@ router.post('/bing', jsonParser, async (request, response) => {
 return response.sendStatus(500);
 });
 });
-
-module.exports = { router };
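This file also shows the escape hatch used when a dependency cannot be imported cleanly as an ES module: a `require()` handle is recreated for this one module via `createRequire`, mirroring the google-translate-api-browser usage in the hunk above. A minimal sketch of that idiom:

import { createRequire } from 'node:module';

// createRequire builds a CommonJS require() scoped to this module's URL,
// so CJS-style packages can still be loaded from ESM code when needed.
const require = createRequire(import.meta.url);
const { generateRequestUrl, normaliseResponse } = require('google-translate-api-browser');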
@@ -1,10 +1,11 @@
-const fsPromises = require('fs').promises;
+import { promises as fsPromises } from 'node:fs';
-const storage = require('node-persist');
-const express = require('express');
+import storage from 'node-persist';
-const lodash = require('lodash');
+import express from 'express';
-const { jsonParser } = require('../express-common');
+import lodash from 'lodash';
-const { checkForNewContent } = require('./content-manager');
+import { jsonParser } from '../express-common.js';
-const {
+import { checkForNewContent } from './content-manager.js';
+import {
 KEY_PREFIX,
 toKey,
 requireAdminMiddleware,
@@ -14,10 +15,10 @@ const {
 getPasswordHash,
 getUserDirectories,
 ensurePublicDirectoriesExist,
-} = require('../users');
+} from '../users.js';
-const { DEFAULT_USER } = require('../constants');
+import { DEFAULT_USER } from '../constants.js';

-const router = express.Router();
+export const router = express.Router();

 router.post('/get', requireAdminMiddleware, jsonParser, async (_request, response) => {
 try {
@@ -249,7 +250,3 @@ router.post('/slugify', requireAdminMiddleware, jsonParser, async (request, resp
 return response.sendStatus(500);
 }
 });
-
-module.exports = {
-router,
-};
@@ -1,18 +1,20 @@
-const path = require('path');
+import * as path from 'node:path';
-const fsPromises = require('fs').promises;
+import { promises as fsPromises } from 'node:fs';
-const storage = require('node-persist');
+import * as crypto from 'node:crypto';
-const express = require('express');
-const crypto = require('crypto');
+import storage from 'node-persist';
-const { jsonParser } = require('../express-common');
+import express from 'express';
-const { getUserAvatar, toKey, getPasswordHash, getPasswordSalt, createBackupArchive, ensurePublicDirectoriesExist, toAvatarKey } = require('../users');
-const { SETTINGS_FILE } = require('../constants');
+import { jsonParser } from '../express-common.js';
-const contentManager = require('./content-manager');
+import { getUserAvatar, toKey, getPasswordHash, getPasswordSalt, createBackupArchive, ensurePublicDirectoriesExist, toAvatarKey } from '../users.js';
-const { color, Cache } = require('../util');
+import { SETTINGS_FILE } from '../constants.js';
-const { checkForNewContent } = require('./content-manager');
+import * as contentManager from './content-manager.js';
+import { color, Cache } from '../util.js';
+import { checkForNewContent } from './content-manager.js';

 const RESET_CACHE = new Cache(5 * 60 * 1000);

-const router = express.Router();
+export const router = express.Router();

 router.post('/logout', async (request, response) => {
 try {
@@ -251,7 +253,3 @@ router.post('/reset-step2', jsonParser, async (request, response) => {
 return response.sendStatus(500);
 }
 });
-
-module.exports = {
-router,
-};
@@ -1,15 +1,16 @@
-const crypto = require('crypto');
+import * as crypto from 'node:crypto';
-const storage = require('node-persist');
-const express = require('express');
+import storage from 'node-persist';
-const { RateLimiterMemory, RateLimiterRes } = require('rate-limiter-flexible');
+import express from 'express';
-const { jsonParser, getIpFromRequest } = require('../express-common');
+import { RateLimiterMemory, RateLimiterRes } from 'rate-limiter-flexible';
-const { color, Cache, getConfigValue } = require('../util');
+import { jsonParser, getIpFromRequest } from '../express-common.js';
-const { KEY_PREFIX, getUserAvatar, toKey, getPasswordHash, getPasswordSalt } = require('../users');
+import { color, Cache, getConfigValue } from '../util.js';
+import { KEY_PREFIX, getUserAvatar, toKey, getPasswordHash, getPasswordSalt } from '../users.js';

 const DISCREET_LOGIN = getConfigValue('enableDiscreetLogin', false);
 const MFA_CACHE = new Cache(5 * 60 * 1000);

-const router = express.Router();
+export const router = express.Router();
 const loginLimiter = new RateLimiterMemory({
 points: 5,
 duration: 60,
@@ -193,7 +194,3 @@ router.post('/recover-step2', jsonParser, async (request, response) => {
 return response.sendStatus(500);
 }
 });
-
-module.exports = {
-router,
-};
@ -1,10 +1,22 @@
|
|||||||
const vectra = require('vectra');
|
import * as path from 'node:path';
|
||||||
const path = require('path');
|
import * as fs from 'node:fs';
|
||||||
const fs = require('fs');
|
|
||||||
const express = require('express');
|
import vectra from 'vectra';
|
||||||
const sanitize = require('sanitize-filename');
|
import express from 'express';
|
||||||
const { jsonParser } = require('../express-common');
|
import sanitize from 'sanitize-filename';
|
||||||
const { getConfigValue } = require('../util');
|
|
||||||
|
import { jsonParser } from '../express-common.js';
|
||||||
|
import { getConfigValue } from '../util.js';
|
||||||
|
|
||||||
|
import { getNomicAIBatchVector, getNomicAIVector } from '../vectors/nomicai-vectors.js';
|
||||||
|
import { getOpenAIVector, getOpenAIBatchVector } from '../vectors/openai-vectors.js';
|
||||||
|
import { getTransformersVector, getTransformersBatchVector } from '../vectors/embedding.js';
|
||||||
|
import { getExtrasVector, getExtrasBatchVector } from '../vectors/extras-vectors.js';
|
||||||
|
import { getMakerSuiteVector, getMakerSuiteBatchVector } from '../vectors/makersuite-vectors.js';
|
||||||
|
import { getCohereVector, getCohereBatchVector } from '../vectors/cohere-vectors.js';
|
||||||
|
import { getLlamaCppVector, getLlamaCppBatchVector } from '../vectors/llamacpp-vectors.js';
|
||||||
|
import { getVllmVector, getVllmBatchVector } from '../vectors/vllm-vectors.js';
|
||||||
|
import { getOllamaVector, getOllamaBatchVector } from '../vectors/ollama-vectors.js';
|
||||||
|
|
||||||
// Don't forget to add new sources to the SOURCES array
|
// Don't forget to add new sources to the SOURCES array
|
||||||
const SOURCES = [
|
const SOURCES = [
|
||||||
@ -33,25 +45,25 @@ const SOURCES = [
|
|||||||
async function getVector(source, sourceSettings, text, isQuery, directories) {
|
async function getVector(source, sourceSettings, text, isQuery, directories) {
|
||||||
switch (source) {
|
switch (source) {
|
||||||
case 'nomicai':
|
case 'nomicai':
|
||||||
return require('../vectors/nomicai-vectors').getNomicAIVector(text, source, directories);
|
return getNomicAIVector(text, source, directories);
|
||||||
case 'togetherai':
|
case 'togetherai':
|
||||||
case 'mistral':
|
case 'mistral':
|
||||||
case 'openai':
|
case 'openai':
|
||||||
return require('../vectors/openai-vectors').getOpenAIVector(text, source, directories, sourceSettings.model);
|
return getOpenAIVector(text, source, directories, sourceSettings.model);
|
||||||
case 'transformers':
|
case 'transformers':
|
||||||
return require('../vectors/embedding').getTransformersVector(text);
|
return getTransformersVector(text);
|
||||||
case 'extras':
|
case 'extras':
|
||||||
return require('../vectors/extras-vectors').getExtrasVector(text, sourceSettings.extrasUrl, sourceSettings.extrasKey);
|
return getExtrasVector(text, sourceSettings.extrasUrl, sourceSettings.extrasKey);
|
||||||
case 'palm':
|
case 'palm':
|
||||||
return require('../vectors/makersuite-vectors').getMakerSuiteVector(text, directories);
|
return getMakerSuiteVector(text, directories);
|
||||||
case 'cohere':
|
case 'cohere':
|
||||||
return require('../vectors/cohere-vectors').getCohereVector(text, isQuery, directories, sourceSettings.model);
|
return getCohereVector(text, isQuery, directories, sourceSettings.model);
|
||||||
case 'llamacpp':
|
case 'llamacpp':
|
||||||
return require('../vectors/llamacpp-vectors').getLlamaCppVector(text, sourceSettings.apiUrl, directories);
|
return getLlamaCppVector(text, sourceSettings.apiUrl, directories);
|
||||||
case 'vllm':
|
case 'vllm':
|
||||||
-return require('../vectors/vllm-vectors').getVllmVector(text, sourceSettings.apiUrl, sourceSettings.model, directories);
+return getVllmVector(text, sourceSettings.apiUrl, sourceSettings.model, directories);
 case 'ollama':
-return require('../vectors/ollama-vectors').getOllamaVector(text, sourceSettings.apiUrl, sourceSettings.model, sourceSettings.keep, directories);
+return getOllamaVector(text, sourceSettings.apiUrl, sourceSettings.model, sourceSettings.keep, directories);
 }

 throw new Error(`Unknown vector source ${source}`);
@@ -74,33 +86,33 @@ async function getBatchVector(source, sourceSettings, texts, isQuery, directorie
 for (let batch of batches) {
 switch (source) {
 case 'nomicai':
-results.push(...await require('../vectors/nomicai-vectors').getNomicAIBatchVector(batch, source, directories));
+results.push(...await getNomicAIBatchVector(batch, source, directories));
 break;
 case 'togetherai':
 case 'mistral':
 case 'openai':
-results.push(...await require('../vectors/openai-vectors').getOpenAIBatchVector(batch, source, directories, sourceSettings.model));
+results.push(...await getOpenAIBatchVector(batch, source, directories, sourceSettings.model));
 break;
 case 'transformers':
-results.push(...await require('../vectors/embedding').getTransformersBatchVector(batch));
+results.push(...await getTransformersBatchVector(batch));
 break;
 case 'extras':
-results.push(...await require('../vectors/extras-vectors').getExtrasBatchVector(batch, sourceSettings.extrasUrl, sourceSettings.extrasKey));
+results.push(...await getExtrasBatchVector(batch, sourceSettings.extrasUrl, sourceSettings.extrasKey));
 break;
 case 'palm':
-results.push(...await require('../vectors/makersuite-vectors').getMakerSuiteBatchVector(batch, directories));
+results.push(...await getMakerSuiteBatchVector(batch, directories));
 break;
 case 'cohere':
-results.push(...await require('../vectors/cohere-vectors').getCohereBatchVector(batch, isQuery, directories, sourceSettings.model));
+results.push(...await getCohereBatchVector(batch, isQuery, directories, sourceSettings.model));
 break;
 case 'llamacpp':
-results.push(...await require('../vectors/llamacpp-vectors').getLlamaCppBatchVector(batch, sourceSettings.apiUrl, directories));
+results.push(...await getLlamaCppBatchVector(batch, sourceSettings.apiUrl, directories));
 break;
 case 'vllm':
-results.push(...await require('../vectors/vllm-vectors').getVllmBatchVector(batch, sourceSettings.apiUrl, sourceSettings.model, directories));
+results.push(...await getVllmBatchVector(batch, sourceSettings.apiUrl, sourceSettings.model, directories));
 break;
 case 'ollama':
-results.push(...await require('../vectors/ollama-vectors').getOllamaBatchVector(batch, sourceSettings.apiUrl, sourceSettings.model, sourceSettings.keep, directories));
+results.push(...await getOllamaBatchVector(batch, sourceSettings.apiUrl, sourceSettings.model, sourceSettings.keep, directories));
 break;
 default:
 throw new Error(`Unknown vector source ${source}`);
@@ -359,7 +371,7 @@ async function regenerateCorruptedIndexErrorHandler(req, res, error) {
 return res.sendStatus(500);
 }

-const router = express.Router();
+export const router = express.Router();

 router.post('/query', jsonParser, async (req, res) => {
 try {
@@ -495,5 +507,3 @@ router.post('/purge', jsonParser, async (req, res) => {
 return res.sendStatus(500);
 }
 });
-
-module.exports = { router };
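The change above repeats for every embedding backend: the lazy require('../vectors/...') calls inside the switch become top-level named imports, so each case only calls a function that is already in scope. A minimal sketch of the resulting ESM shape (the module path and function name below are illustrative, not the repository's exact ones):

// vectors-example.mjs (hypothetical)
import { getOllamaVector } from './ollama-vectors.js';

export async function getVector(source, text) {
    switch (source) {
        case 'ollama':
            // The backend module is imported once at load time instead of per call.
            return getOllamaVector(text);
        default:
            throw new Error(`Unknown vector source ${source}`);
    }
}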
@@ -1,10 +1,11 @@
-const fs = require('fs');
-const path = require('path');
-const express = require('express');
-const sanitize = require('sanitize-filename');
-const writeFileAtomicSync = require('write-file-atomic').sync;
-
-const { jsonParser, urlencodedParser } = require('../express-common');
+import * as fs from 'node:fs';
+import * as path from 'node:path';
+
+import express from 'express';
+import sanitize from 'sanitize-filename';
+import { sync as writeFileAtomicSync } from 'write-file-atomic';
+
+import { jsonParser, urlencodedParser } from '../express-common.js';

 /**
 * Reads a World Info file and returns its contents
@@ -13,7 +14,7 @@ const { jsonParser, urlencodedParser } = require('../express-common');
 * @param {boolean} allowDummy If true, returns an empty object if the file doesn't exist
 * @returns {object} World Info file contents
 */
-function readWorldInfoFile(directories, worldInfoName, allowDummy) {
+export function readWorldInfoFile(directories, worldInfoName, allowDummy) {
 const dummyObject = allowDummy ? { entries: {} } : null;

 if (!worldInfoName) {
@@ -33,7 +34,7 @@ function readWorldInfoFile(directories, worldInfoName, allowDummy) {
 return worldInfo;
 }

-const router = express.Router();
+export const router = express.Router();

 router.post('/get', jsonParser, (request, response) => {
 if (!request.body?.name) {
@@ -122,5 +123,3 @@ router.post('/edit', jsonParser, (request, response) => {

 return response.send({ ok: true });
 });
-
-module.exports = { router, readWorldInfoFile };
@@ -1,28 +1,27 @@
-const express = require('express');
-const ipaddr = require('ipaddr.js');
+import express from 'express';
+import ipaddr from 'ipaddr.js';

 // Instantiate parser middleware here with application-level size limits
-const jsonParser = express.json({ limit: '200mb' });
-const urlencodedParser = express.urlencoded({ extended: true, limit: '200mb' });
+export const jsonParser = express.json({ limit: '200mb' });
+export const urlencodedParser = express.urlencoded({ extended: true, limit: '200mb' });

 /**
 * Gets the IP address of the client from the request object.
-* @param {import('express'.Request)} req Request object
+* @param {import('express').Request} req Request object
 * @returns {string} IP address of the client
 */
-function getIpFromRequest(req) {
-let clientIp = req.connection.remoteAddress;
+export function getIpFromRequest(req) {
+let clientIp = req.socket.remoteAddress;
+if (!clientIp) {
+return 'unknown';
+}
 let ip = ipaddr.parse(clientIp);
 // Check if the IP address is IPv4-mapped IPv6 address
 if (ip.kind() === 'ipv6' && ip instanceof ipaddr.IPv6 && ip.isIPv4MappedAddress()) {
 const ipv4 = ip.toIPv4Address().toString();
 clientIp = ipv4;
 } else {
-clientIp = ip;
-clientIp = clientIp.toString();
+clientIp = ip.toString();
 }
 return clientIp;
 }
-
-
-module.exports = { jsonParser, urlencodedParser, getIpFromRequest };
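Note that the relative imports rewritten in this commit, like '../express-common.js' above, spell out the .js extension: Node's ESM resolver does not append extensions to relative specifiers the way require() does. A hedged sketch of a consumer (file name and location are illustrative):

// client-logger.js (hypothetical consumer)
// Omitting '.js' on a relative specifier would fail with ERR_MODULE_NOT_FOUND under the ESM loader.
import { getIpFromRequest } from '../express-common.js';

export function logClient(req, _res, next) {
    console.log('Request from', getIpFromRequest(req)); // returns 'unknown' when the socket has no remote address
    next();
}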
@@ -2,9 +2,9 @@
 * When applied, this middleware will ensure the request contains the required header for basic authentication and only
 * allow access to the endpoint after successful authentication.
 */
-const { getAllUserHandles, toKey, getPasswordHash } = require('../users.js');
-const { getConfig, getConfigValue } = require('../util.js');
-const storage = require('node-persist');
+import storage from 'node-persist';
+import { getAllUserHandles, toKey, getPasswordHash } from '../users.js';
+import { getConfig, getConfigValue } from '../util.js';

 const PER_USER_BASIC_AUTH = getConfigValue('perUserBasicAuth', false);
 const ENABLE_ACCOUNTS = getConfigValue('enableUserAccounts', false);
@@ -49,4 +49,4 @@ const basicAuthMiddleware = async function (request, response, callback) {
 return unauthorizedResponse(response);
 };

-module.exports = basicAuthMiddleware;
+export default basicAuthMiddleware;
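The single-value module.exports here becomes a default export, so consumers switch from require() to a default import. A rough sketch of the consuming side (file names and port are illustrative, not taken from the repository):

// app-setup.js (hypothetical)
import express from 'express';
import basicAuthMiddleware from './middleware/example-basic-auth.js'; // default import of the module's default export

const app = express();
app.use(basicAuthMiddleware);
app.listen(8000);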
@@ -14,7 +14,7 @@ function decodeFileName(str) {
 * @param {import('express').Response} _res Response
 * @param {import('express').NextFunction} next Next middleware
 */
-function multerMonkeyPatch(req, _res, next) {
+export default function multerMonkeyPatch(req, _res, next) {
 try {
 if (req.file) {
 req.file.originalname = decodeFileName(req.file.originalname);
@@ -26,5 +26,3 @@ function multerMonkeyPatch(req, _res, next) {
 next();
 }
 }
-
-module.exports = multerMonkeyPatch;
@@ -1,9 +1,9 @@
-const path = require('path');
-const fs = require('fs');
-const ipMatching = require('ip-matching');
+import * as path from 'node:path';
+import * as fs from 'node:fs';
+import ipMatching from 'ip-matching';

-const { getIpFromRequest } = require('../express-common');
-const { color, getConfigValue } = require('../util');
+import { getIpFromRequest } from '../express-common.js';
+import { color, getConfigValue } from '../util.js';

 const whitelistPath = path.join(process.cwd(), './whitelist.txt');
 const enableForwardedWhitelist = getConfigValue('enableForwardedWhitelist', false);
@@ -50,7 +50,7 @@ function getForwardedIp(req) {
 * @param {boolean} listen If listen mode is enabled via config or command line
 * @returns {import('express').RequestHandler} The middleware function
 */
-function whitelistMiddleware(whitelistMode, listen) {
+export default function whitelistMiddleware(whitelistMode, listen) {
 return function (req, res, next) {
 const clientIp = getIpFromRequest(req);
 const forwardedIp = getForwardedIp(req);
@@ -82,5 +82,3 @@ function whitelistMiddleware(whitelistMode, listen) {
 next();
 };
 }
-
-module.exports = whitelistMiddleware;
@@ -1,8 +1,9 @@
-const fs = require('fs');
-const path = require('path');
-const url = require('url');
-const express = require('express');
-const { getConfigValue } = require('./util');
+import * as fs from 'node:fs';
+import * as path from 'node:path';
+import * as url from 'node:url';
+
+import express from 'express';
+import { getConfigValue } from './util.js';

 const enableServerPlugins = getConfigValue('enableServerPlugins', false);

 /**
@@ -32,7 +33,7 @@ const isESModule = (file) => path.extname(file) === '.mjs';
 * @returns {Promise<Function>} Promise that resolves when all plugins are loaded. Resolves to a "cleanup" function to
 * be called before the server shuts down.
 */
-async function loadPlugins(app, pluginsPath) {
+export async function loadPlugins(app, pluginsPath) {
 const exitHooks = [];
 const emptyFn = () => {};

@@ -217,7 +218,3 @@ async function initPlugin(app, plugin, exitHooks) {

 return true;
 }
-
-module.exports = {
-loadPlugins,
-};
@@ -7,4 +7,4 @@ if (!Array.prototype.findLastIndex) {
 };
 }

-module.exports = {};
+export default () => {};
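Under CommonJS the polyfill ran as a side effect of require('./polyfill.js'); with export default () => {}, callers now invoke the default export explicitly, which is exactly what the prompt-converters change below does. A minimal consumer sketch (file name illustrative):

// consumer.js (hypothetical)
import polyfill from './polyfill.js';

polyfill(); // explicit call replaces the old side-effecting require('./polyfill.js')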
@@ -1,6 +1,8 @@
-require('./polyfill.js');
-const { getConfigValue } = require('./util.js');
-const crypto = require('crypto');
+import * as crypto from 'node:crypto';
+import polyfill from './polyfill.js';
+import { getConfigValue } from './util.js';

+polyfill();
+
 const PROMPT_PLACEHOLDER = getConfigValue('promptPlaceholder', 'Let\'s get started.');

@@ -17,7 +19,7 @@ const PROMPT_PLACEHOLDER = getConfigValue('promptPlaceholder', 'Let\'s get start
 * @returns {string} Prompt for Claude
 * @copyright Prompt Conversion script taken from RisuAI by kwaroran (GPLv3).
 */
-function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, withSysPromptSupport, useSystemPrompt, addSysHumanMsg, excludePrefixes) {
+export function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, withSysPromptSupport, useSystemPrompt, addSysHumanMsg, excludePrefixes) {

 //Prepare messages for claude.
 //When 'Exclude Human/Assistant prefixes' checked, setting messages role to the 'system'(last message is exception).
@@ -96,7 +98,7 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill,
 * @param {string} charName Character name
 * @param {string} userName User name
 */
-function convertClaudeMessages(messages, prefillString, useSysPrompt, useTools, humanMsgFix, charName = '', userName = '') {
+export function convertClaudeMessages(messages, prefillString, useSysPrompt, useTools, humanMsgFix, charName = '', userName = '') {
 let systemPrompt = [];
 if (useSysPrompt) {
 // Collect all the system messages up until the first instance of a non-system message, and then remove them from the messages array.
@@ -279,7 +281,7 @@ function convertClaudeMessages(messages, prefillString, useSysPrompt, useTools,
 * @param {string} userName User name
 * @returns {{chatHistory: object[]}} Prompt for Cohere
 */
-function convertCohereMessages(messages, charName = '', userName = '') {
+export function convertCohereMessages(messages, charName = '', userName = '') {
 if (messages.length === 0) {
 messages.unshift({
 role: 'user',
@@ -333,7 +335,7 @@ function convertCohereMessages(messages, charName = '', userName = '') {
 * @param {string} userName User name
 * @returns {{contents: *[], system_instruction: {parts: {text: string}}}} Prompt for Google MakerSuite models
 */
-function convertGooglePrompt(messages, model, useSysPrompt = false, charName = '', userName = '') {
+export function convertGooglePrompt(messages, model, useSysPrompt = false, charName = '', userName = '') {
 // This is a 1x1 transparent PNG
 const PNG_PIXEL = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII=';

@@ -456,7 +458,7 @@ function convertGooglePrompt(messages, model, useSysPrompt = false, charName = '
 * @param {string} charName Character name
 * @param {string} userName User name
 */
-function convertAI21Messages(messages, charName = '', userName = '') {
+export function convertAI21Messages(messages, charName = '', userName = '') {
 if (!Array.isArray(messages)) {
 return [];
 }
@@ -528,7 +530,7 @@ function convertAI21Messages(messages, charName = '', userName = '') {
 * @param {string} charName Character name
 * @param {string} userName User name
 */
-function convertMistralMessages(messages, charName = '', userName = '') {
+export function convertMistralMessages(messages, charName = '', userName = '') {
 if (!Array.isArray(messages)) {
 return [];
 }
@@ -612,7 +614,7 @@ function convertMistralMessages(messages, charName = '', userName = '') {
 * @param {boolean} strict Enable strict mode: only allow one system message at the start, force user first message
 * @returns {any[]} Merged messages
 */
-function mergeMessages(messages, charName, userName, strict) {
+export function mergeMessages(messages, charName, userName, strict) {
 let mergedMessages = [];

 // Remove names from the messages
@@ -686,7 +688,7 @@ function mergeMessages(messages, charName, userName, strict) {
 * @param {object[]} messages Array of messages
 * @returns {string} Prompt for Text Completion API
 */
-function convertTextCompletionPrompt(messages) {
+export function convertTextCompletionPrompt(messages) {
 if (typeof messages === 'string') {
 return messages;
 }
@@ -705,14 +707,3 @@ function convertTextCompletionPrompt(messages) {
 });
 return messageStrings.join('\n') + '\nassistant:';
 }
-
-module.exports = {
-convertClaudePrompt,
-convertClaudeMessages,
-convertGooglePrompt,
-convertTextCompletionPrompt,
-convertCohereMessages,
-convertMistralMessages,
-convertAI21Messages,
-mergeMessages,
-};
@@ -1,7 +1,8 @@
-const http = require('node:http');
-const https = require('node:https');
+import * as http from 'node:http';
+import * as https from 'node:https';

-const { isValidUrl, color } = require('./util.js');
+import { ProxyAgent } from 'proxy-agent';
+import { isValidUrl, color } from './util.js';

 const LOG_HEADER = '[Request Proxy]';

@@ -13,10 +14,8 @@ const LOG_HEADER = '[Request Proxy]';
 * @property {string} url Proxy URL.
 * @property {string[]} bypass List of URLs to bypass proxy.
 */
-function initRequestProxy({ enabled, url, bypass }) {
+export default function initRequestProxy({ enabled, url, bypass }) {
 try {
-const { ProxyAgent } = require('proxy-agent');
-
 // No proxy is enabled, so return
 if (!enabled) {
 return;
@@ -52,5 +51,3 @@ function initRequestProxy({ enabled, url, bypass }) {
 console.error(color.red(LOG_HEADER), 'Failed to initialize request proxy:', error);
 }
 }
-
-module.exports = initRequestProxy;
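The proxy-agent dependency moves from an inline require() inside the try block to a static top-level import. As a rough illustration of how such an agent is typically wired up, a sketch under the assumption that the proxy is applied via the global HTTP agents (not a statement of what the file's remaining, unshown lines do):

// request-proxy-sketch.js (hypothetical)
import * as http from 'node:http';
import * as https from 'node:https';
import { ProxyAgent } from 'proxy-agent';

export default function initRequestProxySketch({ enabled, url }) {
    if (!enabled) {
        return;
    }
    // Route outgoing HTTP(S) traffic through the configured proxy URL.
    const agent = new ProxyAgent({ getProxyForUrl: () => url });
    http.globalAgent = agent;
    https.globalAgent = agent;
}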
@@ -1,7 +1,8 @@
+import path from 'node:path';
+import fs from 'node:fs';
+
 import { pipeline, env, RawImage, Pipeline } from 'sillytavern-transformers';
 import { getConfigValue } from './util.js';
-import path from 'path';
-import fs from 'fs';

 configureTransformers();

@@ -50,7 +51,7 @@ const tasks = {
 * @param {string} image Base64-encoded image
 * @returns {Promise<RawImage|null>} Object representing the image
 */
-async function getRawImage(image) {
+export async function getRawImage(image) {
 try {
 const buffer = Buffer.from(image, 'base64');
 const byteArray = new Uint8Array(buffer);
@@ -116,7 +117,7 @@ async function migrateCacheToDataDir() {
 * @param {string} forceModel The model to use for the pipeline, if any
 * @returns {Promise<Pipeline>} Pipeline for the task
 */
-async function getPipeline(task, forceModel = '') {
+export async function getPipeline(task, forceModel = '') {
 await migrateCacheToDataDir();

 if (tasks[task].pipeline) {
@@ -138,6 +139,6 @@ async function getPipeline(task, forceModel = '') {
 }

 export default {
-getPipeline,
 getRawImage,
+getPipeline,
 };
src/users.js (104 changed lines)
@@ -1,22 +1,23 @@
 // Native Node Modules
-const path = require('path');
-const fs = require('fs');
-const crypto = require('crypto');
-const os = require('os');
+import * as path from 'node:path';
+import * as fs from 'node:fs';
+import * as crypto from 'node:crypto';
+import * as os from 'node:os';

 // Express and other dependencies
-const storage = require('node-persist');
-const express = require('express');
-const mime = require('mime-types');
-const archiver = require('archiver');
-const writeFileAtomicSync = require('write-file-atomic').sync;
-const _ = require('lodash');
+import storage from 'node-persist';
+import express from 'express';
+import mime from 'mime-types';
+import archiver from 'archiver';
+import _ from 'lodash';
+import { sync as writeFileAtomicSync } from 'write-file-atomic';

-const { USER_DIRECTORY_TEMPLATE, DEFAULT_USER, PUBLIC_DIRECTORIES, SETTINGS_FILE } = require('./constants');
-const { getConfigValue, color, delay, setConfigValue, generateTimestamp } = require('./util');
-const { readSecret, writeSecret } = require('./endpoints/secrets');
+import { USER_DIRECTORY_TEMPLATE, DEFAULT_USER, PUBLIC_DIRECTORIES, SETTINGS_FILE } from './constants.js';
+import { getConfigValue, color, delay, setConfigValue, generateTimestamp } from './util.js';
+import { readSecret, writeSecret } from './endpoints/secrets.js';
+import { getContentOfType } from './endpoints/content-manager.js';

-const KEY_PREFIX = 'user:';
+export const KEY_PREFIX = 'user:';
 const AVATAR_PREFIX = 'avatar:';
 const ENABLE_ACCOUNTS = getConfigValue('enableUserAccounts', false);
 const AUTHELIA_AUTH = getConfigValue('autheliaAuth', false);
@@ -94,7 +95,7 @@ const STORAGE_KEYS = {
 * Ensures that the content directories exist.
 * @returns {Promise<import('./users').UserDirectoryList[]>} - The list of user directories
 */
-async function ensurePublicDirectoriesExist() {
+export async function ensurePublicDirectoriesExist() {
 for (const dir of Object.values(PUBLIC_DIRECTORIES)) {
 if (!fs.existsSync(dir)) {
 fs.mkdirSync(dir, { recursive: true });
@@ -117,7 +118,7 @@ async function ensurePublicDirectoriesExist() {
 * Gets a list of all user directories.
 * @returns {Promise<import('./users').UserDirectoryList[]>} - The list of user directories
 */
-async function getUserDirectoriesList() {
+export async function getUserDirectoriesList() {
 const userHandles = await getAllUserHandles();
 const directoriesList = userHandles.map(handle => getUserDirectories(handle));
 return directoriesList;
@@ -126,7 +127,7 @@ async function getUserDirectoriesList() {
 /**
 * Perform migration from the old user data format to the new one.
 */
-async function migrateUserData() {
+export async function migrateUserData() {
 const publicDirectory = path.join(process.cwd(), 'public');

 // No need to migrate if the characters directory doesn't exists
@@ -328,14 +329,13 @@ async function migrateUserData() {
 console.log(color.green('Migration completed!'));
 }

-async function migrateSystemPrompts() {
+export async function migrateSystemPrompts() {
 /**
 * Gets the default system prompts.
 * @returns {Promise<any[]>} - The list of default system prompts
 */
 async function getDefaultSystemPrompts() {
 try {
-const { getContentOfType } = await import('./endpoints/content-manager.js');
 return getContentOfType('sysprompt', 'json');
 } catch {
 return [];
@@ -391,7 +391,7 @@ async function migrateSystemPrompts() {
 * @param {string} handle User handle
 * @returns {string} The key for the user storage
 */
-function toKey(handle) {
+export function toKey(handle) {
 return `${KEY_PREFIX}${handle}`;
 }

@@ -400,7 +400,7 @@ function toKey(handle) {
 * @param {string} handle User handle
 * @returns {string} The key for the avatar storage
 */
-function toAvatarKey(handle) {
+export function toAvatarKey(handle) {
 return `${AVATAR_PREFIX}${handle}`;
 }

@@ -409,7 +409,7 @@ function toAvatarKey(handle) {
 * @param {string} dataRoot The root directory for user data
 * @returns {Promise<void>}
 */
-async function initUserStorage(dataRoot) {
+export async function initUserStorage(dataRoot) {
 global.DATA_ROOT = dataRoot;
 console.log('Using data root:', color.green(global.DATA_ROOT));
 console.log();
@@ -430,7 +430,7 @@ async function initUserStorage(dataRoot) {
 * Get the cookie secret from the config. If it doesn't exist, generate a new one.
 * @returns {string} The cookie secret
 */
-function getCookieSecret() {
+export function getCookieSecret() {
 let secret = getConfigValue(STORAGE_KEYS.cookieSecret);

 if (!secret) {
@@ -446,7 +446,7 @@ function getCookieSecret() {
 * Generates a random password salt.
 * @returns {string} The password salt
 */
-function getPasswordSalt() {
+export function getPasswordSalt() {
 return crypto.randomBytes(16).toString('base64');
 }

@@ -454,7 +454,7 @@ function getPasswordSalt() {
 * Get the session name for the current server.
 * @returns {string} The session name
 */
-function getCookieSessionName() {
+export function getCookieSessionName() {
 // Get server hostname and hash it to generate a session suffix
 const suffix = crypto.createHash('sha256').update(os.hostname()).digest('hex').slice(0, 8);
 return `session-${suffix}`;
@@ -466,7 +466,7 @@ function getCookieSessionName() {
 * @param {string} salt Salt to use for hashing
 * @returns {string} Hashed password
 */
-function getPasswordHash(password, salt) {
+export function getPasswordHash(password, salt) {
 return crypto.scryptSync(password.normalize(), salt, 64).toString('base64');
 }

@@ -475,7 +475,7 @@ function getPasswordHash(password, salt) {
 * @param {import('express').Request} [request] HTTP request object
 * @returns {string} The CSRF secret
 */
-function getCsrfSecret(request) {
+export function getCsrfSecret(request) {
 if (!request || !request.user) {
 return ANON_CSRF_SECRET;
 }
@@ -494,7 +494,7 @@ function getCsrfSecret(request) {
 * Gets a list of all user handles.
 * @returns {Promise<string[]>} - The list of user handles
 */
-async function getAllUserHandles() {
+export async function getAllUserHandles() {
 const keys = await storage.keys(x => x.key.startsWith(KEY_PREFIX));
 const handles = keys.map(x => x.replace(KEY_PREFIX, ''));
 return handles;
@@ -505,7 +505,7 @@ async function getAllUserHandles() {
 * @param {string} handle User handle
 * @returns {UserDirectoryList} User directories
 */
-function getUserDirectories(handle) {
+export function getUserDirectories(handle) {
 if (DIRECTORIES_CACHE.has(handle)) {
 const cache = DIRECTORIES_CACHE.get(handle);
 if (cache) {
@@ -526,7 +526,7 @@ function getUserDirectories(handle) {
 * @param {string} handle User handle
 * @returns {Promise<string>} User avatar URL
 */
-async function getUserAvatar(handle) {
+export async function getUserAvatar(handle) {
 try {
 // Check if the user has a custom avatar
 const avatarKey = toAvatarKey(handle);
@@ -563,7 +563,7 @@ async function getUserAvatar(handle) {
 * @param {import('express').Request} request Request object
 * @returns {boolean} Whether the user should be redirected to the login page
 */
-function shouldRedirectToLogin(request) {
+export function shouldRedirectToLogin(request) {
 return ENABLE_ACCOUNTS && !request.user;
 }

@@ -574,7 +574,7 @@ function shouldRedirectToLogin(request) {
 * @param {boolean} basicAuthMode If Basic auth mode is enabled
 * @returns {Promise<boolean>} Whether auto-login was performed
 */
-async function tryAutoLogin(request, basicAuthMode) {
+export async function tryAutoLogin(request, basicAuthMode) {
 if (!ENABLE_ACCOUNTS || request.user || !request.session) {
 return false;
 }
@@ -693,7 +693,7 @@ async function basicUserLogin(request) {
 * @param {import('express').Response} response Response object
 * @param {import('express').NextFunction} next Next function
 */
-async function setUserDataMiddleware(request, response, next) {
+export async function setUserDataMiddleware(request, response, next) {
 // If user accounts are disabled, use the default user
 if (!ENABLE_ACCOUNTS) {
 const handle = DEFAULT_USER.handle;
@@ -751,7 +751,7 @@ async function setUserDataMiddleware(request, response, next) {
 * @param {import('express').Response} response Response object
 * @param {import('express').NextFunction} next Next function
 */
-function requireLoginMiddleware(request, response, next) {
+export function requireLoginMiddleware(request, response, next) {
 if (!request.user) {
 return response.sendStatus(403);
 }
@@ -787,7 +787,7 @@ function createRouteHandler(directoryFn) {
 * @param {import('express').NextFunction} next Next function
 * @returns {any}
 */
-function requireAdminMiddleware(request, response, next) {
+export function requireAdminMiddleware(request, response, next) {
 if (!request.user) {
 return response.sendStatus(403);
 }
@@ -806,7 +806,7 @@ function requireAdminMiddleware(request, response, next) {
 * @param {import('express').Response} response Express response object to write to
 * @returns {Promise<void>} Promise that resolves when the archive is created
 */
-async function createBackupArchive(handle, response) {
+export async function createBackupArchive(handle, response) {
 const directories = getUserDirectories(handle);

 console.log('Backup requested for', handle);
@@ -855,7 +855,7 @@ async function getAllUsers() {
 * Gets all of the enabled users.
 * @returns {Promise<User[]>}
 */
-async function getAllEnabledUsers() {
+export async function getAllEnabledUsers() {
 const users = await getAllUsers();
 return users.filter(x => x.enabled);
 }
@@ -863,7 +863,7 @@ async function getAllEnabledUsers() {
 /**
 * Express router for serving files from the user's directories.
 */
-const router = express.Router();
+export const router = express.Router();
 router.use('/backgrounds/*', createRouteHandler(req => req.user.directories.backgrounds));
 router.use('/characters/*', createRouteHandler(req => req.user.directories.characters));
 router.use('/User%20Avatars/*', createRouteHandler(req => req.user.directories.avatars));
@@ -871,31 +871,3 @@ router.use('/assets/*', createRouteHandler(req => req.user.directories.assets));
 router.use('/user/images/*', createRouteHandler(req => req.user.directories.userImages));
 router.use('/user/files/*', createRouteHandler(req => req.user.directories.files));
 router.use('/scripts/extensions/third-party/*', createRouteHandler(req => req.user.directories.extensions));
-
-module.exports = {
-KEY_PREFIX,
-toKey,
-toAvatarKey,
-initUserStorage,
-ensurePublicDirectoriesExist,
-getUserDirectoriesList,
-getAllUserHandles,
-getUserDirectories,
-setUserDataMiddleware,
-requireLoginMiddleware,
-requireAdminMiddleware,
-migrateUserData,
-migrateSystemPrompts,
-getPasswordSalt,
-getPasswordHash,
-getCsrfSecret,
-getCookieSecret,
-getCookieSessionName,
-getUserAvatar,
-shouldRedirectToLogin,
-createBackupArchive,
-tryAutoLogin,
-getAllUsers,
-getAllEnabledUsers,
-router,
-};
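The long module.exports block at the end of users.js disappears because every symbol is now exported at its declaration. Callers move from destructuring the required object to named imports; a brief sketch of the consuming side (consumer file name is illustrative):

// Before: const { getUserDirectories, requireLoginMiddleware } = require('./users');
// After (ESM consumer, hypothetical file):
import { getUserDirectories, requireLoginMiddleware, router as userDataRouter } from './users.js';

console.log(typeof getUserDirectories, typeof requireLoginMiddleware, typeof userDataRouter);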
118
src/util.js
118
src/util.js
@ -1,13 +1,16 @@
|
|||||||
const path = require('path');
|
import * as path from 'node:path';
|
||||||
const fs = require('fs');
|
import * as fs from 'node:fs';
|
||||||
const commandExistsSync = require('command-exists').sync;
|
import * as http2 from 'node:http2';
|
||||||
const writeFileAtomicSync = require('write-file-atomic').sync;
|
import { Readable } from 'node:stream';
|
||||||
const _ = require('lodash');
|
import { createRequire } from 'node:module';
|
||||||
const yauzl = require('yauzl');
|
|
||||||
const mime = require('mime-types');
|
import yaml from 'yaml';
|
||||||
const yaml = require('yaml');
|
import { sync as commandExistsSync } from 'command-exists';
|
||||||
const { default: simpleGit } = require('simple-git');
|
import { sync as writeFileAtomicSync } from 'write-file-atomic';
|
||||||
const { Readable } = require('stream');
|
import _ from 'lodash';
|
||||||
|
import yauzl from 'yauzl';
|
||||||
|
import mime from 'mime-types';
|
||||||
|
import { default as simpleGit } from 'simple-git';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Parsed config object.
|
* Parsed config object.
|
||||||
@ -18,7 +21,7 @@ let CACHED_CONFIG = null;
|
|||||||
* Returns the config object from the config.yaml file.
|
* Returns the config object from the config.yaml file.
|
||||||
* @returns {object} Config object
|
* @returns {object} Config object
|
||||||
*/
|
*/
|
||||||
function getConfig() {
|
export function getConfig() {
|
||||||
if (CACHED_CONFIG) {
|
if (CACHED_CONFIG) {
|
||||||
return CACHED_CONFIG;
|
return CACHED_CONFIG;
|
||||||
}
|
}
|
||||||
@ -46,7 +49,7 @@ function getConfig() {
|
|||||||
* @param {any} defaultValue - Default value to return if the key is not found
|
* @param {any} defaultValue - Default value to return if the key is not found
|
||||||
* @returns {any} Value for the given key
|
* @returns {any} Value for the given key
|
||||||
*/
|
*/
|
||||||
function getConfigValue(key, defaultValue = null) {
|
export function getConfigValue(key, defaultValue = null) {
|
||||||
const config = getConfig();
|
const config = getConfig();
|
||||||
return _.get(config, key, defaultValue);
|
return _.get(config, key, defaultValue);
|
||||||
}
|
}
|
||||||
@ -56,7 +59,7 @@ function getConfigValue(key, defaultValue = null) {
|
|||||||
* @param {string} key Key to set
|
* @param {string} key Key to set
|
||||||
* @param {any} value Value to set
|
* @param {any} value Value to set
|
||||||
*/
|
*/
|
||||||
function setConfigValue(key, value) {
|
export function setConfigValue(key, value) {
|
||||||
// Reset cache so that the next getConfig call will read the updated config file
|
// Reset cache so that the next getConfig call will read the updated config file
|
||||||
CACHED_CONFIG = null;
|
CACHED_CONFIG = null;
|
||||||
const config = getConfig();
|
const config = getConfig();
|
||||||
@ -69,7 +72,7 @@ function setConfigValue(key, value) {
|
|||||||
* @param {string} auth username:password
|
* @param {string} auth username:password
|
||||||
* @returns {string} Basic Auth header value
|
* @returns {string} Basic Auth header value
|
||||||
*/
|
*/
|
||||||
function getBasicAuthHeader(auth) {
|
export function getBasicAuthHeader(auth) {
|
||||||
const encoded = Buffer.from(`${auth}`).toString('base64');
|
const encoded = Buffer.from(`${auth}`).toString('base64');
|
||||||
return `Basic ${encoded}`;
|
return `Basic ${encoded}`;
|
||||||
}
|
}
|
||||||
@ -79,7 +82,7 @@ function getBasicAuthHeader(auth) {
|
|||||||
* Also returns the agent string for the Horde API.
|
* Also returns the agent string for the Horde API.
|
||||||
* @returns {Promise<{agent: string, pkgVersion: string, gitRevision: string | null, gitBranch: string | null, commitDate: string | null, isLatest: boolean}>} Version info object
|
* @returns {Promise<{agent: string, pkgVersion: string, gitRevision: string | null, gitBranch: string | null, commitDate: string | null, isLatest: boolean}>} Version info object
|
||||||
*/
|
*/
|
||||||
async function getVersion() {
|
export async function getVersion() {
|
||||||
let pkgVersion = 'UNKNOWN';
|
let pkgVersion = 'UNKNOWN';
|
||||||
let gitRevision = null;
|
let gitRevision = null;
|
||||||
let gitBranch = null;
|
let gitBranch = null;
|
||||||
@ -87,9 +90,10 @@ async function getVersion() {
|
|||||||
let isLatest = true;
|
let isLatest = true;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
const require = createRequire(import.meta.url);
|
||||||
const pkgJson = require(path.join(process.cwd(), './package.json'));
|
const pkgJson = require(path.join(process.cwd(), './package.json'));
|
||||||
pkgVersion = pkgJson.version;
|
pkgVersion = pkgJson.version;
|
||||||
if (!process['pkg'] && commandExistsSync('git')) {
|
if (commandExistsSync('git')) {
|
||||||
const git = simpleGit();
|
const git = simpleGit();
|
||||||
const cwd = process.cwd();
|
const cwd = process.cwd();
|
||||||
gitRevision = await git.cwd(cwd).revparse(['--short', 'HEAD']);
|
gitRevision = await git.cwd(cwd).revparse(['--short', 'HEAD']);
|
||||||
@ -117,7 +121,7 @@ async function getVersion() {
|
|||||||
* @param {number} ms Milliseconds to wait
|
* @param {number} ms Milliseconds to wait
|
||||||
* @returns {Promise<void>} Promise that resolves after the given amount of milliseconds
|
* @returns {Promise<void>} Promise that resolves after the given amount of milliseconds
|
||||||
*/
|
*/
|
||||||
function delay(ms) {
|
export function delay(ms) {
|
||||||
return new Promise(resolve => setTimeout(resolve, ms));
|
return new Promise(resolve => setTimeout(resolve, ms));
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -127,7 +131,7 @@ function delay(ms) {
|
|||||||
* @returns {string} Random hex string
|
* @returns {string} Random hex string
|
||||||
* @example getHexString(8) // 'a1b2c3d4'
|
* @example getHexString(8) // 'a1b2c3d4'
|
||||||
*/
|
*/
|
||||||
function getHexString(length) {
|
export function getHexString(length) {
|
||||||
const chars = '0123456789abcdef';
|
const chars = '0123456789abcdef';
|
||||||
let result = '';
|
let result = '';
|
||||||
for (let i = 0; i < length; i++) {
|
for (let i = 0; i < length; i++) {
|
||||||
@ -142,7 +146,7 @@ function getHexString(length) {
|
|||||||
* @param {string} fileExtension File extension to look for
|
* @param {string} fileExtension File extension to look for
|
||||||
* @returns {Promise<Buffer|null>} Buffer containing the extracted file. Null if the file was not found.
|
* @returns {Promise<Buffer|null>} Buffer containing the extracted file. Null if the file was not found.
|
||||||
*/
|
*/
|
||||||
async function extractFileFromZipBuffer(archiveBuffer, fileExtension) {
|
export async function extractFileFromZipBuffer(archiveBuffer, fileExtension) {
|
||||||
return await new Promise((resolve, reject) => yauzl.fromBuffer(Buffer.from(archiveBuffer), { lazyEntries: true }, (err, zipfile) => {
|
return await new Promise((resolve, reject) => yauzl.fromBuffer(Buffer.from(archiveBuffer), { lazyEntries: true }, (err, zipfile) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
reject(err);
|
reject(err);
|
||||||
@ -181,7 +185,7 @@ async function extractFileFromZipBuffer(archiveBuffer, fileExtension) {
|
|||||||
* @param {string} zipFilePath Path to the ZIP archive
|
* @param {string} zipFilePath Path to the ZIP archive
|
||||||
* @returns {Promise<[string, Buffer][]>} Array of image buffers
|
* @returns {Promise<[string, Buffer][]>} Array of image buffers
|
||||||
*/
|
*/
|
||||||
async function getImageBuffers(zipFilePath) {
|
export async function getImageBuffers(zipFilePath) {
|
||||||
return new Promise((resolve, reject) => {
|
return new Promise((resolve, reject) => {
|
||||||
// Check if the zip file exists
|
// Check if the zip file exists
|
||||||
if (!fs.existsSync(zipFilePath)) {
|
if (!fs.existsSync(zipFilePath)) {
|
||||||
@ -237,7 +241,7 @@ async function getImageBuffers(zipFilePath) {
|
|||||||
* @param {any} readableStream Readable stream to read from
|
* @param {any} readableStream Readable stream to read from
|
||||||
* @returns {Promise<Buffer[]>} Array of chunks
|
* @returns {Promise<Buffer[]>} Array of chunks
|
||||||
*/
|
*/
|
||||||
async function readAllChunks(readableStream) {
|
export async function readAllChunks(readableStream) {
|
||||||
return new Promise((resolve, reject) => {
|
return new Promise((resolve, reject) => {
|
||||||
// Consume the readable stream
|
// Consume the readable stream
|
||||||
const chunks = [];
|
const chunks = [];
|
||||||
@ -261,7 +265,7 @@ function isObject(item) {
|
|||||||
return (item && typeof item === 'object' && !Array.isArray(item));
|
return (item && typeof item === 'object' && !Array.isArray(item));
|
||||||
}
|
}
|
||||||
|
|
||||||
function deepMerge(target, source) {
|
export function deepMerge(target, source) {
|
||||||
let output = Object.assign({}, target);
|
let output = Object.assign({}, target);
|
||||||
if (isObject(target) && isObject(source)) {
|
if (isObject(target) && isObject(source)) {
|
||||||
Object.keys(source).forEach(key => {
|
Object.keys(source).forEach(key => {
|
||||||
@ -278,7 +282,7 @@ function deepMerge(target, source) {
|
|||||||
return output;
|
return output;
|
||||||
}
|
}
|
||||||
|
|
||||||
const color = {
|
export const color = {
|
||||||
byNum: (mess, fgNum) => {
|
byNum: (mess, fgNum) => {
|
||||||
mess = mess || '';
|
mess = mess || '';
|
||||||
fgNum = fgNum === undefined ? 31 : fgNum;
|
fgNum = fgNum === undefined ? 31 : fgNum;
|
||||||
@ -298,7 +302,7 @@ const color = {
|
|||||||
* Gets a random UUIDv4 string.
|
* Gets a random UUIDv4 string.
|
||||||
* @returns {string} A UUIDv4 string
|
* @returns {string} A UUIDv4 string
|
||||||
*/
|
*/
|
||||||
function uuidv4() {
|
export function uuidv4() {
|
||||||
if ('crypto' in global && 'randomUUID' in global.crypto) {
|
if ('crypto' in global && 'randomUUID' in global.crypto) {
|
||||||
return global.crypto.randomUUID();
|
return global.crypto.randomUUID();
|
||||||
}
|
}
|
||||||
@ -309,7 +313,7 @@ function uuidv4() {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
function humanizedISO8601DateTime(date) {
|
export function humanizedISO8601DateTime(date) {
|
||||||
let baseDate = typeof date === 'number' ? new Date(date) : new Date();
|
let baseDate = typeof date === 'number' ? new Date(date) : new Date();
|
||||||
let humanYear = baseDate.getFullYear();
|
let humanYear = baseDate.getFullYear();
|
||||||
let humanMonth = (baseDate.getMonth() + 1);
|
let humanMonth = (baseDate.getMonth() + 1);
|
||||||
@ -322,7 +326,7 @@ function humanizedISO8601DateTime(date) {
|
|||||||
return HumanizedDateTime;
|
return HumanizedDateTime;
|
||||||
}
|
}
|
||||||
|
|
||||||
function tryParse(str) {
|
export function tryParse(str) {
|
||||||
try {
|
try {
|
||||||
return JSON.parse(str);
|
return JSON.parse(str);
|
||||||
} catch {
|
} catch {
|
||||||
@ -337,7 +341,7 @@ function tryParse(str) {
|
|||||||
* @param {string} inputPath The path to be converted.
|
* @param {string} inputPath The path to be converted.
|
||||||
* @returns The relative URL path from which the client can access the file.
|
* @returns The relative URL path from which the client can access the file.
|
||||||
*/
|
*/
|
||||||
function clientRelativePath(root, inputPath) {
|
export function clientRelativePath(root, inputPath) {
|
||||||
if (!inputPath.startsWith(root)) {
|
if (!inputPath.startsWith(root)) {
|
||||||
throw new Error('Input path does not start with the root directory');
|
throw new Error('Input path does not start with the root directory');
|
||||||
}
|
}
|
||||||
@ -350,11 +354,11 @@ function clientRelativePath(root, inputPath) {
|
|||||||
* @param {string} filename The file name to remove the extension from.
|
* @param {string} filename The file name to remove the extension from.
|
||||||
* @returns The file name, sans extension
|
* @returns The file name, sans extension
|
||||||
*/
|
*/
|
||||||
function removeFileExtension(filename) {
|
export function removeFileExtension(filename) {
|
||||||
return filename.replace(/\.[^.]+$/, '');
|
return filename.replace(/\.[^.]+$/, '');
|
||||||
}
|
}
|
||||||
|
|
||||||
function generateTimestamp() {
|
export function generateTimestamp() {
|
||||||
const now = new Date();
|
const now = new Date();
|
||||||
const year = now.getFullYear();
|
const year = now.getFullYear();
|
||||||
const month = String(now.getMonth() + 1).padStart(2, '0');
|
const month = String(now.getMonth() + 1).padStart(2, '0');
|
||||||
@ -371,7 +375,7 @@ function generateTimestamp() {
|
|||||||
* @param {string} directory The root directory to remove backups from.
|
* @param {string} directory The root directory to remove backups from.
|
||||||
* @param {string} prefix File prefix to filter backups by.
|
* @param {string} prefix File prefix to filter backups by.
|
||||||
*/
|
*/
|
||||||
function removeOldBackups(directory, prefix) {
|
export function removeOldBackups(directory, prefix) {
|
||||||
     const MAX_BACKUPS = Number(getConfigValue('numberOfBackups', 50));

     let files = fs.readdirSync(directory).filter(f => f.startsWith(prefix));
@@ -389,7 +393,7 @@ function removeOldBackups(directory, prefix) {
  * @param {'name' | 'date'} sortBy Sort images by name or date
  * @returns {string[]} List of image file names
  */
-function getImages(directoryPath, sortBy = 'name') {
+export function getImages(directoryPath, sortBy = 'name') {
     function getSortFunction() {
         switch (sortBy) {
             case 'name':
@@ -415,7 +419,7 @@ function getImages(directoryPath, sortBy = 'name') {
  * @param {import('node-fetch').Response} from The Fetch API response to pipe from.
  * @param {import('express').Response} to The Express response to pipe to.
  */
-function forwardFetchResponse(from, to) {
+export function forwardFetchResponse(from, to) {
     let statusCode = from.status;
     let statusText = from.statusText;

@@ -434,6 +438,7 @@ function forwardFetchResponse(from, to) {

     to.statusCode = statusCode;
     to.statusMessage = statusText;

     from.body.pipe(to);

     to.socket.on('close', function () {
@@ -457,10 +462,9 @@ function forwardFetchResponse(from, to) {
  * @param {object} headers Request headers
  * @returns {Promise<string>} Response body
  */
-function makeHttp2Request(endpoint, method, body, headers) {
+export function makeHttp2Request(endpoint, method, body, headers) {
     return new Promise((resolve, reject) => {
         try {
-            const http2 = require('http2');
             const url = new URL(endpoint);
             const client = http2.connect(url.origin);

@@ -511,7 +515,7 @@ function makeHttp2Request(endpoint, method, body, headers) {
  * @param {string} yamlString YAML-serialized object
  * @returns
  */
-function mergeObjectWithYaml(obj, yamlString) {
+export function mergeObjectWithYaml(obj, yamlString) {
     if (!yamlString) {
         return;
     }
@@ -540,7 +544,7 @@ function mergeObjectWithYaml(obj, yamlString) {
  * @param {string} yamlString YAML-serialized array
  * @returns {void} Nothing
  */
-function excludeKeysByYaml(obj, yamlString) {
+export function excludeKeysByYaml(obj, yamlString) {
     if (!yamlString) {
         return;
     }
@@ -569,14 +573,14 @@ function excludeKeysByYaml(obj, yamlString) {
  * @param {string} str Input string
  * @returns {string} Trimmed string
  */
-function trimV1(str) {
+export function trimV1(str) {
     return String(str ?? '').replace(/\/$/, '').replace(/\/v1$/, '');
 }

 /**
  * Simple TTL memory cache.
  */
-class Cache {
+export class Cache {
     /**
      * @param {number} ttl Time to live in milliseconds
      */
@@ -633,7 +637,7 @@ class Cache {
  * @param {string} text Text with color formatting
  * @returns {string} Text without color formatting
  */
-function removeColorFormatting(text) {
+export function removeColorFormatting(text) {
     // ANSI escape codes for colors are usually in the format \x1b[<codes>m
     return text.replace(/\x1b\[\d{1,2}(;\d{1,2})*m/g, '');
 }
@@ -643,7 +647,7 @@ function removeColorFormatting(text) {
  * @param {number} n Number of times to repeat the separator
  * @returns {string} Separator string
  */
-function getSeparator(n) {
+export function getSeparator(n) {
     return '='.repeat(n);
 }

@@ -652,7 +656,7 @@ function getSeparator(n) {
  * @param {string} url String to check
  * @returns {boolean} If the URL is valid
  */
-function isValidUrl(url) {
+export function isValidUrl(url) {
     try {
         new URL(url);
         return true;
@@ -660,35 +664,3 @@ function isValidUrl(url) {
         return false;
     }
 }
-
-module.exports = {
-    getConfig,
-    getConfigValue,
-    setConfigValue,
-    getVersion,
-    getBasicAuthHeader,
-    extractFileFromZipBuffer,
-    getImageBuffers,
-    readAllChunks,
-    delay,
-    deepMerge,
-    color,
-    uuidv4,
-    humanizedISO8601DateTime,
-    tryParse,
-    clientRelativePath,
-    removeFileExtension,
-    generateTimestamp,
-    removeOldBackups,
-    getImages,
-    forwardFetchResponse,
-    getHexString,
-    mergeObjectWithYaml,
-    excludeKeysByYaml,
-    trimV1,
-    Cache,
-    makeHttp2Request,
-    removeColorFormatting,
-    getSeparator,
-    isValidUrl,
-};
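
Note: the utility hunks above follow one pattern: every function or class that used to be re-listed in the module.exports aggregate at the bottom of the file now carries the export keyword at its declaration, and the aggregate object is deleted. The removed `const http2 = require('http2');` inside makeHttp2Request also implies that a module-level import of the built-in was added near the top of the file (not visible in these hunks); a typical equivalent would be `import http2 from 'node:http2';`, though the exact line is an assumption. A minimal sketch of the export pattern with hypothetical math.js / caller.js files, not code from this commit:

    // file: math.js (hypothetical)
    // Each declaration exports itself, so no module.exports block is needed.
    export function add(a, b) {
        return a + b;
    }

    export class Counter {
        constructor() {
            this.value = 0;
        }
        increment() {
            return ++this.value;
        }
    }

    // file: caller.js (hypothetical consumer)
    // Before the migration: const { add, Counter } = require('./math');
    import { add, Counter } from './math.js';

    console.log(add(2, 3));                  // 5
    console.log(new Counter().increment());  // 1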

@@ -5,7 +5,7 @@
  *
  * @link https://github.com/malfoyslastname/character-card-spec-v2
  */
-class TavernCardValidator {
+export class TavernCardValidator {
     /**
      * @type {string|null}
      */
@@ -167,5 +167,3 @@ class TavernCardValidator {
         return true;
     }
 }
-
-module.exports = { TavernCardValidator };
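
Note: once the class is exported directly, the trailing CommonJS wrapper becomes redundant and ESM consumers use a named import. For files that remain CommonJS during a gradual migration, general Node.js behaviour applies: they cannot require() an ES module synchronously, but they can load one lazily with a dynamic import(). A sketch under those assumptions (the ./validator.js path and the constructor call are illustrative, not taken from this commit):

    // file: legacy.cjs (hypothetical CommonJS holdout)
    async function validateCard(card) {
        // Dynamic import() works from CommonJS and resolves to the module namespace.
        const { TavernCardValidator } = await import('./validator.js');
        return new TavernCardValidator(card);
    }

    module.exports = { validateCard };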

@@ -1,5 +1,5 @@
-const fetch = require('node-fetch').default;
-const { SECRET_KEYS, readSecret } = require('../endpoints/secrets');
+import fetch from 'node-fetch';
+import { SECRET_KEYS, readSecret } from '../endpoints/secrets.js';

 /**
  * Gets the vector for the given text batch from an OpenAI compatible endpoint.
@@ -9,7 +9,7 @@ const { SECRET_KEYS, readSecret } = require('../endpoints/secrets');
  * @param {string} model - The model to use for the embedding
  * @returns {Promise<number[][]>} - The array of vectors for the texts
  */
-async function getCohereBatchVector(texts, isQuery, directories, model) {
+export async function getCohereBatchVector(texts, isQuery, directories, model) {
     const key = readSecret(directories, SECRET_KEYS.COHERE);

     if (!key) {
@@ -55,12 +55,8 @@ async function getCohereBatchVector(texts, isQuery, directories, model) {
  * @param {string} model - The model to use for the embedding
  * @returns {Promise<number[]>} - The vector for the text
  */
-async function getCohereVector(text, isQuery, directories, model) {
+export async function getCohereVector(text, isQuery, directories, model) {
     const vectors = await getCohereBatchVector([text], isQuery, directories, model);
     return vectors[0];
 }

-module.exports = {
-    getCohereBatchVector,
-    getCohereVector,
-};
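
Note: two ESM-specific details recur in the import hunks above and below. node-fetch v3 is ESM-only and exposes fetch as its default export, so `require('node-fetch').default` collapses to a plain default import; and Node's ESM resolver does not guess extensions for relative specifiers, which is why '../endpoints/secrets' becomes '../endpoints/secrets.js'. A minimal runnable sketch of the resulting style (the endpoint and payload are made up):

    import fetch from 'node-fetch'; // default import; no `.default` needed under ESM

    // Relative imports must spell out the extension under Node's ESM resolver, e.g.:
    //   import { readSecret } from '../endpoints/secrets.js';
    // (left as a comment so this sketch stays self-contained)

    export async function postJson(url, body) {
        const response = await fetch(url, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify(body),
        });

        if (!response.ok) {
            throw new Error(`Request failed with status ${response.status}`);
        }

        return response.json();
    }

    // Example usage (made-up endpoint):
    // const data = await postJson('https://example.com/v1/embeddings', { input: 'hello' });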

@@ -1,3 +1,4 @@
+import { getPipeline } from '../transformers.mjs';
 const TASK = 'feature-extraction';

 /**
@@ -5,9 +6,8 @@ const TASK = 'feature-extraction';
  * @param {string} text - The text to vectorize
  * @returns {Promise<number[]>} - The vectorized text in form of an array of numbers
  */
-async function getTransformersVector(text) {
-    const module = await import('../transformers.mjs');
-    const pipe = await module.default.getPipeline(TASK);
+export async function getTransformersVector(text) {
+    const pipe = await getPipeline(TASK);
     const result = await pipe(text, { pooling: 'mean', normalize: true });
     const vector = Array.from(result.data);
     return vector;
@@ -18,15 +18,10 @@ async function getTransformersVector(text) {
  * @param {string[]} texts - The texts to vectorize
  * @returns {Promise<number[][]>} - The vectorized texts in form of an array of arrays of numbers
  */
-async function getTransformersBatchVector(texts) {
+export async function getTransformersBatchVector(texts) {
     const result = [];
     for (const text of texts) {
         result.push(await getTransformersVector(text));
     }
     return result;
 }

-module.exports = {
-    getTransformersVector,
-    getTransformersBatchVector,
-};
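
Note: the hunk above also removes an indirection. The old CommonJS file could only reach the ESM-only ../transformers.mjs through a dynamic `await import()` and its `.default` namespace; the migrated file can use a static named import of getPipeline instead. The same transformation, sketched with a Node built-in so it runs on its own ('node:os' stands in for '../transformers.mjs'):

    // Before (CommonJS reaching into an ES module at call time):
    //   const module = await import('node:os');
    //   const name = module.default.hostname();
    //
    // After (the importing file is itself ESM, so a static named import suffices
    // and the binding is resolved once at module load):
    import { hostname } from 'node:os';

    console.log(hostname());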

@@ -1,4 +1,4 @@
-const fetch = require('node-fetch').default;
+import fetch from 'node-fetch';

 /**
  * Gets the vector for the given text from SillyTavern-extras
@@ -7,7 +7,7 @@ const fetch = require('node-fetch').default;
  * @param {string} apiKey - The Extras API key, or empty string if API key not enabled
  * @returns {Promise<number[][]>} - The array of vectors for the texts
  */
-async function getExtrasBatchVector(texts, apiUrl, apiKey) {
+export async function getExtrasBatchVector(texts, apiUrl, apiKey) {
     return getExtrasVectorImpl(texts, apiUrl, apiKey);
 }

@@ -18,7 +18,7 @@ async function getExtrasBatchVector(texts, apiUrl, apiKey) {
  * @param {string} apiKey - The Extras API key, or empty string if API key not enabled
  * @returns {Promise<number[]>} - The vector for the text
  */
-async function getExtrasVector(text, apiUrl, apiKey) {
+export async function getExtrasVector(text, apiUrl, apiKey) {
     return getExtrasVectorImpl(text, apiUrl, apiKey);
 }

@@ -71,8 +71,3 @@ async function getExtrasVectorImpl(text, apiUrl, apiKey) {

     return vector;
 }
-
-module.exports = {
-    getExtrasVector,
-    getExtrasBatchVector,
-};

@@ -1,6 +1,6 @@
-const fetch = require('node-fetch').default;
-const { setAdditionalHeadersByType } = require('../additional-headers');
-const { TEXTGEN_TYPES } = require('../constants');
+import fetch from 'node-fetch';
+import { setAdditionalHeadersByType } from '../additional-headers.js';
+import { TEXTGEN_TYPES } from '../constants.js';

 /**
  * Gets the vector for the given text from LlamaCpp
@@ -9,7 +9,7 @@ const { TEXTGEN_TYPES } = require('../constants');
  * @param {import('../users').UserDirectoryList} directories - The directories object for the user
  * @returns {Promise<number[][]>} - The array of vectors for the texts
  */
-async function getLlamaCppBatchVector(texts, apiUrl, directories) {
+export async function getLlamaCppBatchVector(texts, apiUrl, directories) {
     const url = new URL(apiUrl);
     url.pathname = '/v1/embeddings';

@@ -50,12 +50,7 @@ async function getLlamaCppBatchVector(texts, apiUrl, directories) {
  * @param {import('../users').UserDirectoryList} directories - The directories object for the user
  * @returns {Promise<number[]>} - The vector for the text
  */
-async function getLlamaCppVector(text, apiUrl, directories) {
+export async function getLlamaCppVector(text, apiUrl, directories) {
     const vectors = await getLlamaCppBatchVector([text], apiUrl, directories);
     return vectors[0];
 }
-
-module.exports = {
-    getLlamaCppBatchVector,
-    getLlamaCppVector,
-};

@@ -1,5 +1,5 @@
-const fetch = require('node-fetch').default;
-const { SECRET_KEYS, readSecret } = require('../endpoints/secrets');
+import fetch from 'node-fetch';
+import { SECRET_KEYS, readSecret } from '../endpoints/secrets.js';
 const API_MAKERSUITE = 'https://generativelanguage.googleapis.com';

 /**
@@ -8,7 +8,7 @@ const API_MAKERSUITE = 'https://generativelanguage.googleapis.com';
  * @param {import('../users').UserDirectoryList} directories - The directories object for the user
  * @returns {Promise<number[][]>} - The array of vectors for the texts
  */
-async function getMakerSuiteBatchVector(texts, directories) {
+export async function getMakerSuiteBatchVector(texts, directories) {
     const promises = texts.map(text => getMakerSuiteVector(text, directories));
     return await Promise.all(promises);
 }
@@ -19,7 +19,7 @@ async function getMakerSuiteBatchVector(texts, directories) {
  * @param {import('../users').UserDirectoryList} directories - The directories object for the user
  * @returns {Promise<number[]>} - The vector for the text
  */
-async function getMakerSuiteVector(text, directories) {
+export async function getMakerSuiteVector(text, directories) {
     const key = readSecret(directories, SECRET_KEYS.MAKERSUITE);

     if (!key) {
@@ -56,8 +56,3 @@ async function getMakerSuiteVector(text, directories) {
     // noinspection JSValidateTypes
     return data['embedding']['values'];
 }
-
-module.exports = {
-    getMakerSuiteVector,
-    getMakerSuiteBatchVector,
-};

@@ -1,5 +1,5 @@
-const fetch = require('node-fetch').default;
-const { SECRET_KEYS, readSecret } = require('../endpoints/secrets');
+import fetch from 'node-fetch';
+import { SECRET_KEYS, readSecret } from '../endpoints/secrets.js';

 const SOURCES = {
     'nomicai': {
@@ -16,7 +16,7 @@ const SOURCES = {
  * @param {import('../users').UserDirectoryList} directories - The directories object for the user
  * @returns {Promise<number[][]>} - The array of vectors for the texts
  */
-async function getNomicAIBatchVector(texts, source, directories) {
+export async function getNomicAIBatchVector(texts, source, directories) {
     const config = SOURCES[source];

     if (!config) {
@@ -67,12 +67,7 @@ async function getNomicAIBatchVector(texts, source, directories) {
  * @param {import('../users').UserDirectoryList} directories - The directories object for the user
  * @returns {Promise<number[]>} - The vector for the text
  */
-async function getNomicAIVector(text, source, directories) {
+export async function getNomicAIVector(text, source, directories) {
     const vectors = await getNomicAIBatchVector([text], source, directories);
     return vectors[0];
 }
-
-module.exports = {
-    getNomicAIVector,
-    getNomicAIBatchVector,
-};

@@ -1,6 +1,6 @@
-const fetch = require('node-fetch').default;
-const { setAdditionalHeadersByType } = require('../additional-headers');
-const { TEXTGEN_TYPES } = require('../constants');
+import fetch from 'node-fetch';
+import { setAdditionalHeadersByType } from '../additional-headers.js';
+import { TEXTGEN_TYPES } from '../constants.js';

 /**
  * Gets the vector for the given text from Ollama
@@ -11,7 +11,7 @@ const { TEXTGEN_TYPES } = require('../constants');
  * @param {import('../users').UserDirectoryList} directories - The directories object for the user
  * @returns {Promise<number[][]>} - The array of vectors for the texts
  */
-async function getOllamaBatchVector(texts, apiUrl, model, keep, directories) {
+export async function getOllamaBatchVector(texts, apiUrl, model, keep, directories) {
     const result = [];
     for (const text of texts) {
         const vector = await getOllamaVector(text, apiUrl, model, keep, directories);
@@ -29,7 +29,7 @@ async function getOllamaBatchVector(texts, apiUrl, model, keep, directories) {
  * @param {import('../users').UserDirectoryList} directories - The directories object for the user
  * @returns {Promise<number[]>} - The vector for the text
  */
-async function getOllamaVector(text, apiUrl, model, keep, directories) {
+export async function getOllamaVector(text, apiUrl, model, keep, directories) {
     const url = new URL(apiUrl);
     url.pathname = '/api/embeddings';

@@ -62,8 +62,3 @@ async function getOllamaVector(text, apiUrl, model, keep, directories) {

     return data.embedding;
 }
-
-module.exports = {
-    getOllamaBatchVector,
-    getOllamaVector,
-};

@@ -1,5 +1,5 @@
-const fetch = require('node-fetch').default;
-const { SECRET_KEYS, readSecret } = require('../endpoints/secrets');
+import fetch from 'node-fetch';
+import { SECRET_KEYS, readSecret } from '../endpoints/secrets.js';

 const SOURCES = {
     'togetherai': {
@@ -27,7 +27,7 @@ const SOURCES = {
  * @param {string} model - The model to use for the embedding
  * @returns {Promise<number[][]>} - The array of vectors for the texts
  */
-async function getOpenAIBatchVector(texts, source, directories, model = '') {
+export async function getOpenAIBatchVector(texts, source, directories, model = '') {
     const config = SOURCES[source];

     if (!config) {
@@ -83,12 +83,7 @@ async function getOpenAIBatchVector(texts, source, directories, model = '') {
  * @param {string} model - The model to use for the embedding
  * @returns {Promise<number[]>} - The vector for the text
  */
-async function getOpenAIVector(text, source, directories, model = '') {
+export async function getOpenAIVector(text, source, directories, model = '') {
     const vectors = await getOpenAIBatchVector([text], source, directories, model);
     return vectors[0];
 }
-
-module.exports = {
-    getOpenAIVector,
-    getOpenAIBatchVector,
-};

@@ -1,6 +1,6 @@
-const fetch = require('node-fetch').default;
-const { setAdditionalHeadersByType } = require('../additional-headers');
-const { TEXTGEN_TYPES } = require('../constants');
+import fetch from 'node-fetch';
+import { setAdditionalHeadersByType } from '../additional-headers.js';
+import { TEXTGEN_TYPES } from '../constants.js';

 /**
  * Gets the vector for the given text from VLLM
@@ -10,7 +10,7 @@ const { TEXTGEN_TYPES } = require('../constants');
  * @param {import('../users').UserDirectoryList} directories - The directories object for the user
  * @returns {Promise<number[][]>} - The array of vectors for the texts
  */
-async function getVllmBatchVector(texts, apiUrl, model, directories) {
+export async function getVllmBatchVector(texts, apiUrl, model, directories) {
     const url = new URL(apiUrl);
     url.pathname = '/v1/embeddings';

@@ -52,12 +52,7 @@ async function getVllmBatchVector(texts, apiUrl, model, directories) {
  * @param {import('../users').UserDirectoryList} directories - The directories object for the user
  * @returns {Promise<number[]>} - The vector for the text
  */
-async function getVllmVector(text, apiUrl, model, directories) {
+export async function getVllmVector(text, apiUrl, model, directories) {
     const vectors = await getVllmBatchVector([text], apiUrl, model, directories);
     return vectors[0];
 }
-
-module.exports = {
-    getVllmBatchVector,
-    getVllmVector,
-};
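
Note: every provider file above now exposes only named exports; the trailing module.exports blocks are gone. A caller that previously pulled in a whole provider with require() gets the same "module as an object" shape from a namespace import. A runnable sketch using a Node built-in in place of one of the provider modules:

    // 'node:path' stands in for a migrated provider module; with a namespace
    // import, every named export becomes a property of one object, which is the
    // closest ESM analogue of `const mod = require('./some-provider')`.
    import * as path from 'node:path';

    console.log(path.join('vectors', 'index.json')); // vectors/index.json
    console.log(typeof path.resolve);                // 'function'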