Merge branch 'staging' into cc-async-count

This commit is contained in:
Cohee
2024-10-12 12:49:49 +03:00
75 changed files with 1569 additions and 1375 deletions

View File

@@ -16,6 +16,9 @@ module.exports = {
env: { env: {
node: true, node: true,
}, },
parserOptions: {
sourceType: 'module',
},
}, },
{ {
files: ['src/**/*.mjs'], files: ['src/**/*.mjs'],

View File

@@ -2,7 +2,7 @@
"compilerOptions": { "compilerOptions": {
"module": "ESNext", "module": "ESNext",
"target": "ESNext", "target": "ESNext",
"moduleResolution": "node", "moduleResolution": "Node",
"strictNullChecks": true, "strictNullChecks": true,
"strictFunctionTypes": true, "strictFunctionTypes": true,
"checkJs": true, "checkJs": true,

471
package-lock.json generated
View File

@@ -24,9 +24,9 @@
"csrf-csrf": "^2.2.3", "csrf-csrf": "^2.2.3",
"express": "^4.21.0", "express": "^4.21.0",
"form-data": "^4.0.0", "form-data": "^4.0.0",
"google-translate-api-browser": "^3.0.1", "google-translate-api-x": "^10.7.1",
"he": "^1.2.0",
"helmet": "^7.1.0", "helmet": "^7.1.0",
"html-entities": "^2.5.2",
"iconv-lite": "^0.6.3", "iconv-lite": "^0.6.3",
"ip-matching": "^2.1.2", "ip-matching": "^2.1.2",
"ipaddr.js": "^2.0.1", "ipaddr.js": "^2.0.1",
@@ -34,7 +34,7 @@
"lodash": "^4.17.21", "lodash": "^4.17.21",
"mime-types": "^2.1.35", "mime-types": "^2.1.35",
"multer": "^1.4.5-lts.1", "multer": "^1.4.5-lts.1",
"node-fetch": "^2.6.11", "node-fetch": "^3.3.2",
"node-persist": "^4.0.1", "node-persist": "^4.0.1",
"open": "^8.4.2", "open": "^8.4.2",
"png-chunk-text": "^1.0.0", "png-chunk-text": "^1.0.0",
@@ -59,9 +59,28 @@
"sillytavern": "server.js" "sillytavern": "server.js"
}, },
"devDependencies": { "devDependencies": {
"@types/archiver": "^6.0.2",
"@types/command-exists": "^1.2.3",
"@types/compression": "^1.7.5",
"@types/cookie-parser": "^1.4.7",
"@types/cookie-session": "^2.0.49",
"@types/cors": "^2.8.17",
"@types/dompurify": "^3.0.5", "@types/dompurify": "^3.0.5",
"@types/express": "^4.17.21",
"@types/jquery": "^3.5.29", "@types/jquery": "^3.5.29",
"@types/lodash": "^4.17.10",
"@types/mime-types": "^2.1.4",
"@types/multer": "^1.4.12",
"@types/node": "^18.19.55",
"@types/node-persist": "^3.1.8",
"@types/png-chunk-text": "^1.0.3",
"@types/png-chunks-encode": "^1.0.2",
"@types/png-chunks-extract": "^1.0.2",
"@types/response-time": "^2.3.8",
"@types/toastr": "^2.1.43", "@types/toastr": "^2.1.43",
"@types/write-file-atomic": "^4.0.3",
"@types/yargs": "^17.0.33",
"@types/yauzl": "^2.10.3",
"eslint": "^8.57.0" "eslint": "^8.57.0"
}, },
"engines": { "engines": {
@@ -949,6 +968,27 @@
"integrity": "sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA==", "integrity": "sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA==",
"license": "MIT" "license": "MIT"
}, },
"node_modules/@types/archiver": {
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/@types/archiver/-/archiver-6.0.2.tgz",
"integrity": "sha512-KmROQqbQzKGuaAbmK+ZcytkJ51+YqDa7NmbXjmtC5YBLSyQYo21YaUnQ3HbaPFKL1ooo6RQ6OPYPIDyxfpDDXw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/readdir-glob": "*"
}
},
"node_modules/@types/body-parser": {
"version": "1.19.5",
"resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.5.tgz",
"integrity": "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/connect": "*",
"@types/node": "*"
}
},
"node_modules/@types/cacheable-request": { "node_modules/@types/cacheable-request": {
"version": "6.0.3", "version": "6.0.3",
"resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.3.tgz", "resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.3.tgz",
@@ -961,6 +1001,64 @@
"@types/responselike": "^1.0.0" "@types/responselike": "^1.0.0"
} }
}, },
"node_modules/@types/command-exists": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/@types/command-exists/-/command-exists-1.2.3.tgz",
"integrity": "sha512-PpbaE2XWLaWYboXD6k70TcXO/OdOyyRFq5TVpmlUELNxdkkmXU9fkImNosmXU1DtsNrqdUgWd/nJQYXgwmtdXQ==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/compression": {
"version": "1.7.5",
"resolved": "https://registry.npmjs.org/@types/compression/-/compression-1.7.5.tgz",
"integrity": "sha512-AAQvK5pxMpaT+nDvhHrsBhLSYG5yQdtkaJE1WYieSNY2mVFKAgmU4ks65rkZD5oqnGCFLyQpUr1CqI4DmUMyDg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/express": "*"
}
},
"node_modules/@types/connect": {
"version": "3.4.38",
"resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz",
"integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/cookie-parser": {
"version": "1.4.7",
"resolved": "https://registry.npmjs.org/@types/cookie-parser/-/cookie-parser-1.4.7.tgz",
"integrity": "sha512-Fvuyi354Z+uayxzIGCwYTayFKocfV7TuDYZClCdIP9ckhvAu/ixDtCB6qx2TT0FKjPLf1f3P/J1rgf6lPs64mw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/express": "*"
}
},
"node_modules/@types/cookie-session": {
"version": "2.0.49",
"resolved": "https://registry.npmjs.org/@types/cookie-session/-/cookie-session-2.0.49.tgz",
"integrity": "sha512-4E/bBjlqLhU5l4iGPR+NkVJH593hpNsT4dC3DJDr+ODm6Qpe13kZQVkezRIb+TYDXaBMemS3yLQ+0leba3jlkQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/express": "*",
"@types/keygrip": "*"
}
},
"node_modules/@types/cors": {
"version": "2.8.17",
"resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.17.tgz",
"integrity": "sha512-8CGDvrBj1zgo2qE+oS3pOCyYNqCPryMWY2bGfwA0dcfopWGgxs+78df0Rs3rc9THP4JkOhLsAa+15VdpAqkcUA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/dompurify": { "node_modules/@types/dompurify": {
"version": "3.0.5", "version": "3.0.5",
"resolved": "https://registry.npmjs.org/@types/dompurify/-/dompurify-3.0.5.tgz", "resolved": "https://registry.npmjs.org/@types/dompurify/-/dompurify-3.0.5.tgz",
@@ -971,12 +1069,43 @@
"@types/trusted-types": "*" "@types/trusted-types": "*"
} }
}, },
"node_modules/@types/express": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz",
"integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==",
"dev": true,
"dependencies": {
"@types/body-parser": "*",
"@types/express-serve-static-core": "^4.17.33",
"@types/qs": "*",
"@types/serve-static": "*"
}
},
"node_modules/@types/express-serve-static-core": {
"version": "4.19.6",
"resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.19.6.tgz",
"integrity": "sha512-N4LZ2xG7DatVqhCZzOGb1Yi5lMbXSZcmdLDe9EzSndPV2HpWYWzRbaerl2n27irrm94EPpprqa8KpskPT085+A==",
"dev": true,
"dependencies": {
"@types/node": "*",
"@types/qs": "*",
"@types/range-parser": "*",
"@types/send": "*"
}
},
"node_modules/@types/http-cache-semantics": { "node_modules/@types/http-cache-semantics": {
"version": "4.0.2", "version": "4.0.2",
"resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.2.tgz", "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.2.tgz",
"integrity": "sha512-FD+nQWA2zJjh4L9+pFXqWOi0Hs1ryBCfI+985NjluQ1p8EYtoLvjLOKidXBtZ4/IcxDX4o8/E8qDS3540tNliw==", "integrity": "sha512-FD+nQWA2zJjh4L9+pFXqWOi0Hs1ryBCfI+985NjluQ1p8EYtoLvjLOKidXBtZ4/IcxDX4o8/E8qDS3540tNliw==",
"license": "MIT" "license": "MIT"
}, },
"node_modules/@types/http-errors": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz",
"integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/jquery": { "node_modules/@types/jquery": {
"version": "3.5.31", "version": "3.5.31",
"resolved": "https://registry.npmjs.org/@types/jquery/-/jquery-3.5.31.tgz", "resolved": "https://registry.npmjs.org/@types/jquery/-/jquery-3.5.31.tgz",
@@ -987,6 +1116,13 @@
"@types/sizzle": "*" "@types/sizzle": "*"
} }
}, },
"node_modules/@types/keygrip": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/@types/keygrip/-/keygrip-1.0.6.tgz",
"integrity": "sha512-lZuNAY9xeJt7Bx4t4dx0rYCDqGPW8RXhQZK1td7d4H6E9zYbLoOtjBvfwdTKpsyxQI/2jv+armjX/RW+ZNpXOQ==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/keyv": { "node_modules/@types/keyv": {
"version": "3.1.4", "version": "3.1.4",
"resolved": "https://registry.npmjs.org/@types/keyv/-/keyv-3.1.4.tgz", "resolved": "https://registry.npmjs.org/@types/keyv/-/keyv-3.1.4.tgz",
@@ -996,28 +1132,126 @@
"@types/node": "*" "@types/node": "*"
} }
}, },
"node_modules/@types/lodash": {
"version": "4.17.10",
"resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.10.tgz",
"integrity": "sha512-YpS0zzoduEhuOWjAotS6A5AVCva7X4lVlYLF0FYHAY9sdraBfnatttHItlWeZdGhuEkf+OzMNg2ZYAx8t+52uQ==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/long": { "node_modules/@types/long": {
"version": "4.0.2", "version": "4.0.2",
"resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz", "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz",
"integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==", "integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==",
"license": "MIT" "license": "MIT"
}, },
"node_modules/@types/node": { "node_modules/@types/mime": {
"version": "16.9.1", "version": "1.3.5",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.9.1.tgz", "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz",
"integrity": "sha512-QpLcX9ZSsq3YYUUnD3nFDY8H7wctAhQj/TFKL8Ya8v5fMm3CFXxo8zStsLAl780ltoYoo1WvKUVGBQK+1ifr7g==", "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==",
"dev": true,
"license": "MIT" "license": "MIT"
}, },
"node_modules/@types/mime-types": {
"version": "2.1.4",
"resolved": "https://registry.npmjs.org/@types/mime-types/-/mime-types-2.1.4.tgz",
"integrity": "sha512-lfU4b34HOri+kAY5UheuFMWPDOI+OPceBSHZKp69gEyTL/mmJ4cnU6Y/rlme3UL3GyOn6Y42hyIEw0/q8sWx5w==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/multer": {
"version": "1.4.12",
"resolved": "https://registry.npmjs.org/@types/multer/-/multer-1.4.12.tgz",
"integrity": "sha512-pQ2hoqvXiJt2FP9WQVLPRO+AmiIm/ZYkavPlIQnx282u4ZrVdztx0pkh3jjpQt0Kz+YI0YhSG264y08UJKoUQg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/express": "*"
}
},
"node_modules/@types/node": {
"version": "18.19.55",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.55.tgz",
"integrity": "sha512-zzw5Vw52205Zr/nmErSEkN5FLqXPuKX/k5d1D7RKHATGqU7y6YfX9QxZraUzUrFGqH6XzOzG196BC35ltJC4Cw==",
"license": "MIT",
"dependencies": {
"undici-types": "~5.26.4"
}
},
"node_modules/@types/node-fetch": { "node_modules/@types/node-fetch": {
"version": "2.6.9", "version": "2.6.11",
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.9.tgz", "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.11.tgz",
"integrity": "sha512-bQVlnMLFJ2d35DkPNjEPmd9ueO/rh5EiaZt2bhqiSarPjZIuIV6bPQVqcrEyvNo+AfTrRGVazle1tl597w3gfA==", "integrity": "sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@types/node": "*", "@types/node": "*",
"form-data": "^4.0.0" "form-data": "^4.0.0"
} }
}, },
"node_modules/@types/node-persist": {
"version": "3.1.8",
"resolved": "https://registry.npmjs.org/@types/node-persist/-/node-persist-3.1.8.tgz",
"integrity": "sha512-QLidg6/SadZYPrTKxtxL1A85XBoQlG40bhoMdhu6DH6+eNCMr2j+RGfFZ9I9+IY8W/PDwQonJ+iBWD62jZjMfg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/png-chunk-text": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/@types/png-chunk-text/-/png-chunk-text-1.0.3.tgz",
"integrity": "sha512-7keEFz73uNJ9Ar1XMCNnHEXT9pICJnouMQCCYgBEmHMgdkXaQzSTmSvr6tUDSqgdEgmlRAxZd97wprgliyZoCg==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/png-chunks-encode": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@types/png-chunks-encode/-/png-chunks-encode-1.0.2.tgz",
"integrity": "sha512-Dxn0aXEcSg1wVeHjvNlygm/+fKBDzWMCdxJYhjGUTeefFW/jYxWcrg+W7ppLBfH44iJMqeVBHtHBwtYQUeYvgw==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/png-chunks-extract": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@types/png-chunks-extract/-/png-chunks-extract-1.0.2.tgz",
"integrity": "sha512-z6djfFIbrrddtunoMJBOPlyZrnmeuG1kkvHUNi2QfpOb+JMMLuLliHHTmMyRi7k7LiTAut0HbdGCF6ibDtQAHQ==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/qs": {
"version": "6.9.16",
"resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.16.tgz",
"integrity": "sha512-7i+zxXdPD0T4cKDuxCUXJ4wHcsJLwENa6Z3dCu8cfCK743OGy5Nu1RmAGqDPsoTDINVEcdXKRvR/zre+P2Ku1A==",
"dev": true
},
"node_modules/@types/range-parser": {
"version": "1.2.7",
"resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz",
"integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==",
"dev": true
},
"node_modules/@types/readdir-glob": {
"version": "1.1.5",
"resolved": "https://registry.npmjs.org/@types/readdir-glob/-/readdir-glob-1.1.5.tgz",
"integrity": "sha512-raiuEPUYqXu+nvtY2Pe8s8FEmZ3x5yAH4VkLdihcPdalvsHltomrRC9BzuStrJ9yk06470hS0Crw0f1pXqD+Hg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/response-time": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@types/response-time/-/response-time-2.3.8.tgz",
"integrity": "sha512-7qGaNYvdxc0zRab8oHpYx7AW17qj+G0xuag1eCrw3M2VWPJQ/HyKaaghWygiaOUl0y9x7QGQwppDpqLJ5V9pzw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/express": "*",
"@types/node": "*"
}
},
"node_modules/@types/responselike": { "node_modules/@types/responselike": {
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.1.tgz", "resolved": "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.1.tgz",
@@ -1027,6 +1261,29 @@
"@types/node": "*" "@types/node": "*"
} }
}, },
"node_modules/@types/send": {
"version": "0.17.4",
"resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz",
"integrity": "sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/mime": "^1",
"@types/node": "*"
}
},
"node_modules/@types/serve-static": {
"version": "1.15.7",
"resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.7.tgz",
"integrity": "sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/http-errors": "*",
"@types/node": "*",
"@types/send": "*"
}
},
"node_modules/@types/sizzle": { "node_modules/@types/sizzle": {
"version": "2.3.8", "version": "2.3.8",
"resolved": "https://registry.npmjs.org/@types/sizzle/-/sizzle-2.3.8.tgz", "resolved": "https://registry.npmjs.org/@types/sizzle/-/sizzle-2.3.8.tgz",
@@ -1050,6 +1307,43 @@
"dev": true, "dev": true,
"license": "MIT" "license": "MIT"
}, },
"node_modules/@types/write-file-atomic": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/@types/write-file-atomic/-/write-file-atomic-4.0.3.tgz",
"integrity": "sha512-qdo+vZRchyJIHNeuI1nrpsLw+hnkgqP/8mlaN6Wle/NKhydHmUN9l4p3ZE8yP90AJNJW4uB8HQhedb4f1vNayQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/yargs": {
"version": "17.0.33",
"resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz",
"integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/yargs-parser": "*"
}
},
"node_modules/@types/yargs-parser": {
"version": "21.0.3",
"resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz",
"integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/yauzl": {
"version": "2.10.3",
"resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.10.3.tgz",
"integrity": "sha512-oJoftv0LSuaDZE3Le4DbKX+KS9G36NzOeSap90UIK0yMA/NhKJhqlSGtNDORNRaIbQfzjXDrQa0ytJ6mNRGz/Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@ungap/structured-clone": { "node_modules/@ungap/structured-clone": {
"version": "1.2.0", "version": "1.2.0",
"resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz",
@@ -3009,6 +3303,29 @@
"pend": "~1.2.0" "pend": "~1.2.0"
} }
}, },
"node_modules/fetch-blob": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz",
"integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/jimmywarting"
},
{
"type": "paypal",
"url": "https://paypal.me/jimmywarting"
}
],
"license": "MIT",
"dependencies": {
"node-domexception": "^1.0.0",
"web-streams-polyfill": "^3.0.3"
},
"engines": {
"node": "^12.20 || >= 14.13"
}
},
"node_modules/file-entry-cache": { "node_modules/file-entry-cache": {
"version": "6.0.1", "version": "6.0.1",
"resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
@@ -3178,6 +3495,18 @@
"node": ">= 14" "node": ">= 14"
} }
}, },
"node_modules/formdata-polyfill": {
"version": "4.0.10",
"resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz",
"integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==",
"license": "MIT",
"dependencies": {
"fetch-blob": "^3.1.2"
},
"engines": {
"node": ">=12.20.0"
}
},
"node_modules/forwarded": { "node_modules/forwarded": {
"version": "0.2.0", "version": "0.2.0",
"resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",
@@ -3364,11 +3693,18 @@
"url": "https://github.com/sponsors/sindresorhus" "url": "https://github.com/sponsors/sindresorhus"
} }
}, },
"node_modules/google-translate-api-browser": { "node_modules/google-translate-api-x": {
"version": "3.0.1", "version": "10.7.1",
"resolved": "https://registry.npmjs.org/google-translate-api-browser/-/google-translate-api-browser-3.0.1.tgz", "resolved": "https://registry.npmjs.org/google-translate-api-x/-/google-translate-api-x-10.7.1.tgz",
"integrity": "sha512-KTLodkyGBWMK9IW6QIeJ2zCuju4Z0CLpbkADKo+yLhbSTD4l+CXXpQ/xaynGVAzeBezzJG6qn8MLeqOq3SmW0A==", "integrity": "sha512-OdZDS6jRWzn1woOk62aOKQ5OyVaJSA+eyc6CktOWxo36IWfstOjwG/dkvnGl3Z2Sbpmk1A+jc2WwrBiRjqaY2A==",
"license": "MIT" "license": "MIT",
"engines": {
"node": ">=14.0.0"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/AidanWelch"
}
}, },
"node_modules/gopd": { "node_modules/gopd": {
"version": "1.0.1", "version": "1.0.1",
@@ -3489,14 +3825,6 @@
"node": ">= 0.4" "node": ">= 0.4"
} }
}, },
"node_modules/he": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz",
"integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==",
"bin": {
"he": "bin/he"
}
},
"node_modules/helmet": { "node_modules/helmet": {
"version": "7.1.0", "version": "7.1.0",
"resolved": "https://registry.npmjs.org/helmet/-/helmet-7.1.0.tgz", "resolved": "https://registry.npmjs.org/helmet/-/helmet-7.1.0.tgz",
@@ -3505,6 +3833,22 @@
"node": ">=16.0.0" "node": ">=16.0.0"
} }
}, },
"node_modules/html-entities": {
"version": "2.5.2",
"resolved": "https://registry.npmjs.org/html-entities/-/html-entities-2.5.2.tgz",
"integrity": "sha512-K//PSRMQk4FZ78Kyau+mZurHn3FH0Vwr+H36eE0rPbeYkRRi9YxceYPhuN60UwWorxyKHhqoAJl2OFKa4BVtaA==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/mdevils"
},
{
"type": "patreon",
"url": "https://patreon.com/mdevils"
}
],
"license": "MIT"
},
"node_modules/htmlparser2": { "node_modules/htmlparser2": {
"version": "8.0.2", "version": "8.0.2",
"resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz", "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz",
@@ -3691,6 +4035,12 @@
"@types/node": "16.9.1" "@types/node": "16.9.1"
} }
}, },
"node_modules/image-q/node_modules/@types/node": {
"version": "16.9.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.9.1.tgz",
"integrity": "sha512-QpLcX9ZSsq3YYUUnD3nFDY8H7wctAhQj/TFKL8Ya8v5fMm3CFXxo8zStsLAl780ltoYoo1WvKUVGBQK+1ifr7g==",
"license": "MIT"
},
"node_modules/import-fresh": { "node_modules/import-fresh": {
"version": "3.3.0", "version": "3.3.0",
"resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz",
@@ -3876,6 +4226,26 @@
"whatwg-fetch": "^3.4.1" "whatwg-fetch": "^3.4.1"
} }
}, },
"node_modules/isomorphic-fetch/node_modules/node-fetch": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
"integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
"license": "MIT",
"dependencies": {
"whatwg-url": "^5.0.0"
},
"engines": {
"node": "4.x || >=6.0.0"
},
"peerDependencies": {
"encoding": "^0.1.0"
},
"peerDependenciesMeta": {
"encoding": {
"optional": true
}
}
},
"node_modules/jackspeak": { "node_modules/jackspeak": {
"version": "2.3.6", "version": "2.3.6",
"resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz", "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz",
@@ -4355,23 +4725,30 @@
} }
}, },
"node_modules/node-fetch": { "node_modules/node-fetch": {
"version": "2.6.12", "version": "3.3.2",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.12.tgz", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz",
"integrity": "sha512-C/fGU2E8ToujUivIO0H+tpQ6HWo4eEmchoPIoXtxCrVghxdKq+QOHqEZW7tuP3KlV3bC8FRMO5nMCC7Zm1VP6g==", "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"whatwg-url": "^5.0.0" "data-uri-to-buffer": "^4.0.0",
"fetch-blob": "^3.1.4",
"formdata-polyfill": "^4.0.10"
}, },
"engines": { "engines": {
"node": "4.x || >=6.0.0" "node": "^12.20.0 || ^14.13.1 || >=16.0.0"
}, },
"peerDependencies": { "funding": {
"encoding": "^0.1.0" "type": "opencollective",
}, "url": "https://opencollective.com/node-fetch"
"peerDependenciesMeta": { }
"encoding": { },
"optional": true "node_modules/node-fetch/node_modules/data-uri-to-buffer": {
} "version": "4.0.1",
"resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz",
"integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==",
"license": "MIT",
"engines": {
"node": ">= 12"
} }
}, },
"node_modules/node-persist": { "node_modules/node-persist": {
@@ -4537,13 +4914,24 @@
"openai": "bin/cli" "openai": "bin/cli"
} }
}, },
"node_modules/openai/node_modules/@types/node": { "node_modules/openai/node_modules/node-fetch": {
"version": "18.18.9", "version": "2.7.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.9.tgz", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
"integrity": "sha512-0f5klcuImLnG4Qreu9hPj/rEfFq6YRc5n2mAjSsH+ec/mJL+3voBH0+8T7o8RpFjH7ovc+TRsL/c7OYIQsPTfQ==", "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"undici-types": "~5.26.4" "whatwg-url": "^5.0.0"
},
"engines": {
"node": "4.x || >=6.0.0"
},
"peerDependencies": {
"encoding": "^0.1.0"
},
"peerDependenciesMeta": {
"encoding": {
"optional": true
}
} }
}, },
"node_modules/optionator": { "node_modules/optionator": {
@@ -5808,6 +6196,7 @@
"version": "5.0.0", "version": "5.0.0",
"resolved": "https://registry.npmjs.org/tr46/-/tr46-5.0.0.tgz", "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.0.0.tgz",
"integrity": "sha512-tk2G5R2KRwBd+ZN0zaEXpmzdKyOYksXwywulIX95MBODjSzMIuQnQ3m8JxgbhnL1LeVo7lqQKsYa1O3Htl7K5g==", "integrity": "sha512-tk2G5R2KRwBd+ZN0zaEXpmzdKyOYksXwywulIX95MBODjSzMIuQnQ3m8JxgbhnL1LeVo7lqQKsYa1O3Htl7K5g==",
"license": "MIT",
"dependencies": { "dependencies": {
"punycode": "^2.3.1" "punycode": "^2.3.1"
}, },
@@ -6012,6 +6401,7 @@
"version": "7.0.0", "version": "7.0.0",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz",
"integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==",
"license": "BSD-2-Clause",
"engines": { "engines": {
"node": ">=12" "node": ">=12"
} }
@@ -6026,6 +6416,7 @@
"version": "14.0.0", "version": "14.0.0",
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.0.0.tgz", "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.0.0.tgz",
"integrity": "sha512-1lfMEm2IEr7RIV+f4lUNPOqfFL+pO+Xw3fJSqmjX9AbXcXcYOkCe1P6+9VBZB6n94af16NfZf+sSk0JCBZC9aw==", "integrity": "sha512-1lfMEm2IEr7RIV+f4lUNPOqfFL+pO+Xw3fJSqmjX9AbXcXcYOkCe1P6+9VBZB6n94af16NfZf+sSk0JCBZC9aw==",
"license": "MIT",
"dependencies": { "dependencies": {
"tr46": "^5.0.0", "tr46": "^5.0.0",
"webidl-conversions": "^7.0.0" "webidl-conversions": "^7.0.0"

View File

@@ -14,9 +14,9 @@
"csrf-csrf": "^2.2.3", "csrf-csrf": "^2.2.3",
"express": "^4.21.0", "express": "^4.21.0",
"form-data": "^4.0.0", "form-data": "^4.0.0",
"google-translate-api-browser": "^3.0.1", "google-translate-api-x": "^10.7.1",
"he": "^1.2.0",
"helmet": "^7.1.0", "helmet": "^7.1.0",
"html-entities": "^2.5.2",
"iconv-lite": "^0.6.3", "iconv-lite": "^0.6.3",
"ip-matching": "^2.1.2", "ip-matching": "^2.1.2",
"ipaddr.js": "^2.0.1", "ipaddr.js": "^2.0.1",
@@ -24,7 +24,7 @@
"lodash": "^4.17.21", "lodash": "^4.17.21",
"mime-types": "^2.1.35", "mime-types": "^2.1.35",
"multer": "^1.4.5-lts.1", "multer": "^1.4.5-lts.1",
"node-fetch": "^2.6.11", "node-fetch": "^3.3.2",
"node-persist": "^4.0.1", "node-persist": "^4.0.1",
"open": "^8.4.2", "open": "^8.4.2",
"png-chunk-text": "^1.0.0", "png-chunk-text": "^1.0.0",
@@ -60,7 +60,7 @@
} }
}, },
"name": "sillytavern", "name": "sillytavern",
"type": "commonjs", "type": "module",
"license": "AGPL-3.0", "license": "AGPL-3.0",
"repository": { "repository": {
"type": "git", "type": "git",
@@ -85,9 +85,28 @@
}, },
"main": "server.js", "main": "server.js",
"devDependencies": { "devDependencies": {
"@types/archiver": "^6.0.2",
"@types/command-exists": "^1.2.3",
"@types/compression": "^1.7.5",
"@types/cookie-parser": "^1.4.7",
"@types/cookie-session": "^2.0.49",
"@types/cors": "^2.8.17",
"@types/dompurify": "^3.0.5", "@types/dompurify": "^3.0.5",
"@types/express": "^4.17.21",
"@types/jquery": "^3.5.29", "@types/jquery": "^3.5.29",
"@types/lodash": "^4.17.10",
"@types/mime-types": "^2.1.4",
"@types/multer": "^1.4.12",
"@types/node": "^18.19.55",
"@types/node-persist": "^3.1.8",
"@types/png-chunk-text": "^1.0.3",
"@types/png-chunks-encode": "^1.0.2",
"@types/png-chunks-extract": "^1.0.2",
"@types/response-time": "^2.3.8",
"@types/toastr": "^2.1.43", "@types/toastr": "^2.1.43",
"@types/write-file-atomic": "^4.0.3",
"@types/yargs": "^17.0.33",
"@types/yauzl": "^2.10.3",
"eslint": "^8.57.0" "eslint": "^8.57.0"
} }
} }

View File

@@ -3,16 +3,28 @@
// 1. node plugins.js update // 1. node plugins.js update
// 2. node plugins.js install <plugin-git-url> // 2. node plugins.js install <plugin-git-url>
// More operations coming soon. // More operations coming soon.
const { default: git } = require('simple-git'); import fs from 'node:fs';
const fs = require('fs'); import path from 'node:path';
const path = require('path'); import process from 'node:process';
const { color } = require('./src/util'); import { fileURLToPath } from 'node:url';
import { default as git } from 'simple-git';
import { color } from './src/util.js';
const __dirname = import.meta.dirname ?? path.dirname(fileURLToPath(import.meta.url));
process.chdir(__dirname); process.chdir(__dirname);
const pluginsPath = './plugins'; const pluginsPath = './plugins';
const command = process.argv[2]; const command = process.argv[2];
if (!command) {
console.log('Usage: node plugins.js <command>');
console.log('Commands:');
console.log(' update - Update all installed plugins');
console.log(' install <plugin-git-url> - Install plugin from a Git URL');
process.exit(1);
}
if (command === 'update') { if (command === 'update') {
console.log(color.magenta('Updating all plugins')); console.log(color.magenta('Updating all plugins'));
updatePlugins(); updatePlugins();

4
plugins/package.json Normal file
View File

@@ -0,0 +1,4 @@
{
"name": "sillytavern-plugins",
"type": "commonjs"
}

View File

@@ -1,11 +1,13 @@
/** /**
* Scripts to be done before starting the server for the first time. * Scripts to be done before starting the server for the first time.
*/ */
const fs = require('fs'); import fs from 'node:fs';
const path = require('path'); import path from 'node:path';
const crypto = require('crypto'); import crypto from 'node:crypto';
const yaml = require('yaml'); import process from 'node:process';
const _ = require('lodash'); import yaml from 'yaml';
import _ from 'lodash';
import { createRequire } from 'node:module';
/** /**
* Colorizes console output. * Colorizes console output.
@@ -59,13 +61,15 @@ function convertConfig() {
try { try {
console.log(color.blue('Converting config.conf to config.yaml. Your old config.conf will be renamed to config.conf.bak')); console.log(color.blue('Converting config.conf to config.yaml. Your old config.conf will be renamed to config.conf.bak'));
const config = require(path.join(process.cwd(), './config.conf')); fs.renameSync('./config.conf', './config.conf.cjs'); // Force loading as CommonJS
fs.copyFileSync('./config.conf', './config.conf.bak'); const require = createRequire(import.meta.url);
fs.rmSync('./config.conf'); const config = require(path.join(process.cwd(), './config.conf.cjs'));
fs.copyFileSync('./config.conf.cjs', './config.conf.bak');
fs.rmSync('./config.conf.cjs');
fs.writeFileSync('./config.yaml', yaml.stringify(config)); fs.writeFileSync('./config.yaml', yaml.stringify(config));
console.log(color.green('Conversion successful. Please check your config.yaml and fix it if necessary.')); console.log(color.green('Conversion successful. Please check your config.yaml and fix it if necessary.'));
} catch (error) { } catch (error) {
console.error(color.red('FATAL: Config conversion failed. Please check your config.conf file and try again.')); console.error(color.red('FATAL: Config conversion failed. Please check your config.conf file and try again.'), error);
return; return;
} }
} }
@@ -75,7 +79,7 @@ function convertConfig() {
* Compares the current config.yaml with the default config.yaml and adds any missing values. * Compares the current config.yaml with the default config.yaml and adds any missing values.
*/ */
function addMissingConfigValues() { function addMissingConfigValues() {
try { try {
const defaultConfig = yaml.parse(fs.readFileSync(path.join(process.cwd(), './default/config.yaml'), 'utf8')); const defaultConfig = yaml.parse(fs.readFileSync(path.join(process.cwd(), './default/config.yaml'), 'utf8'));
let config = yaml.parse(fs.readFileSync(path.join(process.cwd(), './config.yaml'), 'utf8')); let config = yaml.parse(fs.readFileSync(path.join(process.cwd(), './config.yaml'), 'utf8'));
@@ -132,7 +136,7 @@ function createDefaultFiles() {
/**
 * Computes the MD5 digest of a binary buffer.
 * @param {Buffer|Uint8Array} data Bytes to hash
 * @returns {string} Lowercase hex-encoded MD5 digest
 */
function getMd5Hash(data) {
    // Copy into a plain Uint8Array so Hash#update always receives a typed array view.
    const hasher = crypto.createHash('md5');
    hasher.update(new Uint8Array(data));
    return hasher.digest('hex');
}

View File

@@ -1,7 +1,13 @@
const yaml = require('yaml'); import fs from 'node:fs';
const fs = require('fs'); import process from 'node:process';
const storage = require('node-persist'); import yaml from 'yaml';
const users = require('./src/users'); import storage from 'node-persist';
import {
initUserStorage,
getPasswordSalt,
getPasswordHash,
toKey,
} from './src/users.js';
const userAccount = process.argv[2]; const userAccount = process.argv[2];
const userPassword = process.argv[3]; const userPassword = process.argv[3];
@@ -22,7 +28,7 @@ async function initStorage() {
process.exit(1); process.exit(1);
} }
await users.initUserStorage(dataRoot); await initUserStorage(dataRoot);
} }
async function main() { async function main() {
@@ -31,22 +37,22 @@ async function main() {
/** /**
* @type {import('./src/users').User} * @type {import('./src/users').User}
*/ */
const user = await storage.get(users.toKey(userAccount)); const user = await storage.get(toKey(userAccount));
if (!user) { if (!user) {
console.error(`User "${userAccount}" not found.`); console.error(`User "${userAccount}" not found.`);
process.exit(1); process.exit(1);
} }
if (!user.enabled) { if (!user.enabled) {
console.log('User is disabled. Enabling...'); console.log('User is disabled. Enabling...');
user.enabled = true; user.enabled = true;
} }
if (userPassword) { if (userPassword) {
console.log('Setting new password...'); console.log('Setting new password...');
const salt = users.getPasswordSalt(); const salt = getPasswordSalt();
const passwordHash = users.getPasswordHash(userPassword, salt); const passwordHash = getPasswordHash(userPassword, salt);
user.password = passwordHash; user.password = passwordHash;
user.salt = salt; user.salt = salt;
} else { } else {
@@ -55,7 +61,7 @@ async function main() {
user.salt = ''; user.salt = '';
} }
await storage.setItem(users.toKey(userAccount), user); await storage.setItem(toKey(userAccount), user);
console.log('User recovered. A program will exit now.'); console.log('User recovered. A program will exit now.');
} }

344
server.js
View File

@@ -1,32 +1,35 @@
#!/usr/bin/env node #!/usr/bin/env node
// native node modules // native node modules
const fs = require('fs'); import fs from 'node:fs';
const http = require('http'); import http from 'node:http';
const https = require('https'); import https from 'node:https';
const path = require('path'); import path from 'node:path';
const util = require('util'); import util from 'node:util';
import net from 'node:net';
import dns from 'node:dns';
import process from 'node:process';
import { fileURLToPath } from 'node:url';
// cli/fs related library imports // cli/fs related library imports
const open = require('open'); import open from 'open';
const yargs = require('yargs/yargs'); import yargs from 'yargs/yargs';
const { hideBin } = require('yargs/helpers'); import { hideBin } from 'yargs/helpers';
// express/server related library imports // express/server related library imports
const cors = require('cors'); import cors from 'cors';
const doubleCsrf = require('csrf-csrf').doubleCsrf; import { doubleCsrf } from 'csrf-csrf';
const express = require('express'); import express from 'express';
const compression = require('compression'); import compression from 'compression';
const cookieParser = require('cookie-parser'); import cookieParser from 'cookie-parser';
const cookieSession = require('cookie-session'); import cookieSession from 'cookie-session';
const multer = require('multer'); import multer from 'multer';
const responseTime = require('response-time'); import responseTime from 'response-time';
const helmet = require('helmet').default; import helmet from 'helmet';
import bodyParser from 'body-parser';
// net related library imports // net related library imports
const net = require('net'); import fetch from 'node-fetch';
const dns = require('dns');
const fetch = require('node-fetch').default;
// Unrestrict console logs display limit // Unrestrict console logs display limit
util.inspect.defaultOptions.maxArrayLength = null; util.inspect.defaultOptions.maxArrayLength = null;
@@ -34,20 +37,81 @@ util.inspect.defaultOptions.maxStringLength = null;
util.inspect.defaultOptions.depth = 4; util.inspect.defaultOptions.depth = 4;
// local library imports // local library imports
const userModule = require('./src/users'); import { loadPlugins } from './src/plugin-loader.js';
const basicAuthMiddleware = require('./src/middleware/basicAuth'); import {
const whitelistMiddleware = require('./src/middleware/whitelist'); initUserStorage,
const initRequestProxy = require('./src/request-proxy'); getCsrfSecret,
const contentManager = require('./src/endpoints/content-manager'); getCookieSecret,
const { getCookieSessionName,
getAllEnabledUsers,
ensurePublicDirectoriesExist,
getUserDirectoriesList,
migrateSystemPrompts,
migrateUserData,
requireLoginMiddleware,
setUserDataMiddleware,
shouldRedirectToLogin,
tryAutoLogin,
router as userDataRouter,
} from './src/users.js';
import basicAuthMiddleware from './src/middleware/basicAuth.js';
import whitelistMiddleware from './src/middleware/whitelist.js';
import multerMonkeyPatch from './src/middleware/multerMonkeyPatch.js';
import initRequestProxy from './src/request-proxy.js';
import {
getVersion, getVersion,
getConfigValue, getConfigValue,
color, color,
forwardFetchResponse, forwardFetchResponse,
removeColorFormatting, removeColorFormatting,
getSeparator, getSeparator,
} = require('./src/util'); } from './src/util.js';
const { ensureThumbnailCache } = require('./src/endpoints/thumbnails'); import { UPLOADS_DIRECTORY } from './src/constants.js';
import { ensureThumbnailCache } from './src/endpoints/thumbnails.js';
// Routers
import { router as usersPublicRouter } from './src/endpoints/users-public.js';
import { router as usersPrivateRouter } from './src/endpoints/users-private.js';
import { router as usersAdminRouter } from './src/endpoints/users-admin.js';
import { router as movingUIRouter } from './src/endpoints/moving-ui.js';
import { router as imagesRouter } from './src/endpoints/images.js';
import { router as quickRepliesRouter } from './src/endpoints/quick-replies.js';
import { router as avatarsRouter } from './src/endpoints/avatars.js';
import { router as themesRouter } from './src/endpoints/themes.js';
import { router as openAiRouter } from './src/endpoints/openai.js';
import { router as googleRouter } from './src/endpoints/google.js';
import { router as anthropicRouter } from './src/endpoints/anthropic.js';
import { router as tokenizersRouter } from './src/endpoints/tokenizers.js';
import { router as presetsRouter } from './src/endpoints/presets.js';
import { router as secretsRouter } from './src/endpoints/secrets.js';
import { router as thumbnailRouter } from './src/endpoints/thumbnails.js';
import { router as novelAiRouter } from './src/endpoints/novelai.js';
import { router as extensionsRouter } from './src/endpoints/extensions.js';
import { router as assetsRouter } from './src/endpoints/assets.js';
import { router as filesRouter } from './src/endpoints/files.js';
import { router as charactersRouter } from './src/endpoints/characters.js';
import { router as chatsRouter } from './src/endpoints/chats.js';
import { router as groupsRouter } from './src/endpoints/groups.js';
import { router as worldInfoRouter } from './src/endpoints/worldinfo.js';
import { router as statsRouter, init as statsInit, onExit as statsOnExit } from './src/endpoints/stats.js';
import { router as backgroundsRouter } from './src/endpoints/backgrounds.js';
import { router as spritesRouter } from './src/endpoints/sprites.js';
import { router as contentManagerRouter, checkForNewContent } from './src/endpoints/content-manager.js';
import { router as settingsRouter, init as settingsInit } from './src/endpoints/settings.js';
import { router as stableDiffusionRouter } from './src/endpoints/stable-diffusion.js';
import { router as hordeRouter } from './src/endpoints/horde.js';
import { router as vectorsRouter } from './src/endpoints/vectors.js';
import { router as translateRouter } from './src/endpoints/translate.js';
import { router as classifyRouter } from './src/endpoints/classify.js';
import { router as captionRouter } from './src/endpoints/caption.js';
import { router as searchRouter } from './src/endpoints/search.js';
import { router as openRouterRouter } from './src/endpoints/openrouter.js';
import { router as chatCompletionsRouter } from './src/endpoints/backends/chat-completions.js';
import { router as koboldRouter } from './src/endpoints/backends/kobold.js';
import { router as textCompletionsRouter } from './src/endpoints/backends/text-completions.js';
import { router as scaleAltRouter } from './src/endpoints/backends/scale-alt.js';
import { router as speechRouter } from './src/endpoints/speech.js';
import { router as azureRouter } from './src/endpoints/azure.js';
// Work around a node v20.0.0, v20.1.0, and v20.2.0 bug. The issue was fixed in v20.3.0. // Work around a node v20.0.0, v20.1.0, and v20.2.0 bug. The issue was fixed in v20.3.0.
// https://github.com/nodejs/node/issues/47822#issuecomment-1564708870 // https://github.com/nodejs/node/issues/47822#issuecomment-1564708870
@@ -166,8 +230,8 @@ const cliArguments = yargs(hideBin(process.argv))
}).parseSync(); }).parseSync();
// change all relative paths // change all relative paths
console.log(`Node version: ${process.version}. Running in ${process.env.NODE_ENV} environment.`); const serverDirectory = import.meta.dirname ?? path.dirname(fileURLToPath(import.meta.url));
const serverDirectory = __dirname; console.log(`Node version: ${process.version}. Running in ${process.env.NODE_ENV} environment. Server directory: ${serverDirectory}`);
process.chdir(serverDirectory); process.chdir(serverDirectory);
const app = express(); const app = express();
@@ -188,7 +252,7 @@ const basicAuthMode = cliArguments.basicAuthMode ?? getConfigValue('basicAuthMod
const perUserBasicAuth = getConfigValue('perUserBasicAuth', DEFAULT_PER_USER_BASIC_AUTH); const perUserBasicAuth = getConfigValue('perUserBasicAuth', DEFAULT_PER_USER_BASIC_AUTH);
const enableAccounts = getConfigValue('enableUserAccounts', DEFAULT_ACCOUNTS); const enableAccounts = getConfigValue('enableUserAccounts', DEFAULT_ACCOUNTS);
const uploadsPath = path.join(dataRoot, require('./src/constants').UPLOADS_DIRECTORY); const uploadsPath = path.join(dataRoot, UPLOADS_DIRECTORY);
const enableIPv6 = cliArguments.enableIPv6 ?? getConfigValue('protocol.ipv6', DEFAULT_ENABLE_IPV6); const enableIPv6 = cliArguments.enableIPv6 ?? getConfigValue('protocol.ipv6', DEFAULT_ENABLE_IPV6);
const enableIPv4 = cliArguments.enableIPv4 ?? getConfigValue('protocol.ipv4', DEFAULT_ENABLE_IPV4); const enableIPv4 = cliArguments.enableIPv4 ?? getConfigValue('protocol.ipv4', DEFAULT_ENABLE_IPV4);
@@ -232,7 +296,6 @@ if (listen && basicAuthMode) app.use(basicAuthMiddleware);
app.use(whitelistMiddleware(enableWhitelist, listen)); app.use(whitelistMiddleware(enableWhitelist, listen));
if (enableCorsProxy) { if (enableCorsProxy) {
const bodyParser = require('body-parser');
app.use(bodyParser.json({ app.use(bodyParser.json({
limit: '200mb', limit: '200mb',
})); }));
@@ -301,24 +364,23 @@ function getSessionCookieAge() {
} }
app.use(cookieSession({ app.use(cookieSession({
name: userModule.getCookieSessionName(), name: getCookieSessionName(),
sameSite: 'strict', sameSite: 'strict',
httpOnly: true, httpOnly: true,
maxAge: getSessionCookieAge(), maxAge: getSessionCookieAge(),
secret: userModule.getCookieSecret(), secret: getCookieSecret(),
})); }));
app.use(userModule.setUserDataMiddleware); app.use(setUserDataMiddleware);
// CSRF Protection // // CSRF Protection //
if (!disableCsrf) { if (!disableCsrf) {
const COOKIES_SECRET = userModule.getCookieSecret(); const COOKIES_SECRET = getCookieSecret();
const { generateToken, doubleCsrfProtection } = doubleCsrf({ const { generateToken, doubleCsrfProtection } = doubleCsrf({
getSecret: userModule.getCsrfSecret, getSecret: getCsrfSecret,
cookieName: 'X-CSRF-Token', cookieName: 'X-CSRF-Token',
cookieOptions: { cookieOptions: {
httpOnly: true,
sameSite: 'strict', sameSite: 'strict',
secure: false, secure: false,
}, },
@@ -346,7 +408,7 @@ if (!disableCsrf) {
// Static files // Static files
// Host index page // Host index page
app.get('/', (request, response) => { app.get('/', (request, response) => {
if (userModule.shouldRedirectToLogin(request)) { if (shouldRedirectToLogin(request)) {
const query = request.url.split('?')[1]; const query = request.url.split('?')[1];
const redirectUrl = query ? `/login?${query}` : '/login'; const redirectUrl = query ? `/login?${query}` : '/login';
return response.redirect(redirectUrl); return response.redirect(redirectUrl);
@@ -363,7 +425,7 @@ app.get('/login', async (request, response) => {
} }
try { try {
const autoLogin = await userModule.tryAutoLogin(request, basicAuthMode); const autoLogin = await tryAutoLogin(request, basicAuthMode);
if (autoLogin) { if (autoLogin) {
return response.redirect('/'); return response.redirect('/');
@@ -379,22 +441,22 @@ app.get('/login', async (request, response) => {
app.use(express.static(process.cwd() + '/public', {})); app.use(express.static(process.cwd() + '/public', {}));
// Public API // Public API
app.use('/api/users', require('./src/endpoints/users-public').router); app.use('/api/users', usersPublicRouter);
// Everything below this line requires authentication // Everything below this line requires authentication
app.use(userModule.requireLoginMiddleware); app.use(requireLoginMiddleware);
app.get('/api/ping', (_, response) => response.sendStatus(204)); app.get('/api/ping', (_, response) => response.sendStatus(204));
// File uploads // File uploads
app.use(multer({ dest: uploadsPath, limits: { fieldSize: 10 * 1024 * 1024 } }).single('avatar')); app.use(multer({ dest: uploadsPath, limits: { fieldSize: 10 * 1024 * 1024 } }).single('avatar'));
app.use(require('./src/middleware/multerMonkeyPatch')); app.use(multerMonkeyPatch);
// User data mount // User data mount
app.use('/', userModule.router); app.use('/', userDataRouter);
// Private endpoints // Private endpoints
app.use('/api/users', require('./src/endpoints/users-private').router); app.use('/api/users', usersPrivateRouter);
// Admin endpoints // Admin endpoints
app.use('/api/users', require('./src/endpoints/users-admin').router); app.use('/api/users', usersAdminRouter);
app.get('/version', async function (_, response) { app.get('/version', async function (_, response) {
const data = await getVersion(); const data = await getVersion();
@@ -509,126 +571,45 @@ redirect('/api/serpapi/search', '/api/search/serpapi');
redirect('/api/serpapi/visit', '/api/search/visit'); redirect('/api/serpapi/visit', '/api/search/visit');
redirect('/api/serpapi/transcript', '/api/search/transcript'); redirect('/api/serpapi/transcript', '/api/search/transcript');
// Moving UI app.use('/api/moving-ui', movingUIRouter);
app.use('/api/moving-ui', require('./src/endpoints/moving-ui').router); app.use('/api/images', imagesRouter);
app.use('/api/quick-replies', quickRepliesRouter);
// Image management app.use('/api/avatars', avatarsRouter);
app.use('/api/images', require('./src/endpoints/images').router); app.use('/api/themes', themesRouter);
app.use('/api/openai', openAiRouter);
// Quick reply management app.use('/api/google', googleRouter);
app.use('/api/quick-replies', require('./src/endpoints/quick-replies').router); app.use('/api/anthropic', anthropicRouter);
app.use('/api/tokenizers', tokenizersRouter);
// Avatar management app.use('/api/presets', presetsRouter);
app.use('/api/avatars', require('./src/endpoints/avatars').router); app.use('/api/secrets', secretsRouter);
app.use('/thumbnail', thumbnailRouter);
// Theme management app.use('/api/novelai', novelAiRouter);
app.use('/api/themes', require('./src/endpoints/themes').router); app.use('/api/extensions', extensionsRouter);
app.use('/api/assets', assetsRouter);
// OpenAI API app.use('/api/files', filesRouter);
app.use('/api/openai', require('./src/endpoints/openai').router); app.use('/api/characters', charactersRouter);
app.use('/api/chats', chatsRouter);
//Google API app.use('/api/groups', groupsRouter);
app.use('/api/google', require('./src/endpoints/google').router); app.use('/api/worldinfo', worldInfoRouter);
app.use('/api/stats', statsRouter);
//Anthropic API app.use('/api/backgrounds', backgroundsRouter);
app.use('/api/anthropic', require('./src/endpoints/anthropic').router); app.use('/api/sprites', spritesRouter);
app.use('/api/content', contentManagerRouter);
// Tokenizers app.use('/api/settings', settingsRouter);
app.use('/api/tokenizers', require('./src/endpoints/tokenizers').router); app.use('/api/sd', stableDiffusionRouter);
app.use('/api/horde', hordeRouter);
// Preset management app.use('/api/vector', vectorsRouter);
app.use('/api/presets', require('./src/endpoints/presets').router); app.use('/api/translate', translateRouter);
app.use('/api/extra/classify', classifyRouter);
// Secrets managemenet app.use('/api/extra/caption', captionRouter);
app.use('/api/secrets', require('./src/endpoints/secrets').router); app.use('/api/search', searchRouter);
app.use('/api/backends/text-completions', textCompletionsRouter);
// Thumbnail generation. These URLs are saved in chat, so this route cannot be renamed! app.use('/api/openrouter', openRouterRouter);
app.use('/thumbnail', require('./src/endpoints/thumbnails').router); app.use('/api/backends/kobold', koboldRouter);
app.use('/api/backends/chat-completions', chatCompletionsRouter);
// NovelAI generation app.use('/api/backends/scale-alt', scaleAltRouter);
app.use('/api/novelai', require('./src/endpoints/novelai').router); app.use('/api/speech', speechRouter);
app.use('/api/azure', azureRouter);
// Third-party extensions
app.use('/api/extensions', require('./src/endpoints/extensions').router);
// Asset management
app.use('/api/assets', require('./src/endpoints/assets').router);
// File management
app.use('/api/files', require('./src/endpoints/files').router);
// Character management
app.use('/api/characters', require('./src/endpoints/characters').router);
// Chat management
app.use('/api/chats', require('./src/endpoints/chats').router);
// Group management
app.use('/api/groups', require('./src/endpoints/groups').router);
// World info management
app.use('/api/worldinfo', require('./src/endpoints/worldinfo').router);
// Stats calculation
const statsEndpoint = require('./src/endpoints/stats');
app.use('/api/stats', statsEndpoint.router);
// Background management
app.use('/api/backgrounds', require('./src/endpoints/backgrounds').router);
// Character sprite management
app.use('/api/sprites', require('./src/endpoints/sprites').router);
// Custom content management
app.use('/api/content', require('./src/endpoints/content-manager').router);
// Settings load/store
const settingsEndpoint = require('./src/endpoints/settings');
app.use('/api/settings', settingsEndpoint.router);
// Stable Diffusion generation
app.use('/api/sd', require('./src/endpoints/stable-diffusion').router);
// LLM and SD Horde generation
app.use('/api/horde', require('./src/endpoints/horde').router);
// Vector storage DB
app.use('/api/vector', require('./src/endpoints/vectors').router);
// Chat translation
app.use('/api/translate', require('./src/endpoints/translate').router);
// Emotion classification
app.use('/api/extra/classify', require('./src/endpoints/classify').router);
// Image captioning
app.use('/api/extra/caption', require('./src/endpoints/caption').router);
// Web search and scraping
app.use('/api/search', require('./src/endpoints/search').router);
// The different text generation APIs
// Ooba/OpenAI text completions
app.use('/api/backends/text-completions', require('./src/endpoints/backends/text-completions').router);
// OpenRouter
app.use('/api/openrouter', require('./src/endpoints/openrouter').router);
// KoboldAI
app.use('/api/backends/kobold', require('./src/endpoints/backends/kobold').router);
// OpenAI chat completions
app.use('/api/backends/chat-completions', require('./src/endpoints/backends/chat-completions').router);
// Scale (alt method)
app.use('/api/backends/scale-alt', require('./src/endpoints/backends/scale-alt').router);
// Speech (text-to-speech and speech-to-text)
app.use('/api/speech', require('./src/endpoints/speech').router);
// Azure TTS
app.use('/api/azure', require('./src/endpoints/azure').router);
const tavernUrlV6 = new URL( const tavernUrlV6 = new URL(
(cliArguments.ssl ? 'https://' : 'http://') + (cliArguments.ssl ? 'https://' : 'http://') +
@@ -658,22 +639,22 @@ const preSetupTasks = async function () {
} }
console.log(); console.log();
const directories = await userModule.getUserDirectoriesList(); const directories = await getUserDirectoriesList();
await contentManager.checkForNewContent(directories); await checkForNewContent(directories);
await ensureThumbnailCache(); await ensureThumbnailCache();
cleanUploads(); cleanUploads();
await settingsEndpoint.init(); await settingsInit();
await statsEndpoint.init(); await statsInit();
const cleanupPlugins = await loadPlugins(); const cleanupPlugins = await initializePlugins();
const consoleTitle = process.title; const consoleTitle = process.title;
let isExiting = false; let isExiting = false;
const exitProcess = async () => { const exitProcess = async () => {
if (isExiting) return; if (isExiting) return;
isExiting = true; isExiting = true;
statsEndpoint.onExit(); await statsOnExit();
if (typeof cleanupPlugins === 'function') { if (typeof cleanupPlugins === 'function') {
await cleanupPlugins(); await cleanupPlugins();
} }
@@ -773,11 +754,10 @@ const postSetupTasks = async function (v6Failed, v4Failed) {
* Loads server plugins from a directory. * Loads server plugins from a directory.
* @returns {Promise<Function>} Function to be run on server exit * @returns {Promise<Function>} Function to be run on server exit
*/ */
async function loadPlugins() { async function initializePlugins() {
try { try {
const pluginDirectory = path.join(serverDirectory, 'plugins'); const pluginDirectory = path.join(serverDirectory, 'plugins');
const loader = require('./src/plugin-loader'); const cleanupPlugins = await loadPlugins(app, pluginDirectory);
const cleanupPlugins = await loader.loadPlugins(app, pluginDirectory);
return cleanupPlugins; return cleanupPlugins;
} catch { } catch {
console.log('Plugin loading failed.'); console.log('Plugin loading failed.');
@@ -850,7 +830,7 @@ function createHttpsServer(url) {
}, app); }, app);
server.on('error', reject); server.on('error', reject);
server.on('listening', resolve); server.on('listening', resolve);
server.listen(url.port || 443, url.hostname); server.listen(Number(url.port || 443), url.hostname);
}); });
} }
@@ -865,7 +845,7 @@ function createHttpServer(url) {
const server = http.createServer(app); const server = http.createServer(app);
server.on('error', reject); server.on('error', reject);
server.on('listening', resolve); server.on('listening', resolve);
server.listen(url.port || 80, url.hostname); server.listen(Number(url.port || 80), url.hostname);
}); });
} }
@@ -917,7 +897,7 @@ async function verifySecuritySettings() {
logSecurityAlert('Your SillyTavern is currently insecurely open to the public. Enable whitelisting, basic authentication or user accounts.'); logSecurityAlert('Your SillyTavern is currently insecurely open to the public. Enable whitelisting, basic authentication or user accounts.');
} }
const users = await userModule.getAllEnabledUsers(); const users = await getAllEnabledUsers();
const unprotectedUsers = users.filter(x => !x.password); const unprotectedUsers = users.filter(x => !x.password);
const unprotectedAdminUsers = unprotectedUsers.filter(x => x.admin); const unprotectedAdminUsers = unprotectedUsers.filter(x => x.admin);
@@ -935,10 +915,10 @@ async function verifySecuritySettings() {
} }
// User storage module needs to be initialized before starting the server // User storage module needs to be initialized before starting the server
userModule.initUserStorage(dataRoot) initUserStorage(dataRoot)
.then(userModule.ensurePublicDirectoriesExist) .then(ensurePublicDirectoriesExist)
.then(userModule.migrateUserData) .then(migrateUserData)
.then(userModule.migrateSystemPrompts) .then(migrateSystemPrompts)
.then(verifySecuritySettings) .then(verifySecuritySettings)
.then(preSetupTasks) .then(preSetupTasks)
.finally(startServer); .finally(startServer);

View File

@@ -1,10 +1,10 @@
const { TEXTGEN_TYPES, OPENROUTER_HEADERS, FEATHERLESS_HEADERS } = require('./constants'); import { TEXTGEN_TYPES, OPENROUTER_HEADERS, FEATHERLESS_HEADERS } from './constants.js';
const { SECRET_KEYS, readSecret } = require('./endpoints/secrets'); import { SECRET_KEYS, readSecret } from './endpoints/secrets.js';
const { getConfigValue } = require('./util'); import { getConfigValue } from './util.js';
/** /**
* Gets the headers for the Mancer API. * Gets the headers for the Mancer API.
* @param {import('./users').UserDirectoryList} directories User directories * @param {import('./users.js').UserDirectoryList} directories User directories
* @returns {object} Headers for the request * @returns {object} Headers for the request
*/ */
function getMancerHeaders(directories) { function getMancerHeaders(directories) {
@@ -18,7 +18,7 @@ function getMancerHeaders(directories) {
/** /**
* Gets the headers for the TogetherAI API. * Gets the headers for the TogetherAI API.
* @param {import('./users').UserDirectoryList} directories User directories * @param {import('./users.js').UserDirectoryList} directories User directories
* @returns {object} Headers for the request * @returns {object} Headers for the request
*/ */
function getTogetherAIHeaders(directories) { function getTogetherAIHeaders(directories) {
@@ -31,7 +31,7 @@ function getTogetherAIHeaders(directories) {
/** /**
* Gets the headers for the InfermaticAI API. * Gets the headers for the InfermaticAI API.
* @param {import('./users').UserDirectoryList} directories User directories * @param {import('./users.js').UserDirectoryList} directories User directories
* @returns {object} Headers for the request * @returns {object} Headers for the request
*/ */
function getInfermaticAIHeaders(directories) { function getInfermaticAIHeaders(directories) {
@@ -44,7 +44,7 @@ function getInfermaticAIHeaders(directories) {
/** /**
* Gets the headers for the DreamGen API. * Gets the headers for the DreamGen API.
* @param {import('./users').UserDirectoryList} directories User directories * @param {import('./users.js').UserDirectoryList} directories User directories
* @returns {object} Headers for the request * @returns {object} Headers for the request
*/ */
function getDreamGenHeaders(directories) { function getDreamGenHeaders(directories) {
@@ -57,7 +57,7 @@ function getDreamGenHeaders(directories) {
/** /**
* Gets the headers for the OpenRouter API. * Gets the headers for the OpenRouter API.
* @param {import('./users').UserDirectoryList} directories User directories * @param {import('./users.js').UserDirectoryList} directories User directories
* @returns {object} Headers for the request * @returns {object} Headers for the request
*/ */
function getOpenRouterHeaders(directories) { function getOpenRouterHeaders(directories) {
@@ -69,7 +69,7 @@ function getOpenRouterHeaders(directories) {
/** /**
* Gets the headers for the vLLM API. * Gets the headers for the vLLM API.
* @param {import('./users').UserDirectoryList} directories User directories * @param {import('./users.js').UserDirectoryList} directories User directories
* @returns {object} Headers for the request * @returns {object} Headers for the request
*/ */
function getVllmHeaders(directories) { function getVllmHeaders(directories) {
@@ -82,7 +82,7 @@ function getVllmHeaders(directories) {
/** /**
* Gets the headers for the Aphrodite API. * Gets the headers for the Aphrodite API.
* @param {import('./users').UserDirectoryList} directories User directories * @param {import('./users.js').UserDirectoryList} directories User directories
* @returns {object} Headers for the request * @returns {object} Headers for the request
*/ */
function getAphroditeHeaders(directories) { function getAphroditeHeaders(directories) {
@@ -96,7 +96,7 @@ function getAphroditeHeaders(directories) {
/** /**
* Gets the headers for the Tabby API. * Gets the headers for the Tabby API.
* @param {import('./users').UserDirectoryList} directories User directories * @param {import('./users.js').UserDirectoryList} directories User directories
* @returns {object} Headers for the request * @returns {object} Headers for the request
*/ */
function getTabbyHeaders(directories) { function getTabbyHeaders(directories) {
@@ -110,7 +110,7 @@ function getTabbyHeaders(directories) {
/** /**
* Gets the headers for the LlamaCPP API. * Gets the headers for the LlamaCPP API.
* @param {import('./users').UserDirectoryList} directories User directories * @param {import('./users.js').UserDirectoryList} directories User directories
* @returns {object} Headers for the request * @returns {object} Headers for the request
*/ */
function getLlamaCppHeaders(directories) { function getLlamaCppHeaders(directories) {
@@ -123,7 +123,7 @@ function getLlamaCppHeaders(directories) {
/** /**
* Gets the headers for the Ooba API. * Gets the headers for the Ooba API.
* @param {import('./users').UserDirectoryList} directories * @param {import('./users.js').UserDirectoryList} directories
* @returns {object} Headers for the request * @returns {object} Headers for the request
*/ */
function getOobaHeaders(directories) { function getOobaHeaders(directories) {
@@ -136,7 +136,7 @@ function getOobaHeaders(directories) {
/** /**
* Gets the headers for the KoboldCpp API. * Gets the headers for the KoboldCpp API.
* @param {import('./users').UserDirectoryList} directories * @param {import('./users.js').UserDirectoryList} directories
* @returns {object} Headers for the request * @returns {object} Headers for the request
*/ */
function getKoboldCppHeaders(directories) { function getKoboldCppHeaders(directories) {
@@ -149,7 +149,7 @@ function getKoboldCppHeaders(directories) {
/** /**
* Gets the headers for the Featherless API. * Gets the headers for the Featherless API.
* @param {import('./users').UserDirectoryList} directories * @param {import('./users.js').UserDirectoryList} directories
* @returns {object} Headers for the request * @returns {object} Headers for the request
*/ */
function getFeatherlessHeaders(directories) { function getFeatherlessHeaders(directories) {
@@ -161,7 +161,7 @@ function getFeatherlessHeaders(directories) {
/** /**
* Gets the headers for the HuggingFace API. * Gets the headers for the HuggingFace API.
* @param {import('./users').UserDirectoryList} directories * @param {import('./users.js').UserDirectoryList} directories
* @returns {object} Headers for the request * @returns {object} Headers for the request
*/ */
function getHuggingFaceHeaders(directories) { function getHuggingFaceHeaders(directories) {
@@ -172,7 +172,7 @@ function getHuggingFaceHeaders(directories) {
}) : {}; }) : {};
} }
function getOverrideHeaders(urlHost) { export function getOverrideHeaders(urlHost) {
const requestOverrides = getConfigValue('requestOverrides', []); const requestOverrides = getConfigValue('requestOverrides', []);
const overrideHeaders = requestOverrides?.find((e) => e.hosts?.includes(urlHost))?.headers; const overrideHeaders = requestOverrides?.find((e) => e.hosts?.includes(urlHost))?.headers;
if (overrideHeaders && urlHost) { if (overrideHeaders && urlHost) {
@@ -188,7 +188,7 @@ function getOverrideHeaders(urlHost) {
* @param {object} args New request arguments * @param {object} args New request arguments
* @param {string|null} server API server for new request * @param {string|null} server API server for new request
*/ */
function setAdditionalHeaders(request, args, server) { export function setAdditionalHeaders(request, args, server) {
setAdditionalHeadersByType(args.headers, request.body.api_type, server, request.user.directories); setAdditionalHeadersByType(args.headers, request.body.api_type, server, request.user.directories);
} }
@@ -197,9 +197,9 @@ function setAdditionalHeaders(request, args, server) {
* @param {object} requestHeaders Request headers * @param {object} requestHeaders Request headers
* @param {string} type API type * @param {string} type API type
* @param {string|null} server API server for new request * @param {string|null} server API server for new request
* @param {import('./users').UserDirectoryList} directories User directories * @param {import('./users.js').UserDirectoryList} directories User directories
*/ */
function setAdditionalHeadersByType(requestHeaders, type, server, directories) { export function setAdditionalHeadersByType(requestHeaders, type, server, directories) {
const headerGetters = { const headerGetters = {
[TEXTGEN_TYPES.MANCER]: getMancerHeaders, [TEXTGEN_TYPES.MANCER]: getMancerHeaders,
[TEXTGEN_TYPES.VLLM]: getVllmHeaders, [TEXTGEN_TYPES.VLLM]: getVllmHeaders,
@@ -234,9 +234,3 @@ function setAdditionalHeadersByType(requestHeaders, type, server, directories) {
Object.assign(requestHeaders, headers); Object.assign(requestHeaders, headers);
} }
module.exports = {
getOverrideHeaders,
setAdditionalHeaders,
setAdditionalHeadersByType,
};

View File

@@ -1,8 +1,9 @@
const fs = require('fs'); import fs from 'node:fs';
import { Buffer } from 'node:buffer';
const encode = require('png-chunks-encode'); import encode from 'png-chunks-encode';
const extract = require('png-chunks-extract'); import extract from 'png-chunks-extract';
const PNGtext = require('png-chunk-text'); import PNGtext from 'png-chunk-text';
/** /**
* Writes Character metadata to a PNG image buffer. * Writes Character metadata to a PNG image buffer.
@@ -11,8 +12,8 @@ const PNGtext = require('png-chunk-text');
* @param {string} data Character data to write * @param {string} data Character data to write
* @returns {Buffer} PNG image buffer with metadata * @returns {Buffer} PNG image buffer with metadata
*/ */
const write = (image, data) => { export const write = (image, data) => {
const chunks = extract(image); const chunks = extract(new Uint8Array(image));
const tEXtChunks = chunks.filter(chunk => chunk.name === 'tEXt'); const tEXtChunks = chunks.filter(chunk => chunk.name === 'tEXt');
// Remove existing tEXt chunks // Remove existing tEXt chunks
@@ -36,7 +37,9 @@ const write = (image, data) => {
const base64EncodedData = Buffer.from(JSON.stringify(v3Data), 'utf8').toString('base64'); const base64EncodedData = Buffer.from(JSON.stringify(v3Data), 'utf8').toString('base64');
chunks.splice(-1, 0, PNGtext.encode('ccv3', base64EncodedData)); chunks.splice(-1, 0, PNGtext.encode('ccv3', base64EncodedData));
} catch (error) { } } catch (error) {
// Ignore errors when adding v3 chunk
}
const newBuffer = Buffer.from(encode(chunks)); const newBuffer = Buffer.from(encode(chunks));
return newBuffer; return newBuffer;
@@ -48,8 +51,8 @@ const write = (image, data) => {
* @param {Buffer} image PNG image buffer * @param {Buffer} image PNG image buffer
* @returns {string} Character data * @returns {string} Character data
*/ */
const read = (image) => { export const read = (image) => {
const chunks = extract(image); const chunks = extract(new Uint8Array(image));
const textChunks = chunks.filter((chunk) => chunk.name === 'tEXt').map((chunk) => PNGtext.decode(chunk.data)); const textChunks = chunks.filter((chunk) => chunk.name === 'tEXt').map((chunk) => PNGtext.decode(chunk.data));
@@ -80,7 +83,7 @@ const read = (image) => {
* @param {string} format File format * @param {string} format File format
* @returns {string} Character data * @returns {string} Character data
*/ */
const parse = (cardUrl, format) => { export const parse = (cardUrl, format) => {
let fileFormat = format === undefined ? 'png' : format; let fileFormat = format === undefined ? 'png' : format;
switch (fileFormat) { switch (fileFormat) {
@@ -93,8 +96,3 @@ const parse = (cardUrl, format) => {
throw new Error('Unsupported format'); throw new Error('Unsupported format');
}; };
module.exports = {
parse,
write,
read,
};

View File

@@ -1,18 +1,18 @@
const PUBLIC_DIRECTORIES = { export const PUBLIC_DIRECTORIES = {
images: 'public/img/', images: 'public/img/',
backups: 'backups/', backups: 'backups/',
sounds: 'public/sounds', sounds: 'public/sounds',
extensions: 'public/scripts/extensions', extensions: 'public/scripts/extensions',
}; };
const SETTINGS_FILE = 'settings.json'; export const SETTINGS_FILE = 'settings.json';
/** /**
* @type {import('./users').UserDirectoryList} * @type {import('./users.js').UserDirectoryList}
* @readonly * @readonly
* @enum {string} * @enum {string}
*/ */
const USER_DIRECTORY_TEMPLATE = Object.freeze({ export const USER_DIRECTORY_TEMPLATE = Object.freeze({
root: '', root: '',
thumbnails: 'thumbnails', thumbnails: 'thumbnails',
thumbnailsBg: 'thumbnails/bg', thumbnailsBg: 'thumbnails/bg',
@@ -45,10 +45,10 @@ const USER_DIRECTORY_TEMPLATE = Object.freeze({
}); });
/** /**
* @type {import('./users').User} * @type {import('./users.js').User}
* @readonly * @readonly
*/ */
const DEFAULT_USER = Object.freeze({ export const DEFAULT_USER = Object.freeze({
handle: 'default-user', handle: 'default-user',
name: 'User', name: 'User',
created: Date.now(), created: Date.now(),
@@ -58,7 +58,7 @@ const DEFAULT_USER = Object.freeze({
salt: '', salt: '',
}); });
const UNSAFE_EXTENSIONS = [ export const UNSAFE_EXTENSIONS = [
'.php', '.php',
'.exe', '.exe',
'.com', '.com',
@@ -135,7 +135,7 @@ const UNSAFE_EXTENSIONS = [
'.ws', '.ws',
]; ];
const GEMINI_SAFETY = [ export const GEMINI_SAFETY = [
{ {
category: 'HARM_CATEGORY_HARASSMENT', category: 'HARM_CATEGORY_HARASSMENT',
threshold: 'BLOCK_NONE', threshold: 'BLOCK_NONE',
@@ -158,7 +158,7 @@ const GEMINI_SAFETY = [
}, },
]; ];
const BISON_SAFETY = [ export const BISON_SAFETY = [
{ {
category: 'HARM_CATEGORY_DEROGATORY', category: 'HARM_CATEGORY_DEROGATORY',
threshold: 'BLOCK_NONE', threshold: 'BLOCK_NONE',
@@ -185,7 +185,7 @@ const BISON_SAFETY = [
}, },
]; ];
const CHAT_COMPLETION_SOURCES = { export const CHAT_COMPLETION_SOURCES = {
OPENAI: 'openai', OPENAI: 'openai',
WINDOWAI: 'windowai', WINDOWAI: 'windowai',
CLAUDE: 'claude', CLAUDE: 'claude',
@@ -205,10 +205,10 @@ const CHAT_COMPLETION_SOURCES = {
/** /**
* Path to multer file uploads under the data root. * Path to multer file uploads under the data root.
*/ */
const UPLOADS_DIRECTORY = '_uploads'; export const UPLOADS_DIRECTORY = '_uploads';
// TODO: this is copied from the client code; there should be a way to de-duplicate it eventually // TODO: this is copied from the client code; there should be a way to de-duplicate it eventually
const TEXTGEN_TYPES = { export const TEXTGEN_TYPES = {
OOBA: 'ooba', OOBA: 'ooba',
MANCER: 'mancer', MANCER: 'mancer',
VLLM: 'vllm', VLLM: 'vllm',
@@ -225,7 +225,7 @@ const TEXTGEN_TYPES = {
HUGGINGFACE: 'huggingface', HUGGINGFACE: 'huggingface',
}; };
const INFERMATICAI_KEYS = [ export const INFERMATICAI_KEYS = [
'model', 'model',
'prompt', 'prompt',
'max_tokens', 'max_tokens',
@@ -248,7 +248,7 @@ const INFERMATICAI_KEYS = [
'logprobs', 'logprobs',
]; ];
const FEATHERLESS_KEYS = [ export const FEATHERLESS_KEYS = [
'model', 'model',
'prompt', 'prompt',
'best_of', 'best_of',
@@ -290,9 +290,8 @@ const FEATHERLESS_KEYS = [
'guided_whitespace_pattern', 'guided_whitespace_pattern',
]; ];
// https://dreamgen.com/docs/api#openai-text // https://dreamgen.com/docs/api#openai-text
const DREAMGEN_KEYS = [ export const DREAMGEN_KEYS = [
'model', 'model',
'prompt', 'prompt',
'max_tokens', 'max_tokens',
@@ -309,7 +308,7 @@ const DREAMGEN_KEYS = [
]; ];
// https://docs.together.ai/reference/completions // https://docs.together.ai/reference/completions
const TOGETHERAI_KEYS = [ export const TOGETHERAI_KEYS = [
'model', 'model',
'prompt', 'prompt',
'max_tokens', 'max_tokens',
@@ -325,7 +324,7 @@ const TOGETHERAI_KEYS = [
]; ];
// https://github.com/jmorganca/ollama/blob/main/docs/api.md#request-with-options // https://github.com/jmorganca/ollama/blob/main/docs/api.md#request-with-options
const OLLAMA_KEYS = [ export const OLLAMA_KEYS = [
'num_predict', 'num_predict',
'num_ctx', 'num_ctx',
'stop', 'stop',
@@ -345,20 +344,20 @@ const OLLAMA_KEYS = [
'min_p', 'min_p',
]; ];
const AVATAR_WIDTH = 512; export const AVATAR_WIDTH = 512;
const AVATAR_HEIGHT = 768; export const AVATAR_HEIGHT = 768;
const OPENROUTER_HEADERS = { export const OPENROUTER_HEADERS = {
'HTTP-Referer': 'https://sillytavern.app', 'HTTP-Referer': 'https://sillytavern.app',
'X-Title': 'SillyTavern', 'X-Title': 'SillyTavern',
}; };
const FEATHERLESS_HEADERS = { export const FEATHERLESS_HEADERS = {
'HTTP-Referer': 'https://sillytavern.app', 'HTTP-Referer': 'https://sillytavern.app',
'X-Title': 'SillyTavern', 'X-Title': 'SillyTavern',
}; };
const OPENROUTER_KEYS = [ export const OPENROUTER_KEYS = [
'max_tokens', 'max_tokens',
'temperature', 'temperature',
'top_k', 'top_k',
@@ -378,7 +377,7 @@ const OPENROUTER_KEYS = [
]; ];
// https://github.com/vllm-project/vllm/blob/0f8a91401c89ac0a8018def3756829611b57727f/vllm/entrypoints/openai/protocol.py#L220 // https://github.com/vllm-project/vllm/blob/0f8a91401c89ac0a8018def3756829611b57727f/vllm/entrypoints/openai/protocol.py#L220
const VLLM_KEYS = [ export const VLLM_KEYS = [
'model', 'model',
'prompt', 'prompt',
'best_of', 'best_of',
@@ -419,27 +418,3 @@ const VLLM_KEYS = [
'guided_decoding_backend', 'guided_decoding_backend',
'guided_whitespace_pattern', 'guided_whitespace_pattern',
]; ];
module.exports = {
DEFAULT_USER,
SETTINGS_FILE,
PUBLIC_DIRECTORIES,
USER_DIRECTORY_TEMPLATE,
UNSAFE_EXTENSIONS,
UPLOADS_DIRECTORY,
GEMINI_SAFETY,
BISON_SAFETY,
TEXTGEN_TYPES,
CHAT_COMPLETION_SOURCES,
AVATAR_WIDTH,
AVATAR_HEIGHT,
TOGETHERAI_KEYS,
OLLAMA_KEYS,
INFERMATICAI_KEYS,
DREAMGEN_KEYS,
OPENROUTER_HEADERS,
OPENROUTER_KEYS,
VLLM_KEYS,
FEATHERLESS_KEYS,
FEATHERLESS_HEADERS,
};

View File

@@ -1,9 +1,10 @@
const { readSecret, SECRET_KEYS } = require('./secrets'); import fetch from 'node-fetch';
const fetch = require('node-fetch').default; import express from 'express';
const express = require('express');
const { jsonParser } = require('../express-common');
const router = express.Router(); import { readSecret, SECRET_KEYS } from './secrets.js';
import { jsonParser } from '../express-common.js';
export const router = express.Router();
router.post('/caption-image', jsonParser, async (request, response) => { router.post('/caption-image', jsonParser, async (request, response) => {
try { try {
@@ -41,7 +42,6 @@ router.post('/caption-image', jsonParser, async (request, response) => {
'anthropic-version': '2023-06-01', 'anthropic-version': '2023-06-01',
'x-api-key': request.body.reverse_proxy ? request.body.proxy_password : readSecret(request.user.directories, SECRET_KEYS.CLAUDE), 'x-api-key': request.body.reverse_proxy ? request.body.proxy_password : readSecret(request.user.directories, SECRET_KEYS.CLAUDE),
}, },
timeout: 0,
}); });
if (!result.ok) { if (!result.ok) {
@@ -50,6 +50,7 @@ router.post('/caption-image', jsonParser, async (request, response) => {
return response.status(result.status).send({ error: true }); return response.status(result.status).send({ error: true });
} }
/** @type {any} */
const generateResponseJson = await result.json(); const generateResponseJson = await result.json();
const caption = generateResponseJson.content[0].text; const caption = generateResponseJson.content[0].text;
console.log('Claude response:', generateResponseJson); console.log('Claude response:', generateResponseJson);
@@ -64,5 +65,3 @@ router.post('/caption-image', jsonParser, async (request, response) => {
response.status(500).send('Internal server error'); response.status(500).send('Internal server error');
} }
}); });
module.exports = { router };

View File

@@ -1,13 +1,15 @@
const path = require('path'); import path from 'node:path';
const fs = require('fs'); import fs from 'node:fs';
const mime = require('mime-types'); import { finished } from 'node:stream/promises';
const express = require('express');
const sanitize = require('sanitize-filename'); import mime from 'mime-types';
const fetch = require('node-fetch').default; import express from 'express';
const { finished } = require('stream/promises'); import sanitize from 'sanitize-filename';
const { UNSAFE_EXTENSIONS } = require('../constants'); import fetch from 'node-fetch';
const { jsonParser } = require('../express-common');
const { clientRelativePath } = require('../util'); import { UNSAFE_EXTENSIONS } from '../constants.js';
import { jsonParser } from '../express-common.js';
import { clientRelativePath } from '../util.js';
const VALID_CATEGORIES = ['bgm', 'ambient', 'blip', 'live2d', 'vrm', 'character', 'temp']; const VALID_CATEGORIES = ['bgm', 'ambient', 'blip', 'live2d', 'vrm', 'character', 'temp'];
@@ -16,7 +18,7 @@ const VALID_CATEGORIES = ['bgm', 'ambient', 'blip', 'live2d', 'vrm', 'character'
* @param {string} inputFilename Input filename * @param {string} inputFilename Input filename
* @returns {{error: boolean, message?: string}} Whether validation failed, and why if so * @returns {{error: boolean, message?: string}} Whether validation failed, and why if so
*/ */
function validateAssetFileName(inputFilename) { export function validateAssetFileName(inputFilename) {
if (!/^[a-zA-Z0-9_\-.]+$/.test(inputFilename)) { if (!/^[a-zA-Z0-9_\-.]+$/.test(inputFilename)) {
return { return {
error: true, error: true,
@@ -77,7 +79,7 @@ function getFiles(dir, files = []) {
/** /**
* Ensure that the asset folders exist. * Ensure that the asset folders exist.
* @param {import('../users').UserDirectoryList} directories - The user's directories * @param {import('../users.js').UserDirectoryList} directories - The user's directories
*/ */
function ensureFoldersExist(directories) { function ensureFoldersExist(directories) {
const folderPath = path.join(directories.assets); const folderPath = path.join(directories.assets);
@@ -93,7 +95,7 @@ function ensureFoldersExist(directories) {
} }
} }
const router = express.Router(); export const router = express.Router();
/** /**
* HTTP POST handler function to retrieve name of all files of a given folder path. * HTTP POST handler function to retrieve name of all files of a given folder path.
@@ -366,5 +368,3 @@ router.post('/character', jsonParser, async (request, response) => {
return response.sendStatus(500); return response.sendStatus(500);
} }
}); });
module.exports = { router, validateAssetFileName };

View File

@@ -1,16 +1,16 @@
const express = require('express'); import path from 'node:path';
const path = require('path'); import fs from 'node:fs';
const fs = require('fs');
const sanitize = require('sanitize-filename');
const writeFileAtomicSync = require('write-file-atomic').sync;
const { jsonParser, urlencodedParser } = require('../express-common');
const { AVATAR_WIDTH, AVATAR_HEIGHT } = require('../constants');
const { getImages, tryParse } = require('../util');
// image processing related library imports import express from 'express';
const jimp = require('jimp'); import sanitize from 'sanitize-filename';
import jimp from 'jimp';
import { sync as writeFileAtomicSync } from 'write-file-atomic';
const router = express.Router(); import { jsonParser, urlencodedParser } from '../express-common.js';
import { AVATAR_WIDTH, AVATAR_HEIGHT } from '../constants.js';
import { getImages, tryParse } from '../util.js';
export const router = express.Router();
router.post('/get', jsonParser, function (request, response) { router.post('/get', jsonParser, function (request, response) {
var images = getImages(request.user.directories.avatars); var images = getImages(request.user.directories.avatars);
@@ -58,5 +58,3 @@ router.post('/upload', urlencodedParser, async (request, response) => {
return response.status(400).send('Is not a valid image'); return response.status(400).send('Is not a valid image');
} }
}); });
module.exports = { router };

View File

@@ -1,9 +1,10 @@
const { readSecret, SECRET_KEYS } = require('./secrets'); import fetch from 'node-fetch';
const fetch = require('node-fetch').default; import { Router } from 'express';
const express = require('express');
const { jsonParser } = require('../express-common');
const router = express.Router(); import { readSecret, SECRET_KEYS } from './secrets.js';
import { jsonParser } from '../express-common.js';
export const router = Router();
router.post('/list', jsonParser, async (req, res) => { router.post('/list', jsonParser, async (req, res) => {
try { try {
@@ -86,7 +87,3 @@ router.post('/generate', jsonParser, async (req, res) => {
return res.sendStatus(500); return res.sendStatus(500);
} }
}); });
module.exports = {
router,
};

View File

@@ -1,13 +1,41 @@
const express = require('express'); import process from 'node:process';
const fetch = require('node-fetch').default; import express from 'express';
import fetch from 'node-fetch';
const { jsonParser } = require('../../express-common'); import { jsonParser } from '../../express-common.js';
const { CHAT_COMPLETION_SOURCES, GEMINI_SAFETY, BISON_SAFETY, OPENROUTER_HEADERS } = require('../../constants'); import {
const { forwardFetchResponse, getConfigValue, tryParse, uuidv4, mergeObjectWithYaml, excludeKeysByYaml, color } = require('../../util'); CHAT_COMPLETION_SOURCES,
const { convertClaudeMessages, convertGooglePrompt, convertTextCompletionPrompt, convertCohereMessages, convertMistralMessages, convertAI21Messages, mergeMessages } = require('../../prompt-converters'); GEMINI_SAFETY,
BISON_SAFETY,
OPENROUTER_HEADERS,
} from '../../constants.js';
import {
forwardFetchResponse,
getConfigValue,
tryParse,
uuidv4,
mergeObjectWithYaml,
excludeKeysByYaml,
color,
} from '../../util.js';
import {
convertClaudeMessages,
convertGooglePrompt,
convertTextCompletionPrompt,
convertCohereMessages,
convertMistralMessages,
convertAI21Messages,
mergeMessages,
} from '../../prompt-converters.js';
const { readSecret, SECRET_KEYS } = require('../secrets'); import { readSecret, SECRET_KEYS } from '../secrets.js';
const { getTokenizerModel, getSentencepiceTokenizer, getTiktokenTokenizer, sentencepieceTokenizers, TEXT_COMPLETION_MODELS } = require('../tokenizers'); import {
getTokenizerModel,
getSentencepiceTokenizer,
getTiktokenTokenizer,
sentencepieceTokenizers,
TEXT_COMPLETION_MODELS,
} from '../tokenizers.js';
const API_OPENAI = 'https://api.openai.com/v1'; const API_OPENAI = 'https://api.openai.com/v1';
const API_CLAUDE = 'https://api.anthropic.com/v1'; const API_CLAUDE = 'https://api.anthropic.com/v1';
@@ -41,43 +69,6 @@ function postProcessPrompt(messages, type, charName, userName) {
} }
} }
/**
* Ollama strikes back. Special boy #2's steaming routine.
* Wrap this abomination into proper SSE stream, again.
* @param {Response} jsonStream JSON stream
* @param {import('express').Request} request Express request
* @param {import('express').Response} response Express response
* @returns {Promise<any>} Nothing valuable
*/
async function parseCohereStream(jsonStream, request, response) {
try {
const stream = new CohereStream({ stream: jsonStream.body, eventShape: { type: 'json', messageTerminator: '\n' } });
for await (const json of stream.iterMessages()) {
if (json.message) {
const message = json.message || 'Unknown error';
const chunk = { error: { message: message } };
response.write(`data: ${JSON.stringify(chunk)}\n\n`);
} else if (json.event_type === 'text-generation') {
const text = json.text || '';
const chunk = { choices: [{ text }] };
response.write(`data: ${JSON.stringify(chunk)}\n\n`);
}
}
console.log('Streaming request finished');
response.write('data: [DONE]\n\n');
response.end();
} catch (error) {
console.log('Error forwarding streaming response:', error);
if (!response.headersSent) {
return response.status(500).send({ error: true });
} else {
return response.end();
}
}
}
/** /**
* Sends a request to Claude API. * Sends a request to Claude API.
* @param {express.Request} request Express request * @param {express.Request} request Express request
@@ -161,7 +152,6 @@ async function sendClaudeRequest(request, response) {
'x-api-key': apiKey, 'x-api-key': apiKey,
...additionalHeaders, ...additionalHeaders,
}, },
timeout: 0,
}); });
if (request.body.stream) { if (request.body.stream) {
@@ -174,6 +164,7 @@ async function sendClaudeRequest(request, response) {
return response.status(generateResponse.status).send({ error: true }); return response.status(generateResponse.status).send({ error: true });
} }
/** @type {any} */
const generateResponseJson = await generateResponse.json(); const generateResponseJson = await generateResponse.json();
const responseText = generateResponseJson?.content?.[0]?.text || ''; const responseText = generateResponseJson?.content?.[0]?.text || '';
console.log('Claude response:', generateResponseJson); console.log('Claude response:', generateResponseJson);
@@ -221,7 +212,6 @@ async function sendScaleRequest(request, response) {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
'Authorization': `Basic ${apiKey}`, 'Authorization': `Basic ${apiKey}`,
}, },
timeout: 0,
}); });
if (!generateResponse.ok) { if (!generateResponse.ok) {
@@ -229,6 +219,7 @@ async function sendScaleRequest(request, response) {
return response.status(500).send({ error: true }); return response.status(500).send({ error: true });
} }
/** @type {any} */
const generateResponseJson = await generateResponse.json(); const generateResponseJson = await generateResponse.json();
console.log('Scale response:', generateResponseJson); console.log('Scale response:', generateResponseJson);
@@ -344,7 +335,6 @@ async function sendMakerSuiteRequest(request, response) {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
}, },
signal: controller.signal, signal: controller.signal,
timeout: 0,
}); });
// have to do this because of their busted ass streaming endpoint // have to do this because of their busted ass streaming endpoint
if (stream) { if (stream) {
@@ -363,6 +353,7 @@ async function sendMakerSuiteRequest(request, response) {
return response.status(generateResponse.status).send({ error: true }); return response.status(generateResponse.status).send({ error: true });
} }
/** @type {any} */
const generateResponseJson = await generateResponse.json(); const generateResponseJson = await generateResponse.json();
const candidates = generateResponseJson?.candidates; const candidates = generateResponseJson?.candidates;
@@ -626,7 +617,7 @@ async function sendCohereRequest(request, response) {
} }
} }
const router = express.Router(); export const router = express.Router();
router.post('/status', jsonParser, async function (request, response_getstatus_openai) { router.post('/status', jsonParser, async function (request, response_getstatus_openai) {
if (!request.body) return response_getstatus_openai.sendStatus(400); if (!request.body) return response_getstatus_openai.sendStatus(400);
@@ -685,6 +676,7 @@ router.post('/status', jsonParser, async function (request, response_getstatus_o
}); });
if (response.ok) { if (response.ok) {
/** @type {any} */
const data = await response.json(); const data = await response.json();
response_getstatus_openai.send(data); response_getstatus_openai.send(data);
@@ -988,7 +980,6 @@ router.post('/generate', jsonParser, function (request, response) {
}, },
body: JSON.stringify(requestBody), body: JSON.stringify(requestBody),
signal: controller.signal, signal: controller.signal,
timeout: 0,
}; };
console.log(requestBody); console.log(requestBody);
@@ -1014,6 +1005,7 @@ router.post('/generate', jsonParser, function (request, response) {
} }
if (fetchResponse.ok) { if (fetchResponse.ok) {
/** @type {any} */
let json = await fetchResponse.json(); let json = await fetchResponse.json();
response.send(json); response.send(json);
console.log(json); console.log(json);
@@ -1069,6 +1061,3 @@ router.post('/generate', jsonParser, function (request, response) {
} }
}); });
module.exports = {
router,
};

View File

@@ -1,13 +1,13 @@
const express = require('express'); import fs from 'node:fs';
const fetch = require('node-fetch').default; import express from 'express';
const fs = require('fs'); import fetch from 'node-fetch';
const { jsonParser, urlencodedParser } = require('../../express-common'); import { jsonParser, urlencodedParser } from '../../express-common.js';
const { forwardFetchResponse, delay } = require('../../util'); import { forwardFetchResponse, delay } from '../../util.js';
const { getOverrideHeaders, setAdditionalHeaders, setAdditionalHeadersByType } = require('../../additional-headers'); import { getOverrideHeaders, setAdditionalHeaders, setAdditionalHeadersByType } from '../../additional-headers.js';
const { TEXTGEN_TYPES } = require('../../constants'); import { TEXTGEN_TYPES } from '../../constants.js';
const router = express.Router(); export const router = express.Router();
router.post('/generate', jsonParser, async function (request, response_generate) { router.post('/generate', jsonParser, async function (request, response_generate) {
if (!request.body) return response_generate.sendStatus(400); if (!request.body) return response_generate.sendStatus(400);
@@ -96,7 +96,7 @@ router.post('/generate', jsonParser, async function (request, response_generate)
for (let i = 0; i < MAX_RETRIES; i++) { for (let i = 0; i < MAX_RETRIES; i++) {
try { try {
const url = request.body.streaming ? `${request.body.api_server}/extra/generate/stream` : `${request.body.api_server}/v1/generate`; const url = request.body.streaming ? `${request.body.api_server}/extra/generate/stream` : `${request.body.api_server}/v1/generate`;
const response = await fetch(url, { method: 'POST', timeout: 0, ...args }); const response = await fetch(url, { method: 'POST', ...args });
if (request.body.streaming) { if (request.body.streaming) {
// Pipe remote SSE stream to Express response // Pipe remote SSE stream to Express response
@@ -156,6 +156,7 @@ router.post('/status', jsonParser, async function (request, response) {
const result = {}; const result = {};
/** @type {any} */
const [koboldUnitedResponse, koboldExtraResponse, koboldModelResponse] = await Promise.all([ const [koboldUnitedResponse, koboldExtraResponse, koboldModelResponse] = await Promise.all([
// We catch errors both from the response not having a successful HTTP status and from JSON parsing failing // We catch errors both from the response not having a successful HTTP status and from JSON parsing failing
@@ -237,5 +238,3 @@ router.post('/transcribe-audio', urlencodedParser, async function (request, resp
response.status(500).send('Internal server error'); response.status(500).send('Internal server error');
} }
}); });
module.exports = { router };

View File

@@ -1,11 +1,10 @@
const express = require('express'); import express from 'express';
const fetch = require('node-fetch').default; import fetch from 'node-fetch';
const { jsonParser } = require('../../express-common'); import { jsonParser } from '../../express-common.js';
import { readSecret, SECRET_KEYS } from '../secrets.js';
const { readSecret, SECRET_KEYS } = require('../secrets'); export const router = express.Router();
const router = express.Router();
router.post('/generate', jsonParser, async function (request, response) { router.post('/generate', jsonParser, async function (request, response) {
if (!request.body) return response.sendStatus(400); if (!request.body) return response.sendStatus(400);
@@ -71,7 +70,6 @@ router.post('/generate', jsonParser, async function (request, response) {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
'cookie': `_jwt=${cookie}`, 'cookie': `_jwt=${cookie}`,
}, },
timeout: 0,
body: JSON.stringify(body), body: JSON.stringify(body),
}); });
@@ -81,6 +79,7 @@ router.post('/generate', jsonParser, async function (request, response) {
return response.status(500).send({ error: { message: result.statusText } }); return response.status(500).send({ error: { message: result.statusText } });
} }
/** @type {any} */
const data = await result.json(); const data = await result.json();
const output = data?.result?.data?.json?.outputs?.[0] || ''; const output = data?.result?.data?.json?.outputs?.[0] || '';
@@ -97,5 +96,3 @@ router.post('/generate', jsonParser, async function (request, response) {
return response.sendStatus(500); return response.sendStatus(500);
} }
}); });
module.exports = { router };

View File

@@ -1,14 +1,23 @@
const express = require('express'); import { Readable } from 'node:stream';
const fetch = require('node-fetch').default; import fetch from 'node-fetch';
const _ = require('lodash'); import express from 'express';
const Readable = require('stream').Readable; import _ from 'lodash';
const { jsonParser } = require('../../express-common'); import { jsonParser } from '../../express-common.js';
const { TEXTGEN_TYPES, TOGETHERAI_KEYS, OLLAMA_KEYS, INFERMATICAI_KEYS, OPENROUTER_KEYS, VLLM_KEYS, DREAMGEN_KEYS, FEATHERLESS_KEYS } = require('../../constants'); import {
const { forwardFetchResponse, trimV1, getConfigValue } = require('../../util'); TEXTGEN_TYPES,
const { setAdditionalHeaders } = require('../../additional-headers'); TOGETHERAI_KEYS,
OLLAMA_KEYS,
INFERMATICAI_KEYS,
OPENROUTER_KEYS,
VLLM_KEYS,
DREAMGEN_KEYS,
FEATHERLESS_KEYS,
} from '../../constants.js';
import { forwardFetchResponse, trimV1, getConfigValue } from '../../util.js';
import { setAdditionalHeaders } from '../../additional-headers.js';
const router = express.Router(); export const router = express.Router();
/** /**
* Special boy's steaming routine. Wrap this abomination into proper SSE stream. * Special boy's steaming routine. Wrap this abomination into proper SSE stream.
@@ -19,6 +28,10 @@ const router = express.Router();
*/ */
async function parseOllamaStream(jsonStream, request, response) { async function parseOllamaStream(jsonStream, request, response) {
try { try {
if (!jsonStream.body) {
throw new Error('No body in the response');
}
let partialData = ''; let partialData = '';
jsonStream.body.on('data', (data) => { jsonStream.body.on('data', (data) => {
const chunk = data.toString(); const chunk = data.toString();
@@ -144,6 +157,7 @@ router.post('/status', jsonParser, async function (request, response) {
return response.status(400); return response.status(400);
} }
/** @type {any} */
let data = await modelsReply.json(); let data = await modelsReply.json();
if (request.body.legacy_api) { if (request.body.legacy_api) {
@@ -181,6 +195,7 @@ router.post('/status', jsonParser, async function (request, response) {
const modelInfoReply = await fetch(modelInfoUrl, args); const modelInfoReply = await fetch(modelInfoUrl, args);
if (modelInfoReply.ok) { if (modelInfoReply.ok) {
/** @type {any} */
const modelInfo = await modelInfoReply.json(); const modelInfo = await modelInfoReply.json();
console.log('Ooba model info:', modelInfo); console.log('Ooba model info:', modelInfo);
@@ -197,6 +212,7 @@ router.post('/status', jsonParser, async function (request, response) {
const modelInfoReply = await fetch(modelInfoUrl, args); const modelInfoReply = await fetch(modelInfoUrl, args);
if (modelInfoReply.ok) { if (modelInfoReply.ok) {
/** @type {any} */
const modelInfo = await modelInfoReply.json(); const modelInfo = await modelInfoReply.json();
console.log('Tabby model info:', modelInfo); console.log('Tabby model info:', modelInfo);
@@ -350,6 +366,7 @@ router.post('/generate', jsonParser, async function (request, response) {
const completionsReply = await fetch(url, args); const completionsReply = await fetch(url, args);
if (completionsReply.ok) { if (completionsReply.ok) {
/** @type {any} */
const data = await completionsReply.json(); const data = await completionsReply.json();
console.log('Endpoint response:', data); console.log('Endpoint response:', data);
@@ -406,7 +423,6 @@ ollama.post('/download', jsonParser, async function (request, response) {
name: name, name: name,
stream: false, stream: false,
}), }),
timeout: 0,
}); });
if (!fetchResponse.ok) { if (!fetchResponse.ok) {
@@ -439,7 +455,6 @@ ollama.post('/caption-image', jsonParser, async function (request, response) {
images: [request.body.image], images: [request.body.image],
stream: false, stream: false,
}), }),
timeout: 0,
}); });
if (!fetchResponse.ok) { if (!fetchResponse.ok) {
@@ -447,6 +462,7 @@ ollama.post('/caption-image', jsonParser, async function (request, response) {
return response.status(500).send({ error: true }); return response.status(500).send({ error: true });
} }
/** @type {any} */
const data = await fetchResponse.json(); const data = await fetchResponse.json();
console.log('Ollama caption response:', data); console.log('Ollama caption response:', data);
@@ -478,7 +494,6 @@ llamacpp.post('/caption-image', jsonParser, async function (request, response) {
const fetchResponse = await fetch(`${baseUrl}/completion`, { const fetchResponse = await fetch(`${baseUrl}/completion`, {
method: 'POST', method: 'POST',
headers: { 'Content-Type': 'application/json' }, headers: { 'Content-Type': 'application/json' },
timeout: 0,
body: JSON.stringify({ body: JSON.stringify({
prompt: `USER:[img-1]${String(request.body.prompt).trim()}\nASSISTANT:`, prompt: `USER:[img-1]${String(request.body.prompt).trim()}\nASSISTANT:`,
image_data: [{ data: request.body.image, id: 1 }], image_data: [{ data: request.body.image, id: 1 }],
@@ -493,6 +508,7 @@ llamacpp.post('/caption-image', jsonParser, async function (request, response) {
return response.status(500).send({ error: true }); return response.status(500).send({ error: true });
} }
/** @type {any} */
const data = await fetchResponse.json(); const data = await fetchResponse.json();
console.log('LlamaCpp caption response:', data); console.log('LlamaCpp caption response:', data);
@@ -522,7 +538,6 @@ llamacpp.post('/props', jsonParser, async function (request, response) {
const fetchResponse = await fetch(`${baseUrl}/props`, { const fetchResponse = await fetch(`${baseUrl}/props`, {
method: 'GET', method: 'GET',
timeout: 0,
}); });
if (!fetchResponse.ok) { if (!fetchResponse.ok) {
@@ -557,7 +572,6 @@ llamacpp.post('/slots', jsonParser, async function (request, response) {
if (request.body.action === 'info') { if (request.body.action === 'info') {
fetchResponse = await fetch(`${baseUrl}/slots`, { fetchResponse = await fetch(`${baseUrl}/slots`, {
method: 'GET', method: 'GET',
timeout: 0,
}); });
} else { } else {
if (!/^\d+$/.test(request.body.id_slot)) { if (!/^\d+$/.test(request.body.id_slot)) {
@@ -570,7 +584,6 @@ llamacpp.post('/slots', jsonParser, async function (request, response) {
fetchResponse = await fetch(`${baseUrl}/slots/${request.body.id_slot}?action=${request.body.action}`, { fetchResponse = await fetch(`${baseUrl}/slots/${request.body.id_slot}?action=${request.body.action}`, {
method: 'POST', method: 'POST',
headers: { 'Content-Type': 'application/json' }, headers: { 'Content-Type': 'application/json' },
timeout: 0,
body: JSON.stringify({ body: JSON.stringify({
filename: request.body.action !== 'erase' ? `${request.body.filename}` : undefined, filename: request.body.action !== 'erase' ? `${request.body.filename}` : undefined,
}), }),
@@ -614,6 +627,7 @@ tabby.post('/download', jsonParser, async function (request, response) {
}); });
if (permissionResponse.ok) { if (permissionResponse.ok) {
/** @type {any} */
const permissionJson = await permissionResponse.json(); const permissionJson = await permissionResponse.json();
if (permissionJson['permission'] !== 'admin') { if (permissionJson['permission'] !== 'admin') {
@@ -641,5 +655,3 @@ tabby.post('/download', jsonParser, async function (request, response) {
router.use('/ollama', ollama); router.use('/ollama', ollama);
router.use('/llamacpp', llamacpp); router.use('/llamacpp', llamacpp);
router.use('/tabby', tabby); router.use('/tabby', tabby);
module.exports = { router };

View File

@@ -1,13 +1,14 @@
const fs = require('fs'); import fs from 'node:fs';
const path = require('path'); import path from 'node:path';
const express = require('express');
const sanitize = require('sanitize-filename');
const { jsonParser, urlencodedParser } = require('../express-common'); import express from 'express';
const { invalidateThumbnail } = require('./thumbnails'); import sanitize from 'sanitize-filename';
const { getImages } = require('../util');
const router = express.Router(); import { jsonParser, urlencodedParser } from '../express-common.js';
import { invalidateThumbnail } from './thumbnails.js';
import { getImages } from '../util.js';
export const router = express.Router();
router.post('/all', jsonParser, function (request, response) { router.post('/all', jsonParser, function (request, response) {
var images = getImages(request.user.directories.backgrounds); var images = getImages(request.user.directories.backgrounds);
@@ -72,5 +73,3 @@ router.post('/upload', urlencodedParser, function (request, response) {
response.sendStatus(500); response.sendStatus(500);
} }
}); });
module.exports = { router };

View File

@@ -1,23 +1,23 @@
const express = require('express'); import express from 'express';
const { jsonParser } = require('../express-common'); import { jsonParser } from '../express-common.js';
import { getPipeline, getRawImage } from '../transformers.mjs';
const TASK = 'image-to-text'; const TASK = 'image-to-text';
const router = express.Router(); export const router = express.Router();
router.post('/', jsonParser, async (req, res) => { router.post('/', jsonParser, async (req, res) => {
try { try {
const { image } = req.body; const { image } = req.body;
const module = await import('../transformers.mjs'); const rawImage = await getRawImage(image);
const rawImage = await module.default.getRawImage(image);
if (!rawImage) { if (!rawImage) {
console.log('Failed to parse captioned image'); console.log('Failed to parse captioned image');
return res.sendStatus(400); return res.sendStatus(400);
} }
const pipe = await module.default.getPipeline(TASK); const pipe = await getPipeline(TASK);
const result = await pipe(rawImage); const result = await pipe(rawImage);
const text = result[0].generated_text; const text = result[0].generated_text;
console.log('Image caption:', text); console.log('Image caption:', text);
@@ -28,5 +28,3 @@ router.post('/', jsonParser, async (req, res) => {
return res.sendStatus(500); return res.sendStatus(500);
} }
}); });
module.exports = { router };

View File

@@ -1,24 +1,25 @@
const path = require('path'); import path from 'node:path';
const fs = require('fs'); import fs from 'node:fs';
const fsPromises = require('fs').promises; import { promises as fsPromises } from 'node:fs';
const readline = require('readline'); import readline from 'node:readline';
const express = require('express'); import { Buffer } from 'node:buffer';
const sanitize = require('sanitize-filename');
const writeFileAtomicSync = require('write-file-atomic').sync;
const yaml = require('yaml');
const _ = require('lodash');
const mime = require('mime-types');
const jimp = require('jimp'); import express from 'express';
import sanitize from 'sanitize-filename';
import { sync as writeFileAtomicSync } from 'write-file-atomic';
import yaml from 'yaml';
import _ from 'lodash';
import mime from 'mime-types';
import jimp from 'jimp';
const { AVATAR_WIDTH, AVATAR_HEIGHT } = require('../constants'); import { AVATAR_WIDTH, AVATAR_HEIGHT } from '../constants.js';
const { jsonParser, urlencodedParser } = require('../express-common'); import { jsonParser, urlencodedParser } from '../express-common.js';
const { deepMerge, humanizedISO8601DateTime, tryParse, extractFileFromZipBuffer } = require('../util'); import { deepMerge, humanizedISO8601DateTime, tryParse, extractFileFromZipBuffer } from '../util.js';
const { TavernCardValidator } = require('../validator/TavernCardValidator'); import { TavernCardValidator } from '../validator/TavernCardValidator.js';
const characterCardParser = require('../character-card-parser.js'); import { parse, write } from '../character-card-parser.js';
const { readWorldInfoFile } = require('./worldinfo'); import { readWorldInfoFile } from './worldinfo.js';
const { invalidateThumbnail } = require('./thumbnails'); import { invalidateThumbnail } from './thumbnails.js';
const { importRisuSprites } = require('./sprites'); import { importRisuSprites } from './sprites.js';
const defaultAvatarPath = './public/img/ai4.png'; const defaultAvatarPath = './public/img/ai4.png';
// KV-store for parsed character data // KV-store for parsed character data
@@ -37,7 +38,7 @@ async function readCharacterData(inputFile, inputFormat = 'png') {
return characterDataCache.get(cacheKey); return characterDataCache.get(cacheKey);
} }
const result = characterCardParser.parse(inputFile, inputFormat); const result = parse(inputFile, inputFormat);
characterDataCache.set(cacheKey, result); characterDataCache.set(cacheKey, result);
return result; return result;
} }
@@ -76,7 +77,7 @@ async function writeCharacterData(inputFile, data, outputFile, request, crop = u
const inputImage = await getInputImage(); const inputImage = await getInputImage();
// Get the chunks // Get the chunks
const outputImage = characterCardParser.write(inputImage, data); const outputImage = write(inputImage, data);
const outputImagePath = path.join(request.user.directories.characters, `${outputFile}.png`); const outputImagePath = path.join(request.user.directories.characters, `${outputFile}.png`);
writeFileAtomicSync(outputImagePath, outputImage); writeFileAtomicSync(outputImagePath, outputImage);
@@ -188,7 +189,7 @@ const calculateDataSize = (data) => {
* processCharacter - Process a given character, read its data and calculate its statistics. * processCharacter - Process a given character, read its data and calculate its statistics.
* *
* @param {string} item The name of the character. * @param {string} item The name of the character.
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @return {Promise<object>} A Promise that resolves when the character processing is done. * @return {Promise<object>} A Promise that resolves when the character processing is done.
*/ */
const processCharacter = async (item, directories) => { const processCharacter = async (item, directories) => {
@@ -232,7 +233,7 @@ const processCharacter = async (item, directories) => {
/** /**
* Convert a character object to Spec V2 format. * Convert a character object to Spec V2 format.
* @param {object} jsonObject Character object * @param {object} jsonObject Character object
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @param {boolean} hoistDate Will set the chat and create_date fields to the current date if they are missing * @param {boolean} hoistDate Will set the chat and create_date fields to the current date if they are missing
* @returns {object} Character object in Spec V2 format * @returns {object} Character object in Spec V2 format
*/ */
@@ -252,7 +253,7 @@ function getCharaCardV2(jsonObject, directories, hoistDate = true) {
/** /**
* Convert a character object to Spec V2 format. * Convert a character object to Spec V2 format.
* @param {object} char Character object * @param {object} char Character object
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @returns {object} Character object in Spec V2 format * @returns {object} Character object in Spec V2 format
*/ */
function convertToV2(char, directories) { function convertToV2(char, directories) {
@@ -342,7 +343,7 @@ function readFromV2(char) {
/** /**
* Format character data to Spec V2 format. * Format character data to Spec V2 format.
* @param {object} data Character data * @param {object} data Character data
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @returns * @returns
*/ */
function charaFormatData(data, directories) { function charaFormatData(data, directories) {
@@ -715,7 +716,7 @@ async function importFromPng(uploadPath, { request }, preservedFileName) {
return ''; return '';
} }
const router = express.Router(); export const router = express.Router();
router.post('/create', urlencodedParser, async function (request, response) { router.post('/create', urlencodedParser, async function (request, response) {
try { try {
@@ -1081,7 +1082,7 @@ router.post('/chats', jsonParser, async function (request, response) {
/** /**
* Gets the name for the uploaded PNG file. * Gets the name for the uploaded PNG file.
* @param {string} file File name * @param {string} file File name
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @returns {string} - The name for the uploaded PNG file * @returns {string} - The name for the uploaded PNG file
*/ */
function getPngName(file, directories) { function getPngName(file, directories) {
@@ -1231,5 +1232,3 @@ router.post('/export', jsonParser, async function (request, response) {
response.sendStatus(500); response.sendStatus(500);
} }
}); });
module.exports = { router };

View File

@@ -1,13 +1,15 @@
const fs = require('fs'); import fs from 'node:fs';
const path = require('path'); import path from 'node:path';
const readline = require('readline'); import readline from 'node:readline';
const express = require('express'); import process from 'node:process';
const sanitize = require('sanitize-filename');
const writeFileAtomicSync = require('write-file-atomic').sync;
const _ = require('lodash');
const { jsonParser, urlencodedParser } = require('../express-common'); import express from 'express';
const { getConfigValue, humanizedISO8601DateTime, tryParse, generateTimestamp, removeOldBackups } = require('../util'); import sanitize from 'sanitize-filename';
import { sync as writeFileAtomicSync } from 'write-file-atomic';
import _ from 'lodash';
import { jsonParser, urlencodedParser } from '../express-common.js';
import { getConfigValue, humanizedISO8601DateTime, tryParse, generateTimestamp, removeOldBackups } from '../util.js';
/** /**
* Saves a chat to the backups directory. * Saves a chat to the backups directory.
@@ -188,7 +190,7 @@ function flattenChubChat(userName, characterName, lines) {
return (lines ?? []).map(convert).join('\n'); return (lines ?? []).map(convert).join('\n');
} }
const router = express.Router(); export const router = express.Router();
router.post('/save', jsonParser, function (request, response) { router.post('/save', jsonParser, function (request, response) {
try { try {
@@ -514,5 +516,3 @@ router.post('/group/save', jsonParser, (request, response) => {
getBackupFunction(request.user.profile.handle)(request.user.directories.backups, String(id), jsonlData); getBackupFunction(request.user.profile.handle)(request.user.directories.backups, String(id), jsonlData);
return response.send({ ok: true }); return response.send({ ok: true });
}); });
module.exports = { router };

View File

@@ -1,9 +1,11 @@
const express = require('express'); import express from 'express';
const { jsonParser } = require('../express-common');
import { getPipeline } from '../transformers.mjs';
import { jsonParser } from '../express-common.js';
const TASK = 'text-classification'; const TASK = 'text-classification';
const router = express.Router(); export const router = express.Router();
/** /**
* @type {Map<string, object>} Cache for classification results * @type {Map<string, object>} Cache for classification results
@@ -12,8 +14,7 @@ const cacheObject = new Map();
router.post('/labels', jsonParser, async (req, res) => { router.post('/labels', jsonParser, async (req, res) => {
try { try {
const module = await import('../transformers.mjs'); const pipe = await getPipeline(TASK);
const pipe = await module.default.getPipeline(TASK);
const result = Object.keys(pipe.model.config.label2id); const result = Object.keys(pipe.model.config.label2id);
return res.json({ labels: result }); return res.json({ labels: result });
} catch (error) { } catch (error) {
@@ -35,8 +36,7 @@ router.post('/', jsonParser, async (req, res) => {
if (cacheObject.has(text)) { if (cacheObject.has(text)) {
return cacheObject.get(text); return cacheObject.get(text);
} else { } else {
const module = await import('../transformers.mjs'); const pipe = await getPipeline(TASK);
const pipe = await module.default.getPipeline(TASK);
const result = await pipe(text, { topk: 5 }); const result = await pipe(text, { topk: 5 });
result.sort((a, b) => b.score - a.score); result.sort((a, b) => b.score - a.score);
cacheObject.set(text, result); cacheObject.set(text, result);
@@ -54,5 +54,3 @@ router.post('/', jsonParser, async (req, res) => {
return res.sendStatus(500); return res.sendStatus(500);
} }
}); });
module.exports = { router };

View File

@@ -1,16 +1,21 @@
const fs = require('fs'); import fs from 'node:fs';
const path = require('path'); import path from 'node:path';
const express = require('express'); import process from 'node:process';
const fetch = require('node-fetch').default; import { Buffer } from 'node:buffer';
const sanitize = require('sanitize-filename');
const { getConfigValue, color } = require('../util'); import express from 'express';
const { jsonParser } = require('../express-common'); import fetch from 'node-fetch';
const writeFileAtomicSync = require('write-file-atomic').sync; import sanitize from 'sanitize-filename';
import { sync as writeFileAtomicSync } from 'write-file-atomic';
import { getConfigValue, color } from '../util.js';
import { jsonParser } from '../express-common.js';
import { write } from '../character-card-parser.js';
const contentDirectory = path.join(process.cwd(), 'default/content'); const contentDirectory = path.join(process.cwd(), 'default/content');
const scaffoldDirectory = path.join(process.cwd(), 'default/scaffold'); const scaffoldDirectory = path.join(process.cwd(), 'default/scaffold');
const contentIndexPath = path.join(contentDirectory, 'index.json'); const contentIndexPath = path.join(contentDirectory, 'index.json');
const scaffoldIndexPath = path.join(scaffoldDirectory, 'index.json'); const scaffoldIndexPath = path.join(scaffoldDirectory, 'index.json');
const characterCardParser = require('../character-card-parser.js');
const WHITELIST_GENERIC_URL_DOWNLOAD_SOURCES = getConfigValue('whitelistImportDomains', []); const WHITELIST_GENERIC_URL_DOWNLOAD_SOURCES = getConfigValue('whitelistImportDomains', []);
@@ -26,7 +31,7 @@ const WHITELIST_GENERIC_URL_DOWNLOAD_SOURCES = getConfigValue('whitelistImportDo
* @typedef {string} ContentType * @typedef {string} ContentType
* @enum {string} * @enum {string}
*/ */
const CONTENT_TYPES = { export const CONTENT_TYPES = {
SETTINGS: 'settings', SETTINGS: 'settings',
CHARACTER: 'character', CHARACTER: 'character',
SPRITES: 'sprites', SPRITES: 'sprites',
@@ -48,10 +53,10 @@ const CONTENT_TYPES = {
/** /**
* Gets the default presets from the content directory. * Gets the default presets from the content directory.
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @returns {object[]} Array of default presets * @returns {object[]} Array of default presets
*/ */
function getDefaultPresets(directories) { export function getDefaultPresets(directories) {
try { try {
const contentIndex = getContentIndex(); const contentIndex = getContentIndex();
const presets = []; const presets = [];
@@ -76,7 +81,7 @@ function getDefaultPresets(directories) {
* @param {string} filename Name of the file to get * @param {string} filename Name of the file to get
* @returns {object | null} JSON object or null if the file doesn't exist * @returns {object | null} JSON object or null if the file doesn't exist
*/ */
function getDefaultPresetFile(filename) { export function getDefaultPresetFile(filename) {
try { try {
const contentPath = path.join(contentDirectory, filename); const contentPath = path.join(contentDirectory, filename);
@@ -95,7 +100,7 @@ function getDefaultPresetFile(filename) {
/** /**
* Seeds content for a user. * Seeds content for a user.
* @param {ContentItem[]} contentIndex Content index * @param {ContentItem[]} contentIndex Content index
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @param {string[]} forceCategories List of categories to force check (even if content check is skipped) * @param {string[]} forceCategories List of categories to force check (even if content check is skipped)
* @returns {Promise<boolean>} Whether any content was added * @returns {Promise<boolean>} Whether any content was added
*/ */
@@ -154,11 +159,11 @@ async function seedContentForUser(contentIndex, directories, forceCategories) {
/** /**
* Checks for new content and seeds it for all users. * Checks for new content and seeds it for all users.
* @param {import('../users').UserDirectoryList[]} directoriesList List of user directories * @param {import('../users.js').UserDirectoryList[]} directoriesList List of user directories
* @param {string[]} forceCategories List of categories to force check (even if content check is skipped) * @param {string[]} forceCategories List of categories to force check (even if content check is skipped)
* @returns {Promise<void>} * @returns {Promise<void>}
*/ */
async function checkForNewContent(directoriesList, forceCategories = []) { export async function checkForNewContent(directoriesList, forceCategories = []) {
try { try {
const contentCheckSkip = getConfigValue('skipContentCheck', false); const contentCheckSkip = getConfigValue('skipContentCheck', false);
if (contentCheckSkip && forceCategories?.length === 0) { if (contentCheckSkip && forceCategories?.length === 0) {
@@ -224,7 +229,7 @@ function getContentIndex() {
* @param {'json'|'string'|'raw'} format Format of content * @param {'json'|'string'|'raw'} format Format of content
* @returns {string[]|Buffer[]} Array of content * @returns {string[]|Buffer[]} Array of content
*/ */
function getContentOfType(type, format) { export function getContentOfType(type, format) {
const contentIndex = getContentIndex(); const contentIndex = getContentIndex();
const indexItems = contentIndex.filter((item) => item.type === type && item.folder); const indexItems = contentIndex.filter((item) => item.type === type && item.folder);
const files = []; const files = [];
@@ -256,7 +261,7 @@ function getContentOfType(type, format) {
/** /**
* Gets the target directory for the specified asset type. * Gets the target directory for the specified asset type.
* @param {ContentType} type Asset type * @param {ContentType} type Asset type
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @returns {string | null} Target directory * @returns {string | null} Target directory
*/ */
function getTargetByType(type, directories) { function getTargetByType(type, directories) {
@@ -375,6 +380,7 @@ async function downloadPygmalionCharacter(id) {
throw new Error('Failed to download character'); throw new Error('Failed to download character');
} }
/** @type {any} */
const jsonData = await result.json(); const jsonData = await result.json();
const characterData = jsonData?.character; const characterData = jsonData?.character;
@@ -394,7 +400,7 @@ async function downloadPygmalionCharacter(id) {
const avatarResult = await fetch(avatarUrl); const avatarResult = await fetch(avatarUrl);
const avatarBuffer = await avatarResult.buffer(); const avatarBuffer = await avatarResult.buffer();
const cardBuffer = characterCardParser.write(avatarBuffer, JSON.stringify(characterData)); const cardBuffer = write(avatarBuffer, JSON.stringify(characterData));
return { return {
buffer: cardBuffer, buffer: cardBuffer,
@@ -467,6 +473,7 @@ async function downloadJannyCharacter(uuid) {
}); });
if (result.ok) { if (result.ok) {
/** @type {any} */
const downloadResult = await result.json(); const downloadResult = await result.json();
if (downloadResult.status === 'ok') { if (downloadResult.status === 'ok') {
const imageResult = await fetch(downloadResult.downloadUrl); const imageResult = await fetch(downloadResult.downloadUrl);
@@ -618,7 +625,7 @@ function isHostWhitelisted(host) {
return WHITELIST_GENERIC_URL_DOWNLOAD_SOURCES.includes(host); return WHITELIST_GENERIC_URL_DOWNLOAD_SOURCES.includes(host);
} }
const router = express.Router(); export const router = express.Router();
router.post('/importURL', jsonParser, async (request, response) => { router.post('/importURL', jsonParser, async (request, response) => {
if (!request.body.url) { if (!request.body.url) {
@@ -753,12 +760,3 @@ router.post('/importUUID', jsonParser, async (request, response) => {
return response.sendStatus(500); return response.sendStatus(500);
} }
}); });
module.exports = {
CONTENT_TYPES,
checkForNewContent,
getDefaultPresets,
getDefaultPresetFile,
getContentOfType,
router,
};

View File

@@ -1,10 +1,12 @@
const path = require('path'); import path from 'node:path';
const fs = require('fs'); import fs from 'node:fs';
const express = require('express');
const { default: simpleGit } = require('simple-git'); import express from 'express';
const sanitize = require('sanitize-filename'); import sanitize from 'sanitize-filename';
const { PUBLIC_DIRECTORIES } = require('../constants'); import { default as simpleGit } from 'simple-git';
const { jsonParser } = require('../express-common');
import { PUBLIC_DIRECTORIES } from '../constants.js';
import { jsonParser } from '../express-common.js';
/** /**
* This function extracts the extension information from the manifest file. * This function extracts the extension information from the manifest file.
@@ -47,7 +49,7 @@ async function checkIfRepoIsUpToDate(extensionPath) {
}; };
} }
const router = express.Router(); export const router = express.Router();
/** /**
* HTTP POST handler function to clone a git repository from a provided URL, read the extension manifest, * HTTP POST handler function to clone a git repository from a provided URL, read the extension manifest,
@@ -240,5 +242,3 @@ router.get('/discover', jsonParser, function (request, response) {
return response.send(extensions); return response.send(extensions);
}); });
module.exports = { router };

View File

@@ -1,12 +1,15 @@
const path = require('path'); import path from 'node:path';
const fs = require('fs'); import fs from 'node:fs';
const writeFileSyncAtomic = require('write-file-atomic').sync;
const express = require('express'); import express from 'express';
const sanitize = require('sanitize-filename'); import sanitize from 'sanitize-filename';
const router = express.Router(); import { sync as writeFileSyncAtomic } from 'write-file-atomic';
const { validateAssetFileName } = require('./assets');
const { jsonParser } = require('../express-common'); import { validateAssetFileName } from './assets.js';
const { clientRelativePath } = require('../util'); import { jsonParser } from '../express-common.js';
import { clientRelativePath } from '../util.js';
export const router = express.Router();
router.post('/sanitize-filename', jsonParser, async (request, response) => { router.post('/sanitize-filename', jsonParser, async (request, response) => {
try { try {
@@ -97,5 +100,3 @@ router.post('/verify', jsonParser, async (request, response) => {
return response.sendStatus(500); return response.sendStatus(500);
} }
}); });
module.exports = { router };

View File

@@ -1,12 +1,13 @@
const { readSecret, SECRET_KEYS } = require('./secrets'); import fetch from 'node-fetch';
const fetch = require('node-fetch').default; import express from 'express';
const express = require('express');
const { jsonParser } = require('../express-common'); import { readSecret, SECRET_KEYS } from './secrets.js';
const { GEMINI_SAFETY } = require('../constants'); import { jsonParser } from '../express-common.js';
import { GEMINI_SAFETY } from '../constants.js';
const API_MAKERSUITE = 'https://generativelanguage.googleapis.com'; const API_MAKERSUITE = 'https://generativelanguage.googleapis.com';
const router = express.Router(); export const router = express.Router();
router.post('/caption-image', jsonParser, async (request, response) => { router.post('/caption-image', jsonParser, async (request, response) => {
try { try {
@@ -39,7 +40,6 @@ router.post('/caption-image', jsonParser, async (request, response) => {
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
}, },
timeout: 0,
}); });
if (!result.ok) { if (!result.ok) {
@@ -48,6 +48,7 @@ router.post('/caption-image', jsonParser, async (request, response) => {
return response.status(result.status).send({ error: true }); return response.status(result.status).send({ error: true });
} }
/** @type {any} */
const data = await result.json(); const data = await result.json();
console.log('Multimodal captioning response', data); console.log('Multimodal captioning response', data);
@@ -67,5 +68,3 @@ router.post('/caption-image', jsonParser, async (request, response) => {
response.status(500).send('Internal server error'); response.status(500).send('Internal server error');
} }
}); });
module.exports = { router };

View File

@@ -1,13 +1,14 @@
const fs = require('fs'); import fs from 'node:fs';
const path = require('path'); import path from 'node:path';
const express = require('express');
const sanitize = require('sanitize-filename');
const writeFileAtomicSync = require('write-file-atomic').sync;
const { jsonParser } = require('../express-common'); import express from 'express';
const { humanizedISO8601DateTime } = require('../util'); import sanitize from 'sanitize-filename';
import { sync as writeFileAtomicSync } from 'write-file-atomic';
const router = express.Router(); import { jsonParser } from '../express-common.js';
import { humanizedISO8601DateTime } from '../util.js';
export const router = express.Router();
router.post('/all', jsonParser, (request, response) => { router.post('/all', jsonParser, (request, response) => {
const groups = []; const groups = [];
@@ -131,5 +132,3 @@ router.post('/delete', jsonParser, async (request, response) => {
return response.send({ ok: true }); return response.send({ ok: true });
}); });
module.exports = { router };

View File

@@ -1,14 +1,14 @@
const fetch = require('node-fetch').default; import fetch from 'node-fetch';
const express = require('express'); import express from 'express';
const { AIHorde, ModelGenerationInputStableSamplers, ModelInterrogationFormTypes, HordeAsyncRequestStates } = require('@zeldafan0225/ai_horde'); import { AIHorde, ModelGenerationInputStableSamplers, ModelInterrogationFormTypes, HordeAsyncRequestStates } from '@zeldafan0225/ai_horde';
const { getVersion, delay, Cache } = require('../util'); import { getVersion, delay, Cache } from '../util.js';
const { readSecret, SECRET_KEYS } = require('./secrets'); import { readSecret, SECRET_KEYS } from './secrets.js';
const { jsonParser } = require('../express-common'); import { jsonParser } from '../express-common.js';
const ANONYMOUS_KEY = '0000000000'; const ANONYMOUS_KEY = '0000000000';
const HORDE_TEXT_MODEL_METADATA_URL = 'https://raw.githubusercontent.com/db0/AI-Horde-text-model-reference/main/db.json'; const HORDE_TEXT_MODEL_METADATA_URL = 'https://raw.githubusercontent.com/db0/AI-Horde-text-model-reference/main/db.json';
const cache = new Cache(60 * 1000); const cache = new Cache(60 * 1000);
const router = express.Router(); export const router = express.Router();
/** /**
* Returns the AIHorde client agent. * Returns the AIHorde client agent.
@@ -403,5 +403,3 @@ router.post('/generate-image', jsonParser, async (request, response) => {
return response.sendStatus(500); return response.sendStatus(500);
} }
}); });
module.exports = { router };

View File

@@ -1,10 +1,12 @@
const fs = require('fs'); import fs from 'node:fs';
const path = require('path'); import path from 'node:path';
const express = require('express'); import { Buffer } from 'node:buffer';
const sanitize = require('sanitize-filename');
const { jsonParser } = require('../express-common'); import express from 'express';
const { clientRelativePath, removeFileExtension, getImages } = require('../util'); import sanitize from 'sanitize-filename';
import { jsonParser } from '../express-common.js';
import { clientRelativePath, removeFileExtension, getImages } from '../util.js';
/** /**
* Ensure the directory for the provided file path exists. * Ensure the directory for the provided file path exists.
@@ -21,7 +23,7 @@ function ensureDirectoryExistence(filePath) {
fs.mkdirSync(dirname); fs.mkdirSync(dirname);
} }
const router = express.Router(); export const router = express.Router();
/** /**
* Endpoint to handle image uploads. * Endpoint to handle image uploads.
@@ -66,7 +68,7 @@ router.post('/upload', jsonParser, async (request, response) => {
ensureDirectoryExistence(pathToNewFile); ensureDirectoryExistence(pathToNewFile);
const imageBuffer = Buffer.from(base64Data, 'base64'); const imageBuffer = Buffer.from(base64Data, 'base64');
await fs.promises.writeFile(pathToNewFile, imageBuffer); await fs.promises.writeFile(pathToNewFile, new Uint8Array(imageBuffer));
response.send({ path: clientRelativePath(request.user.directories.root, pathToNewFile) }); response.send({ path: clientRelativePath(request.user.directories.root, pathToNewFile) });
} catch (error) { } catch (error) {
console.log(error); console.log(error);
@@ -89,5 +91,3 @@ router.post('/list/:folder', (request, response) => {
return response.status(500).send({ error: 'Unable to retrieve files' }); return response.status(500).send({ error: 'Unable to retrieve files' });
} }
}); });
module.exports = { router };

View File

@@ -1,11 +1,11 @@
const path = require('path'); import path from 'node:path';
const express = require('express'); import express from 'express';
const sanitize = require('sanitize-filename'); import sanitize from 'sanitize-filename';
const writeFileAtomicSync = require('write-file-atomic').sync; import { sync as writeFileAtomicSync } from 'write-file-atomic';
const { jsonParser } = require('../express-common'); import { jsonParser } from '../express-common.js';
const router = express.Router(); export const router = express.Router();
router.post('/save', jsonParser, (request, response) => { router.post('/save', jsonParser, (request, response) => {
if (!request.body || !request.body.name) { if (!request.body || !request.body.name) {
@@ -17,5 +17,3 @@ router.post('/save', jsonParser, (request, response) => {
return response.sendStatus(200); return response.sendStatus(200);
}); });
module.exports = { router };

View File

@@ -1,9 +1,12 @@
const fetch = require('node-fetch').default; import util from 'node:util';
const express = require('express'); import { Buffer } from 'node:buffer';
const util = require('util');
const { readSecret, SECRET_KEYS } = require('./secrets'); import fetch from 'node-fetch';
const { readAllChunks, extractFileFromZipBuffer, forwardFetchResponse } = require('../util'); import express from 'express';
const { jsonParser } = require('../express-common');
import { readSecret, SECRET_KEYS } from './secrets.js';
import { readAllChunks, extractFileFromZipBuffer, forwardFetchResponse } from '../util.js';
import { jsonParser } from '../express-common.js';
const API_NOVELAI = 'https://api.novelai.net'; const API_NOVELAI = 'https://api.novelai.net';
const TEXT_NOVELAI = 'https://text.novelai.net'; const TEXT_NOVELAI = 'https://text.novelai.net';
@@ -110,7 +113,7 @@ function getRepPenaltyWhitelist(model) {
return null; return null;
} }
const router = express.Router(); export const router = express.Router();
router.post('/status', jsonParser, async function (req, res) { router.post('/status', jsonParser, async function (req, res) {
if (!req.body) return res.sendStatus(400); if (!req.body) return res.sendStatus(400);
@@ -249,7 +252,7 @@ router.post('/generate', jsonParser, async function (req, res) {
try { try {
const baseURL = (req.body.model.includes('kayra') || req.body.model.includes('erato')) ? TEXT_NOVELAI : API_NOVELAI; const baseURL = (req.body.model.includes('kayra') || req.body.model.includes('erato')) ? TEXT_NOVELAI : API_NOVELAI;
const url = req.body.streaming ? `${baseURL}/ai/generate-stream` : `${baseURL}/ai/generate`; const url = req.body.streaming ? `${baseURL}/ai/generate-stream` : `${baseURL}/ai/generate`;
const response = await fetch(url, { method: 'POST', timeout: 0, ...args }); const response = await fetch(url, { method: 'POST', ...args });
if (req.body.streaming) { if (req.body.streaming) {
// Pipe remote SSE stream to Express response // Pipe remote SSE stream to Express response
@@ -271,6 +274,7 @@ router.post('/generate', jsonParser, async function (req, res) {
return res.status(response.status).send({ error: { message } }); return res.status(response.status).send({ error: { message } });
} }
/** @type {any} */
const data = await response.json(); const data = await response.json();
console.log('NovelAI Output', data?.output); console.log('NovelAI Output', data?.output);
return res.send(data); return res.send(data);
@@ -413,7 +417,6 @@ router.post('/generate-voice', jsonParser, async (request, response) => {
'Authorization': `Bearer ${token}`, 'Authorization': `Bearer ${token}`,
'Accept': 'audio/mpeg', 'Accept': 'audio/mpeg',
}, },
timeout: 0,
}); });
if (!result.ok) { if (!result.ok) {
@@ -423,7 +426,7 @@ router.post('/generate-voice', jsonParser, async (request, response) => {
} }
const chunks = await readAllChunks(result.body); const chunks = await readAllChunks(result.body);
const buffer = Buffer.concat(chunks); const buffer = Buffer.concat(chunks.map(chunk => new Uint8Array(chunk)));
response.setHeader('Content-Type', 'audio/mpeg'); response.setHeader('Content-Type', 'audio/mpeg');
return response.send(buffer); return response.send(buffer);
} }
@@ -432,5 +435,3 @@ router.post('/generate-voice', jsonParser, async (request, response) => {
return response.sendStatus(500); return response.sendStatus(500);
} }
}); });
module.exports = { router };

View File

@@ -1,14 +1,17 @@
const { readSecret, SECRET_KEYS } = require('./secrets'); import fs from 'node:fs';
const fetch = require('node-fetch').default; import { Buffer } from 'node:buffer';
const express = require('express');
const FormData = require('form-data');
const fs = require('fs');
const { jsonParser, urlencodedParser } = require('../express-common');
const { getConfigValue, mergeObjectWithYaml, excludeKeysByYaml, trimV1 } = require('../util');
const { setAdditionalHeaders } = require('../additional-headers');
const { OPENROUTER_HEADERS } = require('../constants');
const router = express.Router(); import fetch from 'node-fetch';
import FormData from 'form-data';
import express from 'express';
import { jsonParser, urlencodedParser } from '../express-common.js';
import { getConfigValue, mergeObjectWithYaml, excludeKeysByYaml, trimV1 } from '../util.js';
import { setAdditionalHeaders } from '../additional-headers.js';
import { readSecret, SECRET_KEYS } from './secrets.js';
import { OPENROUTER_HEADERS } from '../constants.js';
export const router = express.Router();
router.post('/caption-image', jsonParser, async (request, response) => { router.post('/caption-image', jsonParser, async (request, response) => {
try { try {
@@ -151,7 +154,6 @@ router.post('/caption-image', jsonParser, async (request, response) => {
...headers, ...headers,
}, },
body: JSON.stringify(body), body: JSON.stringify(body),
timeout: 0,
}); });
if (!result.ok) { if (!result.ok) {
@@ -160,6 +162,7 @@ router.post('/caption-image', jsonParser, async (request, response) => {
return response.status(500).send(text); return response.status(500).send(text);
} }
/** @type {any} */
const data = await result.json(); const data = await result.json();
console.log('Multimodal captioning response', data); console.log('Multimodal captioning response', data);
const caption = data?.choices[0]?.message?.content; const caption = data?.choices[0]?.message?.content;
@@ -281,7 +284,6 @@ router.post('/generate-image', jsonParser, async (request, response) => {
Authorization: `Bearer ${key}`, Authorization: `Bearer ${key}`,
}, },
body: JSON.stringify(request.body), body: JSON.stringify(request.body),
timeout: 0,
}); });
if (!result.ok) { if (!result.ok) {
@@ -341,5 +343,3 @@ custom.post('/generate-voice', jsonParser, async (request, response) => {
}); });
router.use('/custom', custom); router.use('/custom', custom);
module.exports = { router };

View File

@@ -1,7 +1,7 @@
const express = require('express'); import express from 'express';
const { jsonParser } = require('../express-common'); import { jsonParser } from '../express-common.js';
const router = express.Router(); export const router = express.Router();
const API_OPENROUTER = 'https://openrouter.ai/api/v1'; const API_OPENROUTER = 'https://openrouter.ai/api/v1';
router.post('/models/multimodal', jsonParser, async (_req, res) => { router.post('/models/multimodal', jsonParser, async (_req, res) => {
@@ -28,5 +28,3 @@ router.post('/models/multimodal', jsonParser, async (_req, res) => {
return res.sendStatus(500); return res.sendStatus(500);
} }
}); });
module.exports = { router };

View File

@@ -1,15 +1,17 @@
const fs = require('fs'); import fs from 'node:fs';
const path = require('path'); import path from 'node:path';
const express = require('express');
const sanitize = require('sanitize-filename'); import express from 'express';
const writeFileAtomicSync = require('write-file-atomic').sync; import sanitize from 'sanitize-filename';
const { getDefaultPresetFile, getDefaultPresets } = require('./content-manager'); import { sync as writeFileAtomicSync } from 'write-file-atomic';
const { jsonParser } = require('../express-common');
import { getDefaultPresetFile, getDefaultPresets } from './content-manager.js';
import { jsonParser } from '../express-common.js';
/** /**
* Gets the folder and extension for the preset settings based on the API source ID. * Gets the folder and extension for the preset settings based on the API source ID.
* @param {string} apiId API source ID * @param {string} apiId API source ID
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @returns {object} Object containing the folder and extension for the preset settings * @returns {object} Object containing the folder and extension for the preset settings
*/ */
function getPresetSettingsByAPI(apiId, directories) { function getPresetSettingsByAPI(apiId, directories) {
@@ -34,7 +36,7 @@ function getPresetSettingsByAPI(apiId, directories) {
} }
} }
const router = express.Router(); export const router = express.Router();
router.post('/save', jsonParser, function (request, response) { router.post('/save', jsonParser, function (request, response) {
const name = sanitize(request.body.name); const name = sanitize(request.body.name);
@@ -127,5 +129,3 @@ router.post('/delete-openai', jsonParser, function (request, response) {
return response.send({ error: true }); return response.send({ error: true });
}); });
module.exports = { router };

View File

@@ -1,12 +1,13 @@
const fs = require('fs'); import fs from 'node:fs';
const path = require('path'); import path from 'node:path';
const express = require('express');
const sanitize = require('sanitize-filename');
const writeFileAtomicSync = require('write-file-atomic').sync;
const { jsonParser } = require('../express-common'); import express from 'express';
import sanitize from 'sanitize-filename';
import { sync as writeFileAtomicSync } from 'write-file-atomic';
const router = express.Router(); import { jsonParser } from '../express-common.js';
export const router = express.Router();
router.post('/save', jsonParser, (request, response) => { router.post('/save', jsonParser, (request, response) => {
if (!request.body || !request.body.name) { if (!request.body || !request.body.name) {
@@ -31,5 +32,3 @@ router.post('/delete', jsonParser, (request, response) => {
return response.sendStatus(200); return response.sendStatus(200);
}); });
module.exports = { router };

View File

@@ -1,9 +1,11 @@
const fetch = require('node-fetch').default; import fetch from 'node-fetch';
const express = require('express'); import express from 'express';
const { readSecret, SECRET_KEYS } = require('./secrets');
const { jsonParser } = require('../express-common');
const router = express.Router(); import { decode } from 'html-entities';
import { readSecret, SECRET_KEYS } from './secrets.js';
import { jsonParser } from '../express-common.js';
export const router = express.Router();
// Cosplay as Chrome // Cosplay as Chrome
const visitHeaders = { const visitHeaders = {
@@ -29,7 +31,6 @@ const visitHeaders = {
* @returns {Promise<string>} Transcript text * @returns {Promise<string>} Transcript text
*/ */
async function extractTranscript(videoPageBody, lang) { async function extractTranscript(videoPageBody, lang) {
const he = require('he');
const RE_XML_TRANSCRIPT = /<text start="([^"]*)" dur="([^"]*)">([^<]*)<\/text>/g; const RE_XML_TRANSCRIPT = /<text start="([^"]*)" dur="([^"]*)">([^<]*)<\/text>/g;
const splittedHTML = videoPageBody.split('"captions":'); const splittedHTML = videoPageBody.split('"captions":');
@@ -84,7 +85,7 @@ async function extractTranscript(videoPageBody, lang) {
lang: lang ?? captions.captionTracks[0].languageCode, lang: lang ?? captions.captionTracks[0].languageCode,
})); }));
// The text is double-encoded // The text is double-encoded
const transcriptText = transcript.map((line) => he.decode(he.decode(line.text))).join(' '); const transcriptText = transcript.map((line) => decode(decode(line.text))).join(' ');
return transcriptText; return transcriptText;
} }
@@ -263,5 +264,3 @@ router.post('/visit', jsonParser, async (request, response) => {
return response.sendStatus(500); return response.sendStatus(500);
} }
}); });
module.exports = { router };

View File

@@ -1,12 +1,13 @@
const fs = require('fs'); import fs from 'node:fs';
const path = require('path'); import path from 'node:path';
const express = require('express');
const { getConfigValue } = require('../util');
const writeFileAtomicSync = require('write-file-atomic').sync;
const { jsonParser } = require('../express-common');
const SECRETS_FILE = 'secrets.json'; import express from 'express';
const SECRET_KEYS = { import { sync as writeFileAtomicSync } from 'write-file-atomic';
import { getConfigValue } from '../util.js';
import { jsonParser } from '../express-common.js';
export const SECRETS_FILE = 'secrets.json';
export const SECRET_KEYS = {
HORDE: 'api_key_horde', HORDE: 'api_key_horde',
MANCER: 'api_key_mancer', MANCER: 'api_key_mancer',
VLLM: 'api_key_vllm', VLLM: 'api_key_vllm',
@@ -58,11 +59,11 @@ const EXPORTABLE_KEYS = [
/** /**
* Writes a secret to the secrets file * Writes a secret to the secrets file
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @param {string} key Secret key * @param {string} key Secret key
* @param {string} value Secret value * @param {string} value Secret value
*/ */
function writeSecret(directories, key, value) { export function writeSecret(directories, key, value) {
const filePath = path.join(directories.root, SECRETS_FILE); const filePath = path.join(directories.root, SECRETS_FILE);
if (!fs.existsSync(filePath)) { if (!fs.existsSync(filePath)) {
@@ -78,11 +79,11 @@ function writeSecret(directories, key, value) {
/** /**
* Deletes a secret from the secrets file * Deletes a secret from the secrets file
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @param {string} key Secret key * @param {string} key Secret key
* @returns * @returns
*/ */
function deleteSecret(directories, key) { export function deleteSecret(directories, key) {
const filePath = path.join(directories.root, SECRETS_FILE); const filePath = path.join(directories.root, SECRETS_FILE);
if (!fs.existsSync(filePath)) { if (!fs.existsSync(filePath)) {
@@ -97,11 +98,11 @@ function deleteSecret(directories, key) {
/** /**
* Reads a secret from the secrets file * Reads a secret from the secrets file
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @param {string} key Secret key * @param {string} key Secret key
* @returns {string} Secret value * @returns {string} Secret value
*/ */
function readSecret(directories, key) { export function readSecret(directories, key) {
const filePath = path.join(directories.root, SECRETS_FILE); const filePath = path.join(directories.root, SECRETS_FILE);
if (!fs.existsSync(filePath)) { if (!fs.existsSync(filePath)) {
@@ -115,10 +116,10 @@ function readSecret(directories, key) {
/** /**
* Reads the secret state from the secrets file * Reads the secret state from the secrets file
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @returns {object} Secret state * @returns {object} Secret state
*/ */
function readSecretState(directories) { export function readSecretState(directories) {
const filePath = path.join(directories.root, SECRETS_FILE); const filePath = path.join(directories.root, SECRETS_FILE);
if (!fs.existsSync(filePath)) { if (!fs.existsSync(filePath)) {
@@ -138,10 +139,10 @@ function readSecretState(directories) {
/** /**
* Reads all secrets from the secrets file * Reads all secrets from the secrets file
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @returns {Record<string, string> | undefined} Secrets * @returns {Record<string, string> | undefined} Secrets
*/ */
function getAllSecrets(directories) { export function getAllSecrets(directories) {
const filePath = path.join(directories.root, SECRETS_FILE); const filePath = path.join(directories.root, SECRETS_FILE);
if (!fs.existsSync(filePath)) { if (!fs.existsSync(filePath)) {
@@ -154,7 +155,7 @@ function getAllSecrets(directories) {
return secrets; return secrets;
} }
const router = express.Router(); export const router = express.Router();
router.post('/write', jsonParser, (request, response) => { router.post('/write', jsonParser, (request, response) => {
const key = request.body.key; const key = request.body.key;
@@ -218,13 +219,3 @@ router.post('/find', jsonParser, (request, response) => {
return response.sendStatus(500); return response.sendStatus(500);
} }
}); });
module.exports = {
writeSecret,
readSecret,
deleteSecret,
readSecretState,
getAllSecrets,
SECRET_KEYS,
router,
};

View File

@@ -1,12 +1,14 @@
const fs = require('fs'); import fs from 'node:fs';
const path = require('path'); import path from 'node:path';
const express = require('express');
const _ = require('lodash'); import express from 'express';
const writeFileAtomicSync = require('write-file-atomic').sync; import _ from 'lodash';
const { SETTINGS_FILE } = require('../constants'); import { sync as writeFileAtomicSync } from 'write-file-atomic';
const { getConfigValue, generateTimestamp, removeOldBackups } = require('../util');
const { jsonParser } = require('../express-common'); import { SETTINGS_FILE } from '../constants.js';
const { getAllUserHandles, getUserDirectories } = require('../users'); import { getConfigValue, generateTimestamp, removeOldBackups } from '../util.js';
import { jsonParser } from '../express-common.js';
import { getAllUserHandles, getUserDirectories } from '../users.js';
const ENABLE_EXTENSIONS = getConfigValue('enableExtensions', true); const ENABLE_EXTENSIONS = getConfigValue('enableExtensions', true);
const ENABLE_EXTENSIONS_AUTO_UPDATE = getConfigValue('enableExtensionsAutoUpdate', true); const ENABLE_EXTENSIONS_AUTO_UPDATE = getConfigValue('enableExtensionsAutoUpdate', true);
@@ -190,7 +192,7 @@ function getLatestBackup(handle) {
return path.join(userDirectories.backups, latestBackup); return path.join(userDirectories.backups, latestBackup);
} }
const router = express.Router(); export const router = express.Router();
router.post('/save', jsonParser, function (request, response) { router.post('/save', jsonParser, function (request, response) {
try { try {
@@ -357,8 +359,6 @@ router.post('/restore-snapshot', jsonParser, async (request, response) => {
/** /**
* Initializes the settings endpoint * Initializes the settings endpoint
*/ */
async function init() { export async function init() {
await backupSettings(); await backupSettings();
} }
module.exports = { router, init };

View File

@@ -1,7 +1,10 @@
const express = require('express'); import { Buffer } from 'node:buffer';
const { jsonParser } = require('../express-common'); import express from 'express';
import wavefile from 'wavefile';
import { jsonParser } from '../express-common.js';
import { getPipeline } from '../transformers.mjs';
const router = express.Router(); export const router = express.Router();
/** /**
* Gets the audio data from a base64-encoded audio file. * Gets the audio data from a base64-encoded audio file.
@@ -9,7 +12,6 @@ const router = express.Router();
* @returns {Float64Array} Audio data * @returns {Float64Array} Audio data
*/ */
function getWaveFile(audio) { function getWaveFile(audio) {
const wavefile = require('wavefile');
const wav = new wavefile.WaveFile(); const wav = new wavefile.WaveFile();
wav.fromDataURI(audio); wav.fromDataURI(audio);
wav.toBitDepth('32f'); wav.toBitDepth('32f');
@@ -36,8 +38,7 @@ router.post('/recognize', jsonParser, async (req, res) => {
try { try {
const TASK = 'automatic-speech-recognition'; const TASK = 'automatic-speech-recognition';
const { model, audio, lang } = req.body; const { model, audio, lang } = req.body;
const module = await import('../transformers.mjs'); const pipe = await getPipeline(TASK, model);
const pipe = await module.default.getPipeline(TASK, model);
const wav = getWaveFile(audio); const wav = getWaveFile(audio);
const start = performance.now(); const start = performance.now();
const result = await pipe(wav, { language: lang || null, task: 'transcribe' }); const result = await pipe(wav, { language: lang || null, task: 'transcribe' });
@@ -54,11 +55,9 @@ router.post('/recognize', jsonParser, async (req, res) => {
router.post('/synthesize', jsonParser, async (req, res) => { router.post('/synthesize', jsonParser, async (req, res) => {
try { try {
const wavefile = require('wavefile');
const TASK = 'text-to-speech'; const TASK = 'text-to-speech';
const { text, model, speaker } = req.body; const { text, model, speaker } = req.body;
const module = await import('../transformers.mjs'); const pipe = await getPipeline(TASK, model);
const pipe = await module.default.getPipeline(TASK, model);
const speaker_embeddings = speaker const speaker_embeddings = speaker
? new Float32Array(new Uint8Array(Buffer.from(speaker.startsWith('data:') ? speaker.split(',')[1] : speaker, 'base64')).buffer) ? new Float32Array(new Uint8Array(Buffer.from(speaker.startsWith('data:') ? speaker.split(',')[1] : speaker, 'base64')).buffer)
: null; : null;
@@ -78,5 +77,3 @@ router.post('/synthesize', jsonParser, async (req, res) => {
return res.sendStatus(500); return res.sendStatus(500);
} }
}); });
module.exports = { router };

View File

@@ -1,16 +1,17 @@
import fs from 'node:fs';
import path from 'node:path';
const fs = require('fs'); import express from 'express';
const path = require('path'); import mime from 'mime-types';
const express = require('express'); import sanitize from 'sanitize-filename';
const mime = require('mime-types'); import { sync as writeFileAtomicSync } from 'write-file-atomic';
const sanitize = require('sanitize-filename');
const writeFileAtomicSync = require('write-file-atomic').sync; import { getImageBuffers } from '../util.js';
const { getImageBuffers } = require('../util'); import { jsonParser, urlencodedParser } from '../express-common.js';
const { jsonParser, urlencodedParser } = require('../express-common');
/** /**
* Gets the path to the sprites folder for the provided character name * Gets the path to the sprites folder for the provided character name
* @param {import('../users').UserDirectoryList} directories - User directories * @param {import('../users.js').UserDirectoryList} directories - User directories
* @param {string} name - The name of the character * @param {string} name - The name of the character
* @param {boolean} isSubfolder - Whether the name contains a subfolder * @param {boolean} isSubfolder - Whether the name contains a subfolder
* @returns {string | null} The path to the sprites folder. Null if the name is invalid. * @returns {string | null} The path to the sprites folder. Null if the name is invalid.
@@ -41,11 +42,11 @@ function getSpritesPath(directories, name, isSubfolder) {
* Imports base64 encoded sprites from RisuAI character data. * Imports base64 encoded sprites from RisuAI character data.
* The sprites are saved in the character's sprites folder. * The sprites are saved in the character's sprites folder.
* The additionalAssets and emotions are removed from the data. * The additionalAssets and emotions are removed from the data.
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @param {object} data RisuAI character data * @param {object} data RisuAI character data
* @returns {void} * @returns {void}
*/ */
function importRisuSprites(directories, data) { export function importRisuSprites(directories, data) {
try { try {
const name = data?.data?.name; const name = data?.data?.name;
const risuData = data?.data?.extensions?.risuai; const risuData = data?.data?.extensions?.risuai;
@@ -106,7 +107,7 @@ function importRisuSprites(directories, data) {
} }
} }
const router = express.Router(); export const router = express.Router();
router.get('/get', jsonParser, function (request, response) { router.get('/get', jsonParser, function (request, response) {
const name = String(request.query.name); const name = String(request.query.name);
@@ -259,8 +260,3 @@ router.post('/upload', urlencodedParser, async (request, response) => {
return response.sendStatus(500); return response.sendStatus(500);
} }
}); });
module.exports = {
router,
importRisuSprites,
};

View File

@@ -1,13 +1,15 @@
const express = require('express'); import fs from 'node:fs';
const fetch = require('node-fetch').default; import path from 'node:path';
const sanitize = require('sanitize-filename');
const { getBasicAuthHeader, delay, getHexString } = require('../util.js'); import express from 'express';
const fs = require('fs'); import fetch from 'node-fetch';
const path = require('path'); import sanitize from 'sanitize-filename';
const writeFileAtomicSync = require('write-file-atomic').sync; import { sync as writeFileAtomicSync } from 'write-file-atomic';
const { jsonParser } = require('../express-common'); import FormData from 'form-data';
const { readSecret, SECRET_KEYS } = require('./secrets.js');
const FormData = require('form-data'); import { getBasicAuthHeader, delay } from '../util.js';
import { jsonParser } from '../express-common.js';
import { readSecret, SECRET_KEYS } from './secrets.js';
/** /**
* Gets the comfy workflows. * Gets the comfy workflows.
@@ -21,7 +23,7 @@ function getComfyWorkflows(directories) {
.sort(Intl.Collator().compare); .sort(Intl.Collator().compare);
} }
const router = express.Router(); export const router = express.Router();
router.post('/ping', jsonParser, async (request, response) => { router.post('/ping', jsonParser, async (request, response) => {
try { try {
@@ -63,6 +65,7 @@ router.post('/upscalers', jsonParser, async (request, response) => {
throw new Error('SD WebUI returned an error.'); throw new Error('SD WebUI returned an error.');
} }
/** @type {any} */
const data = await result.json(); const data = await result.json();
const names = data.map(x => x.name); const names = data.map(x => x.name);
return names; return names;
@@ -83,6 +86,7 @@ router.post('/upscalers', jsonParser, async (request, response) => {
throw new Error('SD WebUI returned an error.'); throw new Error('SD WebUI returned an error.');
} }
/** @type {any} */
const data = await result.json(); const data = await result.json();
const names = data.map(x => x.name); const names = data.map(x => x.name);
return names; return names;
@@ -116,6 +120,7 @@ router.post('/vaes', jsonParser, async (request, response) => {
throw new Error('SD WebUI returned an error.'); throw new Error('SD WebUI returned an error.');
} }
/** @type {any} */
const data = await result.json(); const data = await result.json();
const names = data.map(x => x.model_name); const names = data.map(x => x.model_name);
return response.send(names); return response.send(names);
@@ -141,6 +146,7 @@ router.post('/samplers', jsonParser, async (request, response) => {
throw new Error('SD WebUI returned an error.'); throw new Error('SD WebUI returned an error.');
} }
/** @type {any} */
const data = await result.json(); const data = await result.json();
const names = data.map(x => x.name); const names = data.map(x => x.name);
return response.send(names); return response.send(names);
@@ -167,6 +173,7 @@ router.post('/schedulers', jsonParser, async (request, response) => {
throw new Error('SD WebUI returned an error.'); throw new Error('SD WebUI returned an error.');
} }
/** @type {any} */
const data = await result.json(); const data = await result.json();
const names = data.map(x => x.name); const names = data.map(x => x.name);
return response.send(names); return response.send(names);
@@ -192,6 +199,7 @@ router.post('/models', jsonParser, async (request, response) => {
throw new Error('SD WebUI returned an error.'); throw new Error('SD WebUI returned an error.');
} }
/** @type {any} */
const data = await result.json(); const data = await result.json();
const models = data.map(x => ({ value: x.title, text: x.title })); const models = data.map(x => ({ value: x.title, text: x.title }));
return response.send(models); return response.send(models);
@@ -212,6 +220,7 @@ router.post('/get-model', jsonParser, async (request, response) => {
'Authorization': getBasicAuthHeader(request.body.auth), 'Authorization': getBasicAuthHeader(request.body.auth),
}, },
}); });
/** @type {any} */
const data = await result.json(); const data = await result.json();
return response.send(data['sd_model_checkpoint']); return response.send(data['sd_model_checkpoint']);
} catch (error) { } catch (error) {
@@ -231,7 +240,6 @@ router.post('/set-model', jsonParser, async (request, response) => {
headers: { headers: {
'Authorization': getBasicAuthHeader(request.body.auth), 'Authorization': getBasicAuthHeader(request.body.auth),
}, },
timeout: 0,
}); });
const data = await result.json(); const data = await result.json();
return data; return data;
@@ -251,7 +259,6 @@ router.post('/set-model', jsonParser, async (request, response) => {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
'Authorization': getBasicAuthHeader(request.body.auth), 'Authorization': getBasicAuthHeader(request.body.auth),
}, },
timeout: 0,
}); });
if (!result.ok) { if (!result.ok) {
@@ -262,6 +269,7 @@ router.post('/set-model', jsonParser, async (request, response) => {
const CHECK_INTERVAL = 2000; const CHECK_INTERVAL = 2000;
for (let attempt = 0; attempt < MAX_ATTEMPTS; attempt++) { for (let attempt = 0; attempt < MAX_ATTEMPTS; attempt++) {
/** @type {any} */
const progressState = await getProgress(); const progressState = await getProgress();
const progress = progressState['progress']; const progress = progressState['progress'];
@@ -306,8 +314,6 @@ router.post('/generate', jsonParser, async (request, response) => {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
'Authorization': getBasicAuthHeader(request.body.auth), 'Authorization': getBasicAuthHeader(request.body.auth),
}, },
timeout: 0,
// @ts-ignore
signal: controller.signal, signal: controller.signal,
}); });
@@ -343,6 +349,7 @@ router.post('/sd-next/upscalers', jsonParser, async (request, response) => {
// Vlad doesn't provide Latent Upscalers in the API, so we have to hardcode them here // Vlad doesn't provide Latent Upscalers in the API, so we have to hardcode them here
const latentUpscalers = ['Latent', 'Latent (antialiased)', 'Latent (bicubic)', 'Latent (bicubic antialiased)', 'Latent (nearest)', 'Latent (nearest-exact)']; const latentUpscalers = ['Latent', 'Latent (antialiased)', 'Latent (bicubic)', 'Latent (bicubic antialiased)', 'Latent (nearest)', 'Latent (nearest-exact)'];
/** @type {any} */
const data = await result.json(); const data = await result.json();
const names = data.map(x => x.name); const names = data.map(x => x.name);
@@ -385,6 +392,7 @@ comfy.post('/samplers', jsonParser, async (request, response) => {
throw new Error('ComfyUI returned an error.'); throw new Error('ComfyUI returned an error.');
} }
/** @type {any} */
const data = await result.json(); const data = await result.json();
return response.send(data.KSampler.input.required.sampler_name[0]); return response.send(data.KSampler.input.required.sampler_name[0]);
} catch (error) { } catch (error) {
@@ -402,6 +410,7 @@ comfy.post('/models', jsonParser, async (request, response) => {
if (!result.ok) { if (!result.ok) {
throw new Error('ComfyUI returned an error.'); throw new Error('ComfyUI returned an error.');
} }
/** @type {any} */
const data = await result.json(); const data = await result.json();
return response.send(data.CheckpointLoaderSimple.input.required.ckpt_name[0].map(it => ({ value: it, text: it }))); return response.send(data.CheckpointLoaderSimple.input.required.ckpt_name[0].map(it => ({ value: it, text: it })));
} catch (error) { } catch (error) {
@@ -420,6 +429,7 @@ comfy.post('/schedulers', jsonParser, async (request, response) => {
throw new Error('ComfyUI returned an error.'); throw new Error('ComfyUI returned an error.');
} }
/** @type {any} */
const data = await result.json(); const data = await result.json();
return response.send(data.KSampler.input.required.scheduler[0]); return response.send(data.KSampler.input.required.scheduler[0]);
} catch (error) { } catch (error) {
@@ -438,6 +448,7 @@ comfy.post('/vaes', jsonParser, async (request, response) => {
throw new Error('ComfyUI returned an error.'); throw new Error('ComfyUI returned an error.');
} }
/** @type {any} */
const data = await result.json(); const data = await result.json();
return response.send(data.VAELoader.input.required.vae_name[0]); return response.send(data.VAELoader.input.required.vae_name[0]);
} catch (error) { } catch (error) {
@@ -519,6 +530,7 @@ comfy.post('/generate', jsonParser, async (request, response) => {
throw new Error('ComfyUI returned an error.'); throw new Error('ComfyUI returned an error.');
} }
/** @type {any} */
const data = await promptResult.json(); const data = await promptResult.json();
const id = data.prompt_id; const id = data.prompt_id;
let item; let item;
@@ -529,6 +541,7 @@ comfy.post('/generate', jsonParser, async (request, response) => {
if (!result.ok) { if (!result.ok) {
throw new Error('ComfyUI returned an error.'); throw new Error('ComfyUI returned an error.');
} }
/** @type {any} */
const history = await result.json(); const history = await result.json();
item = history[id]; item = history[id];
if (item) { if (item) {
@@ -631,6 +644,7 @@ together.post('/generate', jsonParser, async (request, response) => {
return response.sendStatus(500); return response.sendStatus(500);
} }
/** @type {any} */
const data = await result.json(); const data = await result.json();
console.log('TogetherAI response:', data); console.log('TogetherAI response:', data);
@@ -679,6 +693,8 @@ drawthings.post('/get-model', jsonParser, async (request, response) => {
const result = await fetch(url, { const result = await fetch(url, {
method: 'GET', method: 'GET',
}); });
/** @type {any} */
const data = await result.json(); const data = await result.json();
return response.send(data['model']); return response.send(data['model']);
@@ -696,6 +712,8 @@ drawthings.post('/get-upscaler', jsonParser, async (request, response) => {
const result = await fetch(url, { const result = await fetch(url, {
method: 'GET', method: 'GET',
}); });
/** @type {any} */
const data = await result.json(); const data = await result.json();
return response.send(data['upscaler']); return response.send(data['upscaler']);
@@ -724,7 +742,6 @@ drawthings.post('/generate', jsonParser, async (request, response) => {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
'Authorization': auth, 'Authorization': auth,
}, },
timeout: 0,
}); });
if (!result.ok) { if (!result.ok) {
@@ -846,7 +863,6 @@ stability.post('/generate', jsonParser, async (request, response) => {
'Accept': 'image/*', 'Accept': 'image/*',
}, },
body: formData, body: formData,
timeout: 0,
}); });
if (!result.ok) { if (!result.ok) {
@@ -993,5 +1009,3 @@ router.use('/pollinations', pollinations);
router.use('/stability', stability); router.use('/stability', stability);
router.use('/blockentropy', blockentropy); router.use('/blockentropy', blockentropy);
router.use('/huggingface', huggingface); router.use('/huggingface', huggingface);
module.exports = { router };

View File

@@ -1,14 +1,15 @@
const fs = require('fs'); import fs from 'node:fs';
const path = require('path'); import path from 'node:path';
const express = require('express'); import crypto from 'node:crypto';
const writeFileAtomic = require('write-file-atomic');
const crypto = require('crypto'); import express from 'express';
import writeFileAtomic from 'write-file-atomic';
const readFile = fs.promises.readFile; const readFile = fs.promises.readFile;
const readdir = fs.promises.readdir; const readdir = fs.promises.readdir;
const { jsonParser } = require('../express-common'); import { jsonParser } from '../express-common.js';
const { getAllUserHandles, getUserDirectories } = require('../users'); import { getAllUserHandles, getUserDirectories } from '../users.js';
const STATS_FILE = 'stats.json'; const STATS_FILE = 'stats.json';
@@ -146,7 +147,7 @@ async function collectAndCreateStats(chatsPath, charactersPath) {
* @param {string} chatsPath Path to the directory containing the chat files. * @param {string} chatsPath Path to the directory containing the chat files.
* @param {string} charactersPath Path to the directory containing the character files. * @param {string} charactersPath Path to the directory containing the character files.
*/ */
async function recreateStats(handle, chatsPath, charactersPath) { export async function recreateStats(handle, chatsPath, charactersPath) {
console.log('Collecting and creating stats for user:', handle); console.log('Collecting and creating stats for user:', handle);
const stats = await collectAndCreateStats(chatsPath, charactersPath); const stats = await collectAndCreateStats(chatsPath, charactersPath);
STATS.set(handle, stats); STATS.set(handle, stats);
@@ -157,7 +158,7 @@ async function recreateStats(handle, chatsPath, charactersPath) {
* Loads the stats file into memory. If the file doesn't exist or is invalid, * Loads the stats file into memory. If the file doesn't exist or is invalid,
* initializes stats by collecting and creating them for each character. * initializes stats by collecting and creating them for each character.
*/ */
async function init() { export async function init() {
try { try {
const userHandles = await getAllUserHandles(); const userHandles = await getAllUserHandles();
for (const handle of userHandles) { for (const handle of userHandles) {
@@ -209,7 +210,7 @@ async function saveStatsToFile() {
* Attempts to save charStats to a file and then terminates the process. * Attempts to save charStats to a file and then terminates the process.
* If an error occurs during the file write, it logs the error before exiting. * If an error occurs during the file write, it logs the error before exiting.
*/ */
async function onExit() { export async function onExit() {
try { try {
await saveStatsToFile(); await saveStatsToFile();
} catch (err) { } catch (err) {
@@ -434,7 +435,7 @@ function calculateTotalGenTimeAndWordCount(
}; };
} }
const router = express.Router(); export const router = express.Router();
/** /**
* Handle a POST request to get the stats object * Handle a POST request to get the stats object
@@ -465,10 +466,3 @@ router.post('/update', jsonParser, function (request, response) {
setCharStats(request.user.profile.handle, request.body); setCharStats(request.user.profile.handle, request.body);
return response.sendStatus(200); return response.sendStatus(200);
}); });
module.exports = {
router,
recreateStats,
init,
onExit,
};

View File

@@ -1,11 +1,13 @@
const express = require('express'); import path from 'node:path';
const path = require('path'); import fs from 'node:fs';
const fs = require('fs');
const sanitize = require('sanitize-filename');
const writeFileAtomicSync = require('write-file-atomic').sync;
const { jsonParser } = require('../express-common');
const router = express.Router(); import express from 'express';
import sanitize from 'sanitize-filename';
import { sync as writeFileAtomicSync } from 'write-file-atomic';
import { jsonParser } from '../express-common.js';
export const router = express.Router();
router.post('/save', jsonParser, (request, response) => { router.post('/save', jsonParser, (request, response) => {
if (!request.body || !request.body.name) { if (!request.body || !request.body.name) {
@@ -36,5 +38,3 @@ router.post('/delete', jsonParser, function (request, response) {
return response.sendStatus(500); return response.sendStatus(500);
} }
}); });
module.exports = { router };

View File

@@ -1,14 +1,16 @@
const fs = require('fs'); import fs from 'node:fs';
const fsPromises = require('fs').promises; import { promises as fsPromises } from 'node:fs';
const path = require('path'); import path from 'node:path';
const mime = require('mime-types');
const express = require('express'); import mime from 'mime-types';
const sanitize = require('sanitize-filename'); import express from 'express';
const jimp = require('jimp'); import sanitize from 'sanitize-filename';
const writeFileAtomicSync = require('write-file-atomic').sync; import jimp from 'jimp';
const { getAllUserHandles, getUserDirectories } = require('../users'); import { sync as writeFileAtomicSync } from 'write-file-atomic';
const { getConfigValue } = require('../util');
const { jsonParser } = require('../express-common'); import { getAllUserHandles, getUserDirectories } from '../users.js';
import { getConfigValue } from '../util.js';
import { jsonParser } from '../express-common.js';
const thumbnailsDisabled = getConfigValue('disableThumbnails', false); const thumbnailsDisabled = getConfigValue('disableThumbnails', false);
const quality = getConfigValue('thumbnailsQuality', 95); const quality = getConfigValue('thumbnailsQuality', 95);
@@ -16,7 +18,7 @@ const pngFormat = getConfigValue('avatarThumbnailsPng', false);
/** /**
* Gets a path to thumbnail folder based on the type. * Gets a path to thumbnail folder based on the type.
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @param {'bg' | 'avatar'} type Thumbnail type * @param {'bg' | 'avatar'} type Thumbnail type
* @returns {string} Path to the thumbnails folder * @returns {string} Path to the thumbnails folder
*/ */
@@ -37,7 +39,7 @@ function getThumbnailFolder(directories, type) {
/** /**
* Gets a path to the original images folder based on the type. * Gets a path to the original images folder based on the type.
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @param {'bg' | 'avatar'} type Thumbnail type * @param {'bg' | 'avatar'} type Thumbnail type
* @returns {string} Path to the original images folder * @returns {string} Path to the original images folder
*/ */
@@ -58,11 +60,11 @@ function getOriginalFolder(directories, type) {
/** /**
* Removes the generated thumbnail from the disk. * Removes the generated thumbnail from the disk.
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @param {'bg' | 'avatar'} type Type of the thumbnail * @param {'bg' | 'avatar'} type Type of the thumbnail
* @param {string} file Name of the file * @param {string} file Name of the file
*/ */
function invalidateThumbnail(directories, type, file) { export function invalidateThumbnail(directories, type, file) {
const folder = getThumbnailFolder(directories, type); const folder = getThumbnailFolder(directories, type);
if (folder === undefined) throw new Error('Invalid thumbnail type'); if (folder === undefined) throw new Error('Invalid thumbnail type');
@@ -75,7 +77,7 @@ function invalidateThumbnail(directories, type, file) {
/** /**
* Generates a thumbnail for the given file. * Generates a thumbnail for the given file.
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @param {'bg' | 'avatar'} type Type of the thumbnail * @param {'bg' | 'avatar'} type Type of the thumbnail
* @param {string} file Name of the file * @param {string} file Name of the file
* @returns * @returns
@@ -141,7 +143,7 @@ async function generateThumbnail(directories, type, file) {
* Ensures that the thumbnail cache for backgrounds is valid. * Ensures that the thumbnail cache for backgrounds is valid.
* @returns {Promise<void>} Promise that resolves when the cache is validated * @returns {Promise<void>} Promise that resolves when the cache is validated
*/ */
async function ensureThumbnailCache() { export async function ensureThumbnailCache() {
const userHandles = await getAllUserHandles(); const userHandles = await getAllUserHandles();
for (const handle of userHandles) { for (const handle of userHandles) {
const directories = getUserDirectories(handle); const directories = getUserDirectories(handle);
@@ -166,7 +168,7 @@ async function ensureThumbnailCache() {
} }
} }
const router = express.Router(); export const router = express.Router();
// Important: This route must be mounted as '/thumbnail'. It is used in the client code and saved to chat files. // Important: This route must be mounted as '/thumbnail'. It is used in the client code and saved to chat files.
router.get('/', jsonParser, async function (request, response) { router.get('/', jsonParser, async function (request, response) {
@@ -227,9 +229,3 @@ router.get('/', jsonParser, async function (request, response) {
return response.sendStatus(500); return response.sendStatus(500);
} }
}); });
module.exports = {
invalidateThumbnail,
ensureThumbnailCache,
router,
};

View File

@@ -1,15 +1,19 @@
const fs = require('fs'); import fs from 'node:fs';
const path = require('path'); import path from 'node:path';
const express = require('express'); import { Buffer } from 'node:buffer';
const { SentencePieceProcessor } = require('@agnai/sentencepiece-js');
const tiktoken = require('tiktoken'); import express from 'express';
const { Tokenizer } = require('@agnai/web-tokenizers'); import { sync as writeFileAtomicSync } from 'write-file-atomic';
const { convertClaudePrompt } = require('../prompt-converters');
const { TEXTGEN_TYPES } = require('../constants'); import { Tokenizer } from '@agnai/web-tokenizers';
const { jsonParser } = require('../express-common'); import { SentencePieceProcessor } from '@agnai/sentencepiece-js';
const { setAdditionalHeaders } = require('../additional-headers'); import tiktoken from 'tiktoken';
const { getConfigValue, isValidUrl } = require('../util');
const writeFileAtomicSync = require('write-file-atomic').sync; import { convertClaudePrompt } from '../prompt-converters.js';
import { TEXTGEN_TYPES } from '../constants.js';
import { jsonParser } from '../express-common.js';
import { setAdditionalHeaders } from '../additional-headers.js';
import { getConfigValue, isValidUrl } from '../util.js';
/** /**
* @typedef { (req: import('express').Request, res: import('express').Response) => Promise<any> } TokenizationHandler * @typedef { (req: import('express').Request, res: import('express').Response) => Promise<any> } TokenizationHandler
@@ -23,7 +27,7 @@ const tokenizersCache = {};
/** /**
* @type {string[]} * @type {string[]}
*/ */
const TEXT_COMPLETION_MODELS = [ export const TEXT_COMPLETION_MODELS = [
'gpt-3.5-turbo-instruct', 'gpt-3.5-turbo-instruct',
'gpt-3.5-turbo-instruct-0914', 'gpt-3.5-turbo-instruct-0914',
'text-davinci-003', 'text-davinci-003',
@@ -223,7 +227,7 @@ const commandTokenizer = new WebTokenizer('https://github.com/SillyTavern/SillyT
const qwen2Tokenizer = new WebTokenizer('https://github.com/SillyTavern/SillyTavern-Tokenizers/raw/main/qwen2.json', 'src/tokenizers/llama3.json'); const qwen2Tokenizer = new WebTokenizer('https://github.com/SillyTavern/SillyTavern-Tokenizers/raw/main/qwen2.json', 'src/tokenizers/llama3.json');
const nemoTokenizer = new WebTokenizer('https://github.com/SillyTavern/SillyTavern-Tokenizers/raw/main/nemo.json', 'src/tokenizers/llama3.json'); const nemoTokenizer = new WebTokenizer('https://github.com/SillyTavern/SillyTavern-Tokenizers/raw/main/nemo.json', 'src/tokenizers/llama3.json');
const sentencepieceTokenizers = [ export const sentencepieceTokenizers = [
'llama', 'llama',
'nerdstash', 'nerdstash',
'nerdstash_v2', 'nerdstash_v2',
@@ -238,7 +242,7 @@ const sentencepieceTokenizers = [
* @param {string} model Sentencepiece model name * @param {string} model Sentencepiece model name
* @returns {SentencePieceTokenizer|null} Sentencepiece tokenizer * @returns {SentencePieceTokenizer|null} Sentencepiece tokenizer
*/ */
function getSentencepiceTokenizer(model) { export function getSentencepiceTokenizer(model) {
if (model.includes('llama')) { if (model.includes('llama')) {
return spp_llama; return spp_llama;
} }
@@ -350,7 +354,7 @@ function getWebTokenizersChunks(tokenizer, ids) {
* @param {string} requestModel Models to use for tokenization * @param {string} requestModel Models to use for tokenization
* @returns {string} Tokenizer model to use * @returns {string} Tokenizer model to use
*/ */
function getTokenizerModel(requestModel) { export function getTokenizerModel(requestModel) {
if (requestModel.includes('o1-preview') || requestModel.includes('o1-mini')) { if (requestModel.includes('o1-preview') || requestModel.includes('o1-mini')) {
return 'gpt-4o'; return 'gpt-4o';
} }
@@ -427,7 +431,7 @@ function getTokenizerModel(requestModel) {
return 'gpt-3.5-turbo'; return 'gpt-3.5-turbo';
} }
function getTiktokenTokenizer(model) { export function getTiktokenTokenizer(model) {
if (tokenizersCache[model]) { if (tokenizersCache[model]) {
return tokenizersCache[model]; return tokenizersCache[model];
} }
@@ -444,7 +448,7 @@ function getTiktokenTokenizer(model) {
* @param {object[]} messages Array of messages * @param {object[]} messages Array of messages
* @returns {number} Number of tokens * @returns {number} Number of tokens
*/ */
function countWebTokenizerTokens(tokenizer, messages) { export function countWebTokenizerTokens(tokenizer, messages) {
// Should be fine if we use the old conversion method instead of the messages API one i think? // Should be fine if we use the old conversion method instead of the messages API one i think?
const convertedPrompt = convertClaudePrompt(messages, false, '', false, false, '', false); const convertedPrompt = convertClaudePrompt(messages, false, '', false, false, '', false);
@@ -636,7 +640,7 @@ function createWebTokenizerDecodingHandler(tokenizer) {
}; };
} }
const router = express.Router(); export const router = express.Router();
router.post('/llama/encode', jsonParser, createSentencepieceEncodingHandler(spp_llama)); router.post('/llama/encode', jsonParser, createSentencepieceEncodingHandler(spp_llama));
router.post('/nerdstash/encode', jsonParser, createSentencepieceEncodingHandler(spp_nerd)); router.post('/nerdstash/encode', jsonParser, createSentencepieceEncodingHandler(spp_nerd));
@@ -1002,13 +1006,3 @@ router.post('/remote/textgenerationwebui/encode', jsonParser, async function (re
return response.send({ error: true }); return response.send({ error: true });
} }
}); });
module.exports = {
TEXT_COMPLETION_MODELS,
getTokenizerModel,
getTiktokenTokenizer,
countWebTokenizerTokens,
getSentencepiceTokenizer,
sentencepieceTokenizers,
router,
};

View File

@@ -1,29 +1,18 @@
const fetch = require('node-fetch').default; import https from 'node:https';
const https = require('https');
const express = require('express'); import fetch from 'node-fetch';
const iconv = require('iconv-lite'); import express from 'express';
const { readSecret, SECRET_KEYS } = require('./secrets'); import bingTranslateApi from 'bing-translate-api';
const { getConfigValue, uuidv4 } = require('../util'); import googleTranslateApi from 'google-translate-api-x';
const { jsonParser } = require('../express-common');
import { readSecret, SECRET_KEYS } from './secrets.js';
import { getConfigValue, uuidv4 } from '../util.js';
import { jsonParser } from '../express-common.js';
const DEEPLX_URL_DEFAULT = 'http://127.0.0.1:1188/translate'; const DEEPLX_URL_DEFAULT = 'http://127.0.0.1:1188/translate';
const ONERING_URL_DEFAULT = 'http://127.0.0.1:4990/translate'; const ONERING_URL_DEFAULT = 'http://127.0.0.1:4990/translate';
/** export const router = express.Router();
* Tries to decode a Node.js Buffer to a string using iconv-lite for UTF-8.
* @param {Buffer} buffer Node.js Buffer
* @returns {string} Decoded string
*/
function decodeBuffer(buffer) {
try {
return iconv.decode(buffer, 'utf-8');
} catch (error) {
console.log('Failed to decode buffer:', error);
return buffer.toString('utf-8');
}
}
const router = express.Router();
router.post('/libre', jsonParser, async (request, response) => { router.post('/libre', jsonParser, async (request, response) => {
const key = readSecret(request.user.directories, SECRET_KEYS.LIBRE); const key = readSecret(request.user.directories, SECRET_KEYS.LIBRE);
@@ -70,6 +59,7 @@ router.post('/libre', jsonParser, async (request, response) => {
return response.sendStatus(result.status); return response.sendStatus(result.status);
} }
/** @type {any} */
const json = await result.json(); const json = await result.json();
console.log('Translated text: ' + json.translatedText); console.log('Translated text: ' + json.translatedText);
@@ -82,7 +72,6 @@ router.post('/libre', jsonParser, async (request, response) => {
router.post('/google', jsonParser, async (request, response) => { router.post('/google', jsonParser, async (request, response) => {
try { try {
const { generateRequestUrl, normaliseResponse } = require('google-translate-api-browser');
const text = request.body.text; const text = request.body.text;
const lang = request.body.lang; const lang = request.body.lang;
@@ -92,31 +81,12 @@ router.post('/google', jsonParser, async (request, response) => {
console.log('Input text: ' + text); console.log('Input text: ' + text);
const url = generateRequestUrl(text, { to: lang }); const result = await googleTranslateApi(text, { to: lang, forceBatch: false });
const translatedText = Array.isArray(result) ? result.map(x => x.text).join('') : result.text;
https.get(url, (resp) => { response.setHeader('Content-Type', 'text/plain; charset=utf-8');
const data = []; console.log('Translated text: ' + translatedText);
return response.send(translatedText);
resp.on('data', (chunk) => {
data.push(chunk);
});
resp.on('end', () => {
try {
const decodedData = decodeBuffer(Buffer.concat(data));
const result = normaliseResponse(JSON.parse(decodedData));
console.log('Translated text: ' + result.text);
response.setHeader('Content-Type', 'text/plain; charset=utf-8');
return response.send(result.text);
} catch (error) {
console.log('Translation error', error);
return response.sendStatus(500);
}
});
}).on('error', (err) => {
console.log('Translation error: ' + err.message);
return response.sendStatus(500);
});
} catch (error) { } catch (error) {
console.log('Translation error', error); console.log('Translation error', error);
return response.sendStatus(500); return response.sendStatus(500);
@@ -151,7 +121,6 @@ router.post('/yandex', jsonParser, async (request, response) => {
headers: { headers: {
'Content-Type': 'application/x-www-form-urlencoded', 'Content-Type': 'application/x-www-form-urlencoded',
}, },
timeout: 0,
}); });
if (!result.ok) { if (!result.ok) {
@@ -160,6 +129,7 @@ router.post('/yandex', jsonParser, async (request, response) => {
return response.sendStatus(500); return response.sendStatus(500);
} }
/** @type {any} */
const json = await result.json(); const json = await result.json();
const translated = json.text.join(); const translated = json.text.join();
console.log('Translated text: ' + translated); console.log('Translated text: ' + translated);
@@ -257,7 +227,6 @@ router.post('/deepl', jsonParser, async (request, response) => {
'Authorization': `DeepL-Auth-Key ${key}`, 'Authorization': `DeepL-Auth-Key ${key}`,
'Content-Type': 'application/x-www-form-urlencoded', 'Content-Type': 'application/x-www-form-urlencoded',
}, },
timeout: 0,
}); });
if (!result.ok) { if (!result.ok) {
@@ -266,6 +235,7 @@ router.post('/deepl', jsonParser, async (request, response) => {
return response.sendStatus(result.status); return response.sendStatus(result.status);
} }
/** @type {any} */
const json = await result.json(); const json = await result.json();
console.log('Translated text: ' + json.translations[0].text); console.log('Translated text: ' + json.translations[0].text);
@@ -310,7 +280,6 @@ router.post('/onering', jsonParser, async (request, response) => {
const result = await fetch(fetchUrl, { const result = await fetch(fetchUrl, {
method: 'GET', method: 'GET',
timeout: 0,
}); });
if (!result.ok) { if (!result.ok) {
@@ -319,6 +288,7 @@ router.post('/onering', jsonParser, async (request, response) => {
return response.sendStatus(result.status); return response.sendStatus(result.status);
} }
/** @type {any} */
const data = await result.json(); const data = await result.json();
console.log('Translated text: ' + data.result); console.log('Translated text: ' + data.result);
@@ -366,7 +336,6 @@ router.post('/deeplx', jsonParser, async (request, response) => {
'Accept': 'application/json', 'Accept': 'application/json',
'Content-Type': 'application/json', 'Content-Type': 'application/json',
}, },
timeout: 0,
}); });
if (!result.ok) { if (!result.ok) {
@@ -375,6 +344,7 @@ router.post('/deeplx', jsonParser, async (request, response) => {
return response.sendStatus(result.status); return response.sendStatus(result.status);
} }
/** @type {any} */
const json = await result.json(); const json = await result.json();
console.log('Translated text: ' + json.data); console.log('Translated text: ' + json.data);
@@ -386,7 +356,6 @@ router.post('/deeplx', jsonParser, async (request, response) => {
}); });
router.post('/bing', jsonParser, async (request, response) => { router.post('/bing', jsonParser, async (request, response) => {
const bingTranslateApi = require('bing-translate-api');
const text = request.body.text; const text = request.body.text;
let lang = request.body.lang; let lang = request.body.lang;
@@ -408,5 +377,3 @@ router.post('/bing', jsonParser, async (request, response) => {
return response.sendStatus(500); return response.sendStatus(500);
}); });
}); });
module.exports = { router };

View File

@@ -1,10 +1,11 @@
const fsPromises = require('fs').promises; import { promises as fsPromises } from 'node:fs';
const storage = require('node-persist');
const express = require('express'); import storage from 'node-persist';
const lodash = require('lodash'); import express from 'express';
const { jsonParser } = require('../express-common'); import lodash from 'lodash';
const { checkForNewContent } = require('./content-manager'); import { jsonParser } from '../express-common.js';
const { import { checkForNewContent } from './content-manager.js';
import {
KEY_PREFIX, KEY_PREFIX,
toKey, toKey,
requireAdminMiddleware, requireAdminMiddleware,
@@ -14,17 +15,17 @@ const {
getPasswordHash, getPasswordHash,
getUserDirectories, getUserDirectories,
ensurePublicDirectoriesExist, ensurePublicDirectoriesExist,
} = require('../users'); } from '../users.js';
const { DEFAULT_USER } = require('../constants'); import { DEFAULT_USER } from '../constants.js';
const router = express.Router(); export const router = express.Router();
router.post('/get', requireAdminMiddleware, jsonParser, async (_request, response) => { router.post('/get', requireAdminMiddleware, jsonParser, async (_request, response) => {
try { try {
/** @type {import('../users').User[]} */ /** @type {import('../users.js').User[]} */
const users = await storage.values(x => x.key.startsWith(KEY_PREFIX)); const users = await storage.values(x => x.key.startsWith(KEY_PREFIX));
/** @type {Promise<import('../users').UserViewModel>[]} */ /** @type {Promise<import('../users.js').UserViewModel>[]} */
const viewModelPromises = users const viewModelPromises = users
.map(user => new Promise(resolve => { .map(user => new Promise(resolve => {
getUserAvatar(user.handle).then(avatar => getUserAvatar(user.handle).then(avatar =>
@@ -61,7 +62,7 @@ router.post('/disable', requireAdminMiddleware, jsonParser, async (request, resp
return response.status(400).json({ error: 'Cannot disable yourself' }); return response.status(400).json({ error: 'Cannot disable yourself' });
} }
/** @type {import('../users').User} */ /** @type {import('../users.js').User} */
const user = await storage.getItem(toKey(request.body.handle)); const user = await storage.getItem(toKey(request.body.handle));
if (!user) { if (!user) {
@@ -85,7 +86,7 @@ router.post('/enable', requireAdminMiddleware, jsonParser, async (request, respo
return response.status(400).json({ error: 'Missing required fields' }); return response.status(400).json({ error: 'Missing required fields' });
} }
/** @type {import('../users').User} */ /** @type {import('../users.js').User} */
const user = await storage.getItem(toKey(request.body.handle)); const user = await storage.getItem(toKey(request.body.handle));
if (!user) { if (!user) {
@@ -109,7 +110,7 @@ router.post('/promote', requireAdminMiddleware, jsonParser, async (request, resp
return response.status(400).json({ error: 'Missing required fields' }); return response.status(400).json({ error: 'Missing required fields' });
} }
/** @type {import('../users').User} */ /** @type {import('../users.js').User} */
const user = await storage.getItem(toKey(request.body.handle)); const user = await storage.getItem(toKey(request.body.handle));
if (!user) { if (!user) {
@@ -138,7 +139,7 @@ router.post('/demote', requireAdminMiddleware, jsonParser, async (request, respo
return response.status(400).json({ error: 'Cannot demote yourself' }); return response.status(400).json({ error: 'Cannot demote yourself' });
} }
/** @type {import('../users').User} */ /** @type {import('../users.js').User} */
const user = await storage.getItem(toKey(request.body.handle)); const user = await storage.getItem(toKey(request.body.handle));
if (!user) { if (!user) {
@@ -249,7 +250,3 @@ router.post('/slugify', requireAdminMiddleware, jsonParser, async (request, resp
return response.sendStatus(500); return response.sendStatus(500);
} }
}); });
module.exports = {
router,
};

View File

@@ -1,18 +1,19 @@
const path = require('path'); import path from 'node:path';
const fsPromises = require('fs').promises; import { promises as fsPromises } from 'node:fs';
const storage = require('node-persist'); import crypto from 'node:crypto';
const express = require('express');
const crypto = require('crypto'); import storage from 'node-persist';
const { jsonParser } = require('../express-common'); import express from 'express';
const { getUserAvatar, toKey, getPasswordHash, getPasswordSalt, createBackupArchive, ensurePublicDirectoriesExist, toAvatarKey } = require('../users');
const { SETTINGS_FILE } = require('../constants'); import { jsonParser } from '../express-common.js';
const contentManager = require('./content-manager'); import { getUserAvatar, toKey, getPasswordHash, getPasswordSalt, createBackupArchive, ensurePublicDirectoriesExist, toAvatarKey } from '../users.js';
const { color, Cache } = require('../util'); import { SETTINGS_FILE } from '../constants.js';
const { checkForNewContent } = require('./content-manager'); import { checkForNewContent, CONTENT_TYPES } from './content-manager.js';
import { color, Cache } from '../util.js';
const RESET_CACHE = new Cache(5 * 60 * 1000); const RESET_CACHE = new Cache(5 * 60 * 1000);
const router = express.Router(); export const router = express.Router();
router.post('/logout', async (request, response) => { router.post('/logout', async (request, response) => {
try { try {
@@ -70,7 +71,7 @@ router.post('/change-avatar', jsonParser, async (request, response) => {
return response.status(400).json({ error: 'Invalid data URL' }); return response.status(400).json({ error: 'Invalid data URL' });
} }
/** @type {import('../users').User} */ /** @type {import('../users.js').User} */
const user = await storage.getItem(toKey(request.body.handle)); const user = await storage.getItem(toKey(request.body.handle));
if (!user) { if (!user) {
@@ -99,7 +100,7 @@ router.post('/change-password', jsonParser, async (request, response) => {
return response.status(403).json({ error: 'Unauthorized' }); return response.status(403).json({ error: 'Unauthorized' });
} }
/** @type {import('../users').User} */ /** @type {import('../users.js').User} */
const user = await storage.getItem(toKey(request.body.handle)); const user = await storage.getItem(toKey(request.body.handle));
if (!user) { if (!user) {
@@ -166,7 +167,7 @@ router.post('/reset-settings', jsonParser, async (request, response) => {
const pathToFile = path.join(request.user.directories.root, SETTINGS_FILE); const pathToFile = path.join(request.user.directories.root, SETTINGS_FILE);
await fsPromises.rm(pathToFile, { force: true }); await fsPromises.rm(pathToFile, { force: true });
await contentManager.checkForNewContent([request.user.directories], [contentManager.CONTENT_TYPES.SETTINGS]); await checkForNewContent([request.user.directories], [CONTENT_TYPES.SETTINGS]);
return response.sendStatus(204); return response.sendStatus(204);
} catch (error) { } catch (error) {
@@ -187,7 +188,7 @@ router.post('/change-name', jsonParser, async (request, response) => {
return response.status(403).json({ error: 'Unauthorized' }); return response.status(403).json({ error: 'Unauthorized' });
} }
/** @type {import('../users').User} */ /** @type {import('../users.js').User} */
const user = await storage.getItem(toKey(request.body.handle)); const user = await storage.getItem(toKey(request.body.handle));
if (!user) { if (!user) {
@@ -251,7 +252,3 @@ router.post('/reset-step2', jsonParser, async (request, response) => {
return response.sendStatus(500); return response.sendStatus(500);
} }
}); });
module.exports = {
router,
};

View File

@@ -1,15 +1,16 @@
const crypto = require('crypto'); import crypto from 'node:crypto';
const storage = require('node-persist');
const express = require('express'); import storage from 'node-persist';
const { RateLimiterMemory, RateLimiterRes } = require('rate-limiter-flexible'); import express from 'express';
const { jsonParser, getIpFromRequest } = require('../express-common'); import { RateLimiterMemory, RateLimiterRes } from 'rate-limiter-flexible';
const { color, Cache, getConfigValue } = require('../util'); import { jsonParser, getIpFromRequest } from '../express-common.js';
const { KEY_PREFIX, getUserAvatar, toKey, getPasswordHash, getPasswordSalt } = require('../users'); import { color, Cache, getConfigValue } from '../util.js';
import { KEY_PREFIX, getUserAvatar, toKey, getPasswordHash, getPasswordSalt } from '../users.js';
const DISCREET_LOGIN = getConfigValue('enableDiscreetLogin', false); const DISCREET_LOGIN = getConfigValue('enableDiscreetLogin', false);
const MFA_CACHE = new Cache(5 * 60 * 1000); const MFA_CACHE = new Cache(5 * 60 * 1000);
const router = express.Router(); export const router = express.Router();
const loginLimiter = new RateLimiterMemory({ const loginLimiter = new RateLimiterMemory({
points: 5, points: 5,
duration: 60, duration: 60,
@@ -25,10 +26,10 @@ router.post('/list', async (_request, response) => {
return response.sendStatus(204); return response.sendStatus(204);
} }
/** @type {import('../users').User[]} */ /** @type {import('../users.js').User[]} */
const users = await storage.values(x => x.key.startsWith(KEY_PREFIX)); const users = await storage.values(x => x.key.startsWith(KEY_PREFIX));
/** @type {Promise<import('../users').UserViewModel>[]} */ /** @type {Promise<import('../users.js').UserViewModel>[]} */
const viewModelPromises = users const viewModelPromises = users
.filter(x => x.enabled) .filter(x => x.enabled)
.map(user => new Promise(async (resolve) => { .map(user => new Promise(async (resolve) => {
@@ -62,7 +63,7 @@ router.post('/login', jsonParser, async (request, response) => {
const ip = getIpFromRequest(request); const ip = getIpFromRequest(request);
await loginLimiter.consume(ip); await loginLimiter.consume(ip);
/** @type {import('../users').User} */ /** @type {import('../users.js').User} */
const user = await storage.getItem(toKey(request.body.handle)); const user = await storage.getItem(toKey(request.body.handle));
if (!user) { if (!user) {
@@ -110,7 +111,7 @@ router.post('/recover-step1', jsonParser, async (request, response) => {
const ip = getIpFromRequest(request); const ip = getIpFromRequest(request);
await recoverLimiter.consume(ip); await recoverLimiter.consume(ip);
/** @type {import('../users').User} */ /** @type {import('../users.js').User} */
const user = await storage.getItem(toKey(request.body.handle)); const user = await storage.getItem(toKey(request.body.handle));
if (!user) { if (!user) {
@@ -147,7 +148,7 @@ router.post('/recover-step2', jsonParser, async (request, response) => {
return response.status(400).json({ error: 'Missing required fields' }); return response.status(400).json({ error: 'Missing required fields' });
} }
/** @type {import('../users').User} */ /** @type {import('../users.js').User} */
const user = await storage.getItem(toKey(request.body.handle)); const user = await storage.getItem(toKey(request.body.handle));
const ip = getIpFromRequest(request); const ip = getIpFromRequest(request);
@@ -193,7 +194,3 @@ router.post('/recover-step2', jsonParser, async (request, response) => {
return response.sendStatus(500); return response.sendStatus(500);
} }
}); });
module.exports = {
router,
};

View File

@@ -1,10 +1,22 @@
const vectra = require('vectra'); import path from 'node:path';
const path = require('path'); import fs from 'node:fs';
const fs = require('fs');
const express = require('express'); import vectra from 'vectra';
const sanitize = require('sanitize-filename'); import express from 'express';
const { jsonParser } = require('../express-common'); import sanitize from 'sanitize-filename';
const { getConfigValue } = require('../util');
import { jsonParser } from '../express-common.js';
import { getConfigValue } from '../util.js';
import { getNomicAIBatchVector, getNomicAIVector } from '../vectors/nomicai-vectors.js';
import { getOpenAIVector, getOpenAIBatchVector } from '../vectors/openai-vectors.js';
import { getTransformersVector, getTransformersBatchVector } from '../vectors/embedding.js';
import { getExtrasVector, getExtrasBatchVector } from '../vectors/extras-vectors.js';
import { getMakerSuiteVector, getMakerSuiteBatchVector } from '../vectors/makersuite-vectors.js';
import { getCohereVector, getCohereBatchVector } from '../vectors/cohere-vectors.js';
import { getLlamaCppVector, getLlamaCppBatchVector } from '../vectors/llamacpp-vectors.js';
import { getVllmVector, getVllmBatchVector } from '../vectors/vllm-vectors.js';
import { getOllamaVector, getOllamaBatchVector } from '../vectors/ollama-vectors.js';
// Don't forget to add new sources to the SOURCES array // Don't forget to add new sources to the SOURCES array
const SOURCES = [ const SOURCES = [
@@ -27,31 +39,31 @@ const SOURCES = [
* @param {Object} sourceSettings - Settings for the source, if it needs any * @param {Object} sourceSettings - Settings for the source, if it needs any
* @param {string} text - The text to get the vector for * @param {string} text - The text to get the vector for
* @param {boolean} isQuery - If the text is a query for embedding search * @param {boolean} isQuery - If the text is a query for embedding search
* @param {import('../users').UserDirectoryList} directories - The directories object for the user * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
* @returns {Promise<number[]>} - The vector for the text * @returns {Promise<number[]>} - The vector for the text
*/ */
async function getVector(source, sourceSettings, text, isQuery, directories) { async function getVector(source, sourceSettings, text, isQuery, directories) {
switch (source) { switch (source) {
case 'nomicai': case 'nomicai':
return require('../vectors/nomicai-vectors').getNomicAIVector(text, source, directories); return getNomicAIVector(text, source, directories);
case 'togetherai': case 'togetherai':
case 'mistral': case 'mistral':
case 'openai': case 'openai':
return require('../vectors/openai-vectors').getOpenAIVector(text, source, directories, sourceSettings.model); return getOpenAIVector(text, source, directories, sourceSettings.model);
case 'transformers': case 'transformers':
return require('../vectors/embedding').getTransformersVector(text); return getTransformersVector(text);
case 'extras': case 'extras':
return require('../vectors/extras-vectors').getExtrasVector(text, sourceSettings.extrasUrl, sourceSettings.extrasKey); return getExtrasVector(text, sourceSettings.extrasUrl, sourceSettings.extrasKey);
case 'palm': case 'palm':
return require('../vectors/makersuite-vectors').getMakerSuiteVector(text, directories); return getMakerSuiteVector(text, directories);
case 'cohere': case 'cohere':
return require('../vectors/cohere-vectors').getCohereVector(text, isQuery, directories, sourceSettings.model); return getCohereVector(text, isQuery, directories, sourceSettings.model);
case 'llamacpp': case 'llamacpp':
return require('../vectors/llamacpp-vectors').getLlamaCppVector(text, sourceSettings.apiUrl, directories); return getLlamaCppVector(text, sourceSettings.apiUrl, directories);
case 'vllm': case 'vllm':
return require('../vectors/vllm-vectors').getVllmVector(text, sourceSettings.apiUrl, sourceSettings.model, directories); return getVllmVector(text, sourceSettings.apiUrl, sourceSettings.model, directories);
case 'ollama': case 'ollama':
return require('../vectors/ollama-vectors').getOllamaVector(text, sourceSettings.apiUrl, sourceSettings.model, sourceSettings.keep, directories); return getOllamaVector(text, sourceSettings.apiUrl, sourceSettings.model, sourceSettings.keep, directories);
} }
throw new Error(`Unknown vector source ${source}`); throw new Error(`Unknown vector source ${source}`);
@@ -63,7 +75,7 @@ async function getVector(source, sourceSettings, text, isQuery, directories) {
* @param {Object} sourceSettings - Settings for the source, if it needs any * @param {Object} sourceSettings - Settings for the source, if it needs any
* @param {string[]} texts - The array of texts to get the vector for * @param {string[]} texts - The array of texts to get the vector for
* @param {boolean} isQuery - If the text is a query for embedding search * @param {boolean} isQuery - If the text is a query for embedding search
* @param {import('../users').UserDirectoryList} directories - The directories object for the user * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
* @returns {Promise<number[][]>} - The array of vectors for the texts * @returns {Promise<number[][]>} - The array of vectors for the texts
*/ */
async function getBatchVector(source, sourceSettings, texts, isQuery, directories) { async function getBatchVector(source, sourceSettings, texts, isQuery, directories) {
@@ -74,33 +86,33 @@ async function getBatchVector(source, sourceSettings, texts, isQuery, directorie
for (let batch of batches) { for (let batch of batches) {
switch (source) { switch (source) {
case 'nomicai': case 'nomicai':
results.push(...await require('../vectors/nomicai-vectors').getNomicAIBatchVector(batch, source, directories)); results.push(...await getNomicAIBatchVector(batch, source, directories));
break; break;
case 'togetherai': case 'togetherai':
case 'mistral': case 'mistral':
case 'openai': case 'openai':
results.push(...await require('../vectors/openai-vectors').getOpenAIBatchVector(batch, source, directories, sourceSettings.model)); results.push(...await getOpenAIBatchVector(batch, source, directories, sourceSettings.model));
break; break;
case 'transformers': case 'transformers':
results.push(...await require('../vectors/embedding').getTransformersBatchVector(batch)); results.push(...await getTransformersBatchVector(batch));
break; break;
case 'extras': case 'extras':
results.push(...await require('../vectors/extras-vectors').getExtrasBatchVector(batch, sourceSettings.extrasUrl, sourceSettings.extrasKey)); results.push(...await getExtrasBatchVector(batch, sourceSettings.extrasUrl, sourceSettings.extrasKey));
break; break;
case 'palm': case 'palm':
results.push(...await require('../vectors/makersuite-vectors').getMakerSuiteBatchVector(batch, directories)); results.push(...await getMakerSuiteBatchVector(batch, directories));
break; break;
case 'cohere': case 'cohere':
results.push(...await require('../vectors/cohere-vectors').getCohereBatchVector(batch, isQuery, directories, sourceSettings.model)); results.push(...await getCohereBatchVector(batch, isQuery, directories, sourceSettings.model));
break; break;
case 'llamacpp': case 'llamacpp':
results.push(...await require('../vectors/llamacpp-vectors').getLlamaCppBatchVector(batch, sourceSettings.apiUrl, directories)); results.push(...await getLlamaCppBatchVector(batch, sourceSettings.apiUrl, directories));
break; break;
case 'vllm': case 'vllm':
results.push(...await require('../vectors/vllm-vectors').getVllmBatchVector(batch, sourceSettings.apiUrl, sourceSettings.model, directories)); results.push(...await getVllmBatchVector(batch, sourceSettings.apiUrl, sourceSettings.model, directories));
break; break;
case 'ollama': case 'ollama':
results.push(...await require('../vectors/ollama-vectors').getOllamaBatchVector(batch, sourceSettings.apiUrl, sourceSettings.model, sourceSettings.keep, directories)); results.push(...await getOllamaBatchVector(batch, sourceSettings.apiUrl, sourceSettings.model, sourceSettings.keep, directories));
break; break;
default: default:
throw new Error(`Unknown vector source ${source}`); throw new Error(`Unknown vector source ${source}`);
@@ -183,7 +195,7 @@ function getModelScope(sourceSettings) {
/** /**
* Gets the index for the vector collection * Gets the index for the vector collection
* @param {import('../users').UserDirectoryList} directories - User directories * @param {import('../users.js').UserDirectoryList} directories - User directories
* @param {string} collectionId - The collection ID * @param {string} collectionId - The collection ID
* @param {string} source - The source of the vector * @param {string} source - The source of the vector
* @param {object} sourceSettings - The model for the source * @param {object} sourceSettings - The model for the source
@@ -203,7 +215,7 @@ async function getIndex(directories, collectionId, source, sourceSettings) {
/** /**
* Inserts items into the vector collection * Inserts items into the vector collection
* @param {import('../users').UserDirectoryList} directories - User directories * @param {import('../users.js').UserDirectoryList} directories - User directories
* @param {string} collectionId - The collection ID * @param {string} collectionId - The collection ID
* @param {string} source - The source of the vector * @param {string} source - The source of the vector
* @param {Object} sourceSettings - Settings for the source, if it needs any * @param {Object} sourceSettings - Settings for the source, if it needs any
@@ -227,7 +239,7 @@ async function insertVectorItems(directories, collectionId, source, sourceSettin
/** /**
* Gets the hashes of the items in the vector collection * Gets the hashes of the items in the vector collection
* @param {import('../users').UserDirectoryList} directories - User directories * @param {import('../users.js').UserDirectoryList} directories - User directories
* @param {string} collectionId - The collection ID * @param {string} collectionId - The collection ID
* @param {string} source - The source of the vector * @param {string} source - The source of the vector
* @param {Object} sourceSettings - Settings for the source, if it needs any * @param {Object} sourceSettings - Settings for the source, if it needs any
@@ -244,7 +256,7 @@ async function getSavedHashes(directories, collectionId, source, sourceSettings)
/** /**
* Deletes items from the vector collection by hash * Deletes items from the vector collection by hash
* @param {import('../users').UserDirectoryList} directories - User directories * @param {import('../users.js').UserDirectoryList} directories - User directories
* @param {string} collectionId - The collection ID * @param {string} collectionId - The collection ID
* @param {string} source - The source of the vector * @param {string} source - The source of the vector
* @param {Object} sourceSettings - Settings for the source, if it needs any * @param {Object} sourceSettings - Settings for the source, if it needs any
@@ -265,7 +277,7 @@ async function deleteVectorItems(directories, collectionId, source, sourceSettin
/** /**
* Gets the hashes of the items in the vector collection that match the search text * Gets the hashes of the items in the vector collection that match the search text
* @param {import('../users').UserDirectoryList} directories - User directories * @param {import('../users.js').UserDirectoryList} directories - User directories
* @param {string} collectionId - The collection ID * @param {string} collectionId - The collection ID
* @param {string} source - The source of the vector * @param {string} source - The source of the vector
* @param {Object} sourceSettings - Settings for the source, if it needs any * @param {Object} sourceSettings - Settings for the source, if it needs any
@@ -286,7 +298,7 @@ async function queryCollection(directories, collectionId, source, sourceSettings
/** /**
* Queries multiple collections for the given search queries. Returns the overall top K results. * Queries multiple collections for the given search queries. Returns the overall top K results.
* @param {import('../users').UserDirectoryList} directories - User directories * @param {import('../users.js').UserDirectoryList} directories - User directories
* @param {string[]} collectionIds - The collection IDs to query * @param {string[]} collectionIds - The collection IDs to query
* @param {string} source - The source of the vector * @param {string} source - The source of the vector
* @param {Object} sourceSettings - Settings for the source, if it needs any * @param {Object} sourceSettings - Settings for the source, if it needs any
@@ -359,7 +371,7 @@ async function regenerateCorruptedIndexErrorHandler(req, res, error) {
return res.sendStatus(500); return res.sendStatus(500);
} }
const router = express.Router(); export const router = express.Router();
router.post('/query', jsonParser, async (req, res) => { router.post('/query', jsonParser, async (req, res) => {
try { try {
@@ -495,5 +507,3 @@ router.post('/purge', jsonParser, async (req, res) => {
return res.sendStatus(500); return res.sendStatus(500);
} }
}); });
module.exports = { router };

View File

@@ -1,19 +1,20 @@
const fs = require('fs'); import fs from 'node:fs';
const path = require('path'); import path from 'node:path';
const express = require('express');
const sanitize = require('sanitize-filename');
const writeFileAtomicSync = require('write-file-atomic').sync;
const { jsonParser, urlencodedParser } = require('../express-common'); import express from 'express';
import sanitize from 'sanitize-filename';
import { sync as writeFileAtomicSync } from 'write-file-atomic';
import { jsonParser, urlencodedParser } from '../express-common.js';
/** /**
* Reads a World Info file and returns its contents * Reads a World Info file and returns its contents
* @param {import('../users').UserDirectoryList} directories User directories * @param {import('../users.js').UserDirectoryList} directories User directories
* @param {string} worldInfoName Name of the World Info file * @param {string} worldInfoName Name of the World Info file
* @param {boolean} allowDummy If true, returns an empty object if the file doesn't exist * @param {boolean} allowDummy If true, returns an empty object if the file doesn't exist
* @returns {object} World Info file contents * @returns {object} World Info file contents
*/ */
function readWorldInfoFile(directories, worldInfoName, allowDummy) { export function readWorldInfoFile(directories, worldInfoName, allowDummy) {
const dummyObject = allowDummy ? { entries: {} } : null; const dummyObject = allowDummy ? { entries: {} } : null;
if (!worldInfoName) { if (!worldInfoName) {
@@ -33,7 +34,7 @@ function readWorldInfoFile(directories, worldInfoName, allowDummy) {
return worldInfo; return worldInfo;
} }
const router = express.Router(); export const router = express.Router();
router.post('/get', jsonParser, (request, response) => { router.post('/get', jsonParser, (request, response) => {
if (!request.body?.name) { if (!request.body?.name) {
@@ -122,5 +123,3 @@ router.post('/edit', jsonParser, (request, response) => {
return response.send({ ok: true }); return response.send({ ok: true });
}); });
module.exports = { router, readWorldInfoFile };

View File

@@ -1,28 +1,27 @@
const express = require('express'); import express from 'express';
const ipaddr = require('ipaddr.js'); import ipaddr from 'ipaddr.js';
// Instantiate parser middleware here with application-level size limits // Instantiate parser middleware here with application-level size limits
const jsonParser = express.json({ limit: '200mb' }); export const jsonParser = express.json({ limit: '200mb' });
const urlencodedParser = express.urlencoded({ extended: true, limit: '200mb' }); export const urlencodedParser = express.urlencoded({ extended: true, limit: '200mb' });
/** /**
* Gets the IP address of the client from the request object. * Gets the IP address of the client from the request object.
* @param {import('express'.Request)} req Request object * @param {import('express').Request} req Request object
* @returns {string} IP address of the client * @returns {string} IP address of the client
*/ */
function getIpFromRequest(req) { export function getIpFromRequest(req) {
let clientIp = req.connection.remoteAddress; let clientIp = req.socket.remoteAddress;
if (!clientIp) {
return 'unknown';
}
let ip = ipaddr.parse(clientIp); let ip = ipaddr.parse(clientIp);
// Check if the IP address is IPv4-mapped IPv6 address // Check if the IP address is IPv4-mapped IPv6 address
if (ip.kind() === 'ipv6' && ip instanceof ipaddr.IPv6 && ip.isIPv4MappedAddress()) { if (ip.kind() === 'ipv6' && ip instanceof ipaddr.IPv6 && ip.isIPv4MappedAddress()) {
const ipv4 = ip.toIPv4Address().toString(); const ipv4 = ip.toIPv4Address().toString();
clientIp = ipv4; clientIp = ipv4;
} else { } else {
clientIp = ip; clientIp = ip.toString();
clientIp = clientIp.toString();
} }
return clientIp; return clientIp;
} }
module.exports = { jsonParser, urlencodedParser, getIpFromRequest };

View File

@@ -2,9 +2,10 @@
* When applied, this middleware will ensure the request contains the required header for basic authentication and only * When applied, this middleware will ensure the request contains the required header for basic authentication and only
* allow access to the endpoint after successful authentication. * allow access to the endpoint after successful authentication.
*/ */
const { getAllUserHandles, toKey, getPasswordHash } = require('../users.js'); import { Buffer } from 'node:buffer';
const { getConfig, getConfigValue } = require('../util.js'); import storage from 'node-persist';
const storage = require('node-persist'); import { getAllUserHandles, toKey, getPasswordHash } from '../users.js';
import { getConfig, getConfigValue } from '../util.js';
const PER_USER_BASIC_AUTH = getConfigValue('perUserBasicAuth', false); const PER_USER_BASIC_AUTH = getConfigValue('perUserBasicAuth', false);
const ENABLE_ACCOUNTS = getConfigValue('enableUserAccounts', false); const ENABLE_ACCOUNTS = getConfigValue('enableUserAccounts', false);
@@ -49,4 +50,4 @@ const basicAuthMiddleware = async function (request, response, callback) {
return unauthorizedResponse(response); return unauthorizedResponse(response);
}; };
module.exports = basicAuthMiddleware; export default basicAuthMiddleware;

View File

@@ -1,3 +1,5 @@
import { Buffer } from 'node:buffer';
/** /**
* Decodes a file name from Latin1 to UTF-8. * Decodes a file name from Latin1 to UTF-8.
* @param {string} str Input string * @param {string} str Input string
@@ -14,7 +16,7 @@ function decodeFileName(str) {
* @param {import('express').Response} _res Response * @param {import('express').Response} _res Response
* @param {import('express').NextFunction} next Next middleware * @param {import('express').NextFunction} next Next middleware
*/ */
function multerMonkeyPatch(req, _res, next) { export default function multerMonkeyPatch(req, _res, next) {
try { try {
if (req.file) { if (req.file) {
req.file.originalname = decodeFileName(req.file.originalname); req.file.originalname = decodeFileName(req.file.originalname);
@@ -26,5 +28,3 @@ function multerMonkeyPatch(req, _res, next) {
next(); next();
} }
} }
module.exports = multerMonkeyPatch;

View File

@@ -1,9 +1,10 @@
const path = require('path'); import path from 'node:path';
const fs = require('fs'); import fs from 'node:fs';
const ipMatching = require('ip-matching'); import process from 'node:process';
import ipMatching from 'ip-matching';
const { getIpFromRequest } = require('../express-common'); import { getIpFromRequest } from '../express-common.js';
const { color, getConfigValue } = require('../util'); import { color, getConfigValue } from '../util.js';
const whitelistPath = path.join(process.cwd(), './whitelist.txt'); const whitelistPath = path.join(process.cwd(), './whitelist.txt');
const enableForwardedWhitelist = getConfigValue('enableForwardedWhitelist', false); const enableForwardedWhitelist = getConfigValue('enableForwardedWhitelist', false);
@@ -50,7 +51,7 @@ function getForwardedIp(req) {
* @param {boolean} listen If listen mode is enabled via config or command line * @param {boolean} listen If listen mode is enabled via config or command line
* @returns {import('express').RequestHandler} The middleware function * @returns {import('express').RequestHandler} The middleware function
*/ */
function whitelistMiddleware(whitelistMode, listen) { export default function whitelistMiddleware(whitelistMode, listen) {
return function (req, res, next) { return function (req, res, next) {
const clientIp = getIpFromRequest(req); const clientIp = getIpFromRequest(req);
const forwardedIp = getForwardedIp(req); const forwardedIp = getForwardedIp(req);
@@ -82,5 +83,3 @@ function whitelistMiddleware(whitelistMode, listen) {
next(); next();
}; };
} }
module.exports = whitelistMiddleware;

View File

@@ -1,8 +1,9 @@
const fs = require('fs'); import fs from 'node:fs';
const path = require('path'); import path from 'node:path';
const url = require('url'); import url from 'node:url';
const express = require('express');
const { getConfigValue } = require('./util'); import express from 'express';
import { getConfigValue } from './util.js';
const enableServerPlugins = getConfigValue('enableServerPlugins', false); const enableServerPlugins = getConfigValue('enableServerPlugins', false);
/** /**
@@ -16,7 +17,7 @@ const loadedPlugins = new Map();
* @param {string} file Path to file * @param {string} file Path to file
* @returns {boolean} True if file is a CommonJS module * @returns {boolean} True if file is a CommonJS module
*/ */
const isCommonJS = (file) => path.extname(file) === '.js'; const isCommonJS = (file) => path.extname(file) === '.js' || path.extname(file) === '.cjs';
/** /**
* Determine if a file is an ECMAScript module. * Determine if a file is an ECMAScript module.
@@ -32,9 +33,9 @@ const isESModule = (file) => path.extname(file) === '.mjs';
* @returns {Promise<Function>} Promise that resolves when all plugins are loaded. Resolves to a "cleanup" function to * @returns {Promise<Function>} Promise that resolves when all plugins are loaded. Resolves to a "cleanup" function to
* be called before the server shuts down. * be called before the server shuts down.
*/ */
async function loadPlugins(app, pluginsPath) { export async function loadPlugins(app, pluginsPath) {
const exitHooks = []; const exitHooks = [];
const emptyFn = () => {}; const emptyFn = () => { };
// Server plugins are disabled. // Server plugins are disabled.
if (!enableServerPlugins) { if (!enableServerPlugins) {
@@ -89,19 +90,15 @@ async function loadFromDirectory(app, pluginDirectoryPath, exitHooks) {
} }
} }
// Plugin is a CommonJS module. // Plugin is a module file.
const cjsFilePath = path.join(pluginDirectoryPath, 'index.js'); const fileTypes = ['index.js', 'index.cjs', 'index.mjs'];
if (fs.existsSync(cjsFilePath)) {
if (await loadFromFile(app, cjsFilePath, exitHooks)) {
return;
}
}
// Plugin is an ECMAScript module. for (const fileType of fileTypes) {
const esmFilePath = path.join(pluginDirectoryPath, 'index.mjs'); const filePath = path.join(pluginDirectoryPath, fileType);
if (fs.existsSync(esmFilePath)) { if (fs.existsSync(filePath)) {
if (await loadFromFile(app, esmFilePath, exitHooks)) { if (await loadFromFile(app, filePath, exitHooks)) {
return; return;
}
} }
} }
} }
@@ -217,7 +214,3 @@ async function initPlugin(app, plugin, exitHooks) {
return true; return true;
} }
module.exports = {
loadPlugins,
};

View File

@@ -1,10 +0,0 @@
if (!Array.prototype.findLastIndex) {
Array.prototype.findLastIndex = function (callback, thisArg) {
for (let i = this.length - 1; i >= 0; i--) {
if (callback.call(thisArg, this[i], i, this)) return i;
}
return -1;
};
}
module.exports = {};

View File

@@ -1,6 +1,5 @@
require('./polyfill.js'); import crypto from 'node:crypto';
const { getConfigValue } = require('./util.js'); import { getConfigValue } from './util.js';
const crypto = require('crypto');
const PROMPT_PLACEHOLDER = getConfigValue('promptPlaceholder', 'Let\'s get started.'); const PROMPT_PLACEHOLDER = getConfigValue('promptPlaceholder', 'Let\'s get started.');
@@ -17,7 +16,7 @@ const PROMPT_PLACEHOLDER = getConfigValue('promptPlaceholder', 'Let\'s get start
* @returns {string} Prompt for Claude * @returns {string} Prompt for Claude
* @copyright Prompt Conversion script taken from RisuAI by kwaroran (GPLv3). * @copyright Prompt Conversion script taken from RisuAI by kwaroran (GPLv3).
*/ */
function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, withSysPromptSupport, useSystemPrompt, addSysHumanMsg, excludePrefixes) { export function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, withSysPromptSupport, useSystemPrompt, addSysHumanMsg, excludePrefixes) {
//Prepare messages for claude. //Prepare messages for claude.
//When 'Exclude Human/Assistant prefixes' checked, setting messages role to the 'system'(last message is exception). //When 'Exclude Human/Assistant prefixes' checked, setting messages role to the 'system'(last message is exception).
@@ -96,7 +95,7 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill,
* @param {string} charName Character name * @param {string} charName Character name
* @param {string} userName User name * @param {string} userName User name
*/ */
function convertClaudeMessages(messages, prefillString, useSysPrompt, useTools, humanMsgFix, charName = '', userName = '') { export function convertClaudeMessages(messages, prefillString, useSysPrompt, useTools, humanMsgFix, charName = '', userName = '') {
let systemPrompt = []; let systemPrompt = [];
if (useSysPrompt) { if (useSysPrompt) {
// Collect all the system messages up until the first instance of a non-system message, and then remove them from the messages array. // Collect all the system messages up until the first instance of a non-system message, and then remove them from the messages array.
@@ -279,7 +278,7 @@ function convertClaudeMessages(messages, prefillString, useSysPrompt, useTools,
* @param {string} userName User name * @param {string} userName User name
* @returns {{chatHistory: object[]}} Prompt for Cohere * @returns {{chatHistory: object[]}} Prompt for Cohere
*/ */
function convertCohereMessages(messages, charName = '', userName = '') { export function convertCohereMessages(messages, charName = '', userName = '') {
if (messages.length === 0) { if (messages.length === 0) {
messages.unshift({ messages.unshift({
role: 'user', role: 'user',
@@ -333,7 +332,7 @@ function convertCohereMessages(messages, charName = '', userName = '') {
* @param {string} userName User name * @param {string} userName User name
* @returns {{contents: *[], system_instruction: {parts: {text: string}}}} Prompt for Google MakerSuite models * @returns {{contents: *[], system_instruction: {parts: {text: string}}}} Prompt for Google MakerSuite models
*/ */
function convertGooglePrompt(messages, model, useSysPrompt = false, charName = '', userName = '') { export function convertGooglePrompt(messages, model, useSysPrompt = false, charName = '', userName = '') {
// This is a 1x1 transparent PNG // This is a 1x1 transparent PNG
const PNG_PIXEL = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII='; const PNG_PIXEL = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII=';
@@ -456,7 +455,7 @@ function convertGooglePrompt(messages, model, useSysPrompt = false, charName = '
* @param {string} charName Character name * @param {string} charName Character name
* @param {string} userName User name * @param {string} userName User name
*/ */
function convertAI21Messages(messages, charName = '', userName = '') { export function convertAI21Messages(messages, charName = '', userName = '') {
if (!Array.isArray(messages)) { if (!Array.isArray(messages)) {
return []; return [];
} }
@@ -528,7 +527,7 @@ function convertAI21Messages(messages, charName = '', userName = '') {
* @param {string} charName Character name * @param {string} charName Character name
* @param {string} userName User name * @param {string} userName User name
*/ */
function convertMistralMessages(messages, charName = '', userName = '') { export function convertMistralMessages(messages, charName = '', userName = '') {
if (!Array.isArray(messages)) { if (!Array.isArray(messages)) {
return []; return [];
} }
@@ -612,7 +611,7 @@ function convertMistralMessages(messages, charName = '', userName = '') {
* @param {boolean} strict Enable strict mode: only allow one system message at the start, force user first message * @param {boolean} strict Enable strict mode: only allow one system message at the start, force user first message
* @returns {any[]} Merged messages * @returns {any[]} Merged messages
*/ */
function mergeMessages(messages, charName, userName, strict) { export function mergeMessages(messages, charName, userName, strict) {
let mergedMessages = []; let mergedMessages = [];
// Remove names from the messages // Remove names from the messages
@@ -686,7 +685,7 @@ function mergeMessages(messages, charName, userName, strict) {
* @param {object[]} messages Array of messages * @param {object[]} messages Array of messages
* @returns {string} Prompt for Text Completion API * @returns {string} Prompt for Text Completion API
*/ */
function convertTextCompletionPrompt(messages) { export function convertTextCompletionPrompt(messages) {
if (typeof messages === 'string') { if (typeof messages === 'string') {
return messages; return messages;
} }
@@ -705,14 +704,3 @@ function convertTextCompletionPrompt(messages) {
}); });
return messageStrings.join('\n') + '\nassistant:'; return messageStrings.join('\n') + '\nassistant:';
} }
module.exports = {
convertClaudePrompt,
convertClaudeMessages,
convertGooglePrompt,
convertTextCompletionPrompt,
convertCohereMessages,
convertMistralMessages,
convertAI21Messages,
mergeMessages,
};

View File

@@ -1,7 +1,8 @@
const http = require('node:http'); import process from 'node:process';
const https = require('node:https'); import http from 'node:http';
import https from 'node:https';
const { isValidUrl, color } = require('./util.js'); import { ProxyAgent } from 'proxy-agent';
import { isValidUrl, color } from './util.js';
const LOG_HEADER = '[Request Proxy]'; const LOG_HEADER = '[Request Proxy]';
@@ -13,10 +14,8 @@ const LOG_HEADER = '[Request Proxy]';
* @property {string} url Proxy URL. * @property {string} url Proxy URL.
* @property {string[]} bypass List of URLs to bypass proxy. * @property {string[]} bypass List of URLs to bypass proxy.
*/ */
function initRequestProxy({ enabled, url, bypass }) { export default function initRequestProxy({ enabled, url, bypass }) {
try { try {
const { ProxyAgent } = require('proxy-agent');
// No proxy is enabled, so return // No proxy is enabled, so return
if (!enabled) { if (!enabled) {
return; return;
@@ -36,7 +35,6 @@ function initRequestProxy({ enabled, url, bypass }) {
// Reference: https://github.com/Rob--W/proxy-from-env // Reference: https://github.com/Rob--W/proxy-from-env
process.env.all_proxy = url; process.env.all_proxy = url;
if (Array.isArray(bypass) && bypass.length > 0) { if (Array.isArray(bypass) && bypass.length > 0) {
process.env.no_proxy = bypass.join(','); process.env.no_proxy = bypass.join(',');
} }
@@ -52,5 +50,3 @@ function initRequestProxy({ enabled, url, bypass }) {
console.error(color.red(LOG_HEADER), 'Failed to initialize request proxy:', error); console.error(color.red(LOG_HEADER), 'Failed to initialize request proxy:', error);
} }
} }
module.exports = initRequestProxy;

View File

@@ -1,7 +1,10 @@
import { pipeline, env, RawImage, Pipeline } from 'sillytavern-transformers'; import path from 'node:path';
import fs from 'node:fs';
import process from 'node:process';
import { Buffer } from 'node:buffer';
import { pipeline, env, RawImage } from 'sillytavern-transformers';
import { getConfigValue } from './util.js'; import { getConfigValue } from './util.js';
import path from 'path';
import fs from 'fs';
configureTransformers(); configureTransformers();
@@ -50,7 +53,7 @@ const tasks = {
* @param {string} image Base64-encoded image * @param {string} image Base64-encoded image
* @returns {Promise<RawImage|null>} Object representing the image * @returns {Promise<RawImage|null>} Object representing the image
*/ */
async function getRawImage(image) { export async function getRawImage(image) {
try { try {
const buffer = Buffer.from(image, 'base64'); const buffer = Buffer.from(image, 'base64');
const byteArray = new Uint8Array(buffer); const byteArray = new Uint8Array(buffer);
@@ -114,9 +117,9 @@ async function migrateCacheToDataDir() {
* Gets the transformers.js pipeline for a given task. * Gets the transformers.js pipeline for a given task.
* @param {import('sillytavern-transformers').PipelineType} task The task to get the pipeline for * @param {import('sillytavern-transformers').PipelineType} task The task to get the pipeline for
* @param {string} forceModel The model to use for the pipeline, if any * @param {string} forceModel The model to use for the pipeline, if any
* @returns {Promise<Pipeline>} Pipeline for the task * @returns {Promise<import('sillytavern-transformers').Pipeline>} The transformers.js pipeline
*/ */
async function getPipeline(task, forceModel = '') { export async function getPipeline(task, forceModel = '') {
await migrateCacheToDataDir(); await migrateCacheToDataDir();
if (tasks[task].pipeline) { if (tasks[task].pipeline) {
@@ -134,10 +137,11 @@ async function getPipeline(task, forceModel = '') {
const instance = await pipeline(task, model, { cache_dir: cacheDir, quantized: tasks[task].quantized ?? true, local_files_only: localOnly }); const instance = await pipeline(task, model, { cache_dir: cacheDir, quantized: tasks[task].quantized ?? true, local_files_only: localOnly });
tasks[task].pipeline = instance; tasks[task].pipeline = instance;
tasks[task].currentModel = model; tasks[task].currentModel = model;
// @ts-ignore
return instance; return instance;
} }
export default { export default {
getPipeline,
getRawImage, getRawImage,
getPipeline,
}; };

View File

@@ -1,22 +1,25 @@
// Native Node Modules // Native Node Modules
const path = require('path'); import path from 'node:path';
const fs = require('fs'); import fs from 'node:fs';
const crypto = require('crypto'); import crypto from 'node:crypto';
const os = require('os'); import os from 'node:os';
import process from 'node:process';
import { Buffer } from 'node:buffer';
// Express and other dependencies // Express and other dependencies
const storage = require('node-persist'); import storage from 'node-persist';
const express = require('express'); import express from 'express';
const mime = require('mime-types'); import mime from 'mime-types';
const archiver = require('archiver'); import archiver from 'archiver';
const writeFileAtomicSync = require('write-file-atomic').sync; import _ from 'lodash';
const _ = require('lodash'); import { sync as writeFileAtomicSync } from 'write-file-atomic';
const { USER_DIRECTORY_TEMPLATE, DEFAULT_USER, PUBLIC_DIRECTORIES, SETTINGS_FILE } = require('./constants'); import { USER_DIRECTORY_TEMPLATE, DEFAULT_USER, PUBLIC_DIRECTORIES, SETTINGS_FILE } from './constants.js';
const { getConfigValue, color, delay, setConfigValue, generateTimestamp } = require('./util'); import { getConfigValue, color, delay, setConfigValue, generateTimestamp } from './util.js';
const { readSecret, writeSecret } = require('./endpoints/secrets'); import { readSecret, writeSecret } from './endpoints/secrets.js';
import { getContentOfType } from './endpoints/content-manager.js';
const KEY_PREFIX = 'user:'; export const KEY_PREFIX = 'user:';
const AVATAR_PREFIX = 'avatar:'; const AVATAR_PREFIX = 'avatar:';
const ENABLE_ACCOUNTS = getConfigValue('enableUserAccounts', false); const ENABLE_ACCOUNTS = getConfigValue('enableUserAccounts', false);
const AUTHELIA_AUTH = getConfigValue('autheliaAuth', false); const AUTHELIA_AUTH = getConfigValue('autheliaAuth', false);
@@ -92,9 +95,9 @@ const STORAGE_KEYS = {
/** /**
* Ensures that the content directories exist. * Ensures that the content directories exist.
* @returns {Promise<import('./users').UserDirectoryList[]>} - The list of user directories * @returns {Promise<import('./users.js').UserDirectoryList[]>} - The list of user directories
*/ */
async function ensurePublicDirectoriesExist() { export async function ensurePublicDirectoriesExist() {
for (const dir of Object.values(PUBLIC_DIRECTORIES)) { for (const dir of Object.values(PUBLIC_DIRECTORIES)) {
if (!fs.existsSync(dir)) { if (!fs.existsSync(dir)) {
fs.mkdirSync(dir, { recursive: true }); fs.mkdirSync(dir, { recursive: true });
@@ -115,9 +118,9 @@ async function ensurePublicDirectoriesExist() {
/** /**
* Gets a list of all user directories. * Gets a list of all user directories.
* @returns {Promise<import('./users').UserDirectoryList[]>} - The list of user directories * @returns {Promise<import('./users.js').UserDirectoryList[]>} - The list of user directories
*/ */
async function getUserDirectoriesList() { export async function getUserDirectoriesList() {
const userHandles = await getAllUserHandles(); const userHandles = await getAllUserHandles();
const directoriesList = userHandles.map(handle => getUserDirectories(handle)); const directoriesList = userHandles.map(handle => getUserDirectories(handle));
return directoriesList; return directoriesList;
@@ -126,7 +129,7 @@ async function getUserDirectoriesList() {
/** /**
* Perform migration from the old user data format to the new one. * Perform migration from the old user data format to the new one.
*/ */
async function migrateUserData() { export async function migrateUserData() {
const publicDirectory = path.join(process.cwd(), 'public'); const publicDirectory = path.join(process.cwd(), 'public');
// No need to migrate if the characters directory doesn't exists // No need to migrate if the characters directory doesn't exists
@@ -328,14 +331,13 @@ async function migrateUserData() {
console.log(color.green('Migration completed!')); console.log(color.green('Migration completed!'));
} }
async function migrateSystemPrompts() { export async function migrateSystemPrompts() {
/** /**
* Gets the default system prompts. * Gets the default system prompts.
* @returns {Promise<any[]>} - The list of default system prompts * @returns {Promise<any[]>} - The list of default system prompts
*/ */
async function getDefaultSystemPrompts() { async function getDefaultSystemPrompts() {
try { try {
const { getContentOfType } = await import('./endpoints/content-manager.js');
return getContentOfType('sysprompt', 'json'); return getContentOfType('sysprompt', 'json');
} catch { } catch {
return []; return [];
@@ -391,7 +393,7 @@ async function migrateSystemPrompts() {
* @param {string} handle User handle * @param {string} handle User handle
* @returns {string} The key for the user storage * @returns {string} The key for the user storage
*/ */
function toKey(handle) { export function toKey(handle) {
return `${KEY_PREFIX}${handle}`; return `${KEY_PREFIX}${handle}`;
} }
@@ -400,7 +402,7 @@ function toKey(handle) {
* @param {string} handle User handle * @param {string} handle User handle
* @returns {string} The key for the avatar storage * @returns {string} The key for the avatar storage
*/ */
function toAvatarKey(handle) { export function toAvatarKey(handle) {
return `${AVATAR_PREFIX}${handle}`; return `${AVATAR_PREFIX}${handle}`;
} }
@@ -409,7 +411,7 @@ function toAvatarKey(handle) {
* @param {string} dataRoot The root directory for user data * @param {string} dataRoot The root directory for user data
* @returns {Promise<void>} * @returns {Promise<void>}
*/ */
async function initUserStorage(dataRoot) { export async function initUserStorage(dataRoot) {
global.DATA_ROOT = dataRoot; global.DATA_ROOT = dataRoot;
console.log('Using data root:', color.green(global.DATA_ROOT)); console.log('Using data root:', color.green(global.DATA_ROOT));
console.log(); console.log();
@@ -430,7 +432,7 @@ async function initUserStorage(dataRoot) {
* Get the cookie secret from the config. If it doesn't exist, generate a new one. * Get the cookie secret from the config. If it doesn't exist, generate a new one.
* @returns {string} The cookie secret * @returns {string} The cookie secret
*/ */
function getCookieSecret() { export function getCookieSecret() {
let secret = getConfigValue(STORAGE_KEYS.cookieSecret); let secret = getConfigValue(STORAGE_KEYS.cookieSecret);
if (!secret) { if (!secret) {
@@ -446,7 +448,7 @@ function getCookieSecret() {
* Generates a random password salt. * Generates a random password salt.
* @returns {string} The password salt * @returns {string} The password salt
*/ */
function getPasswordSalt() { export function getPasswordSalt() {
return crypto.randomBytes(16).toString('base64'); return crypto.randomBytes(16).toString('base64');
} }
@@ -454,7 +456,7 @@ function getPasswordSalt() {
* Get the session name for the current server. * Get the session name for the current server.
* @returns {string} The session name * @returns {string} The session name
*/ */
function getCookieSessionName() { export function getCookieSessionName() {
// Get server hostname and hash it to generate a session suffix // Get server hostname and hash it to generate a session suffix
const suffix = crypto.createHash('sha256').update(os.hostname()).digest('hex').slice(0, 8); const suffix = crypto.createHash('sha256').update(os.hostname()).digest('hex').slice(0, 8);
return `session-${suffix}`; return `session-${suffix}`;
@@ -466,7 +468,7 @@ function getCookieSessionName() {
* @param {string} salt Salt to use for hashing * @param {string} salt Salt to use for hashing
* @returns {string} Hashed password * @returns {string} Hashed password
*/ */
function getPasswordHash(password, salt) { export function getPasswordHash(password, salt) {
return crypto.scryptSync(password.normalize(), salt, 64).toString('base64'); return crypto.scryptSync(password.normalize(), salt, 64).toString('base64');
} }
@@ -475,7 +477,7 @@ function getPasswordHash(password, salt) {
* @param {import('express').Request} [request] HTTP request object * @param {import('express').Request} [request] HTTP request object
* @returns {string} The CSRF secret * @returns {string} The CSRF secret
*/ */
function getCsrfSecret(request) { export function getCsrfSecret(request) {
if (!request || !request.user) { if (!request || !request.user) {
return ANON_CSRF_SECRET; return ANON_CSRF_SECRET;
} }
@@ -494,7 +496,7 @@ function getCsrfSecret(request) {
* Gets a list of all user handles. * Gets a list of all user handles.
* @returns {Promise<string[]>} - The list of user handles * @returns {Promise<string[]>} - The list of user handles
*/ */
async function getAllUserHandles() { export async function getAllUserHandles() {
const keys = await storage.keys(x => x.key.startsWith(KEY_PREFIX)); const keys = await storage.keys(x => x.key.startsWith(KEY_PREFIX));
const handles = keys.map(x => x.replace(KEY_PREFIX, '')); const handles = keys.map(x => x.replace(KEY_PREFIX, ''));
return handles; return handles;
@@ -505,7 +507,7 @@ async function getAllUserHandles() {
* @param {string} handle User handle * @param {string} handle User handle
* @returns {UserDirectoryList} User directories * @returns {UserDirectoryList} User directories
*/ */
function getUserDirectories(handle) { export function getUserDirectories(handle) {
if (DIRECTORIES_CACHE.has(handle)) { if (DIRECTORIES_CACHE.has(handle)) {
const cache = DIRECTORIES_CACHE.get(handle); const cache = DIRECTORIES_CACHE.get(handle);
if (cache) { if (cache) {
@@ -526,7 +528,7 @@ function getUserDirectories(handle) {
* @param {string} handle User handle * @param {string} handle User handle
* @returns {Promise<string>} User avatar URL * @returns {Promise<string>} User avatar URL
*/ */
async function getUserAvatar(handle) { export async function getUserAvatar(handle) {
try { try {
// Check if the user has a custom avatar // Check if the user has a custom avatar
const avatarKey = toAvatarKey(handle); const avatarKey = toAvatarKey(handle);
@@ -563,7 +565,7 @@ async function getUserAvatar(handle) {
* @param {import('express').Request} request Request object * @param {import('express').Request} request Request object
* @returns {boolean} Whether the user should be redirected to the login page * @returns {boolean} Whether the user should be redirected to the login page
*/ */
function shouldRedirectToLogin(request) { export function shouldRedirectToLogin(request) {
return ENABLE_ACCOUNTS && !request.user; return ENABLE_ACCOUNTS && !request.user;
} }
@@ -574,7 +576,7 @@ function shouldRedirectToLogin(request) {
* @param {boolean} basicAuthMode If Basic auth mode is enabled * @param {boolean} basicAuthMode If Basic auth mode is enabled
* @returns {Promise<boolean>} Whether auto-login was performed * @returns {Promise<boolean>} Whether auto-login was performed
*/ */
async function tryAutoLogin(request, basicAuthMode) { export async function tryAutoLogin(request, basicAuthMode) {
if (!ENABLE_ACCOUNTS || request.user || !request.session) { if (!ENABLE_ACCOUNTS || request.user || !request.session) {
return false; return false;
} }
@@ -693,7 +695,7 @@ async function basicUserLogin(request) {
* @param {import('express').Response} response Response object * @param {import('express').Response} response Response object
* @param {import('express').NextFunction} next Next function * @param {import('express').NextFunction} next Next function
*/ */
async function setUserDataMiddleware(request, response, next) { export async function setUserDataMiddleware(request, response, next) {
// If user accounts are disabled, use the default user // If user accounts are disabled, use the default user
if (!ENABLE_ACCOUNTS) { if (!ENABLE_ACCOUNTS) {
const handle = DEFAULT_USER.handle; const handle = DEFAULT_USER.handle;
@@ -751,7 +753,7 @@ async function setUserDataMiddleware(request, response, next) {
* @param {import('express').Response} response Response object * @param {import('express').Response} response Response object
* @param {import('express').NextFunction} next Next function * @param {import('express').NextFunction} next Next function
*/ */
function requireLoginMiddleware(request, response, next) { export function requireLoginMiddleware(request, response, next) {
if (!request.user) { if (!request.user) {
return response.sendStatus(403); return response.sendStatus(403);
} }
@@ -787,7 +789,7 @@ function createRouteHandler(directoryFn) {
* @param {import('express').NextFunction} next Next function * @param {import('express').NextFunction} next Next function
* @returns {any} * @returns {any}
*/ */
function requireAdminMiddleware(request, response, next) { export function requireAdminMiddleware(request, response, next) {
if (!request.user) { if (!request.user) {
return response.sendStatus(403); return response.sendStatus(403);
} }
@@ -806,7 +808,7 @@ function requireAdminMiddleware(request, response, next) {
* @param {import('express').Response} response Express response object to write to * @param {import('express').Response} response Express response object to write to
* @returns {Promise<void>} Promise that resolves when the archive is created * @returns {Promise<void>} Promise that resolves when the archive is created
*/ */
async function createBackupArchive(handle, response) { export async function createBackupArchive(handle, response) {
const directories = getUserDirectories(handle); const directories = getUserDirectories(handle);
console.log('Backup requested for', handle); console.log('Backup requested for', handle);
@@ -855,7 +857,7 @@ async function getAllUsers() {
* Gets all of the enabled users. * Gets all of the enabled users.
* @returns {Promise<User[]>} * @returns {Promise<User[]>}
*/ */
async function getAllEnabledUsers() { export async function getAllEnabledUsers() {
const users = await getAllUsers(); const users = await getAllUsers();
return users.filter(x => x.enabled); return users.filter(x => x.enabled);
} }
@@ -863,7 +865,7 @@ async function getAllEnabledUsers() {
/** /**
* Express router for serving files from the user's directories. * Express router for serving files from the user's directories.
*/ */
const router = express.Router(); export const router = express.Router();
router.use('/backgrounds/*', createRouteHandler(req => req.user.directories.backgrounds)); router.use('/backgrounds/*', createRouteHandler(req => req.user.directories.backgrounds));
router.use('/characters/*', createRouteHandler(req => req.user.directories.characters)); router.use('/characters/*', createRouteHandler(req => req.user.directories.characters));
router.use('/User%20Avatars/*', createRouteHandler(req => req.user.directories.avatars)); router.use('/User%20Avatars/*', createRouteHandler(req => req.user.directories.avatars));
@@ -871,31 +873,3 @@ router.use('/assets/*', createRouteHandler(req => req.user.directories.assets));
router.use('/user/images/*', createRouteHandler(req => req.user.directories.userImages)); router.use('/user/images/*', createRouteHandler(req => req.user.directories.userImages));
router.use('/user/files/*', createRouteHandler(req => req.user.directories.files)); router.use('/user/files/*', createRouteHandler(req => req.user.directories.files));
router.use('/scripts/extensions/third-party/*', createRouteHandler(req => req.user.directories.extensions)); router.use('/scripts/extensions/third-party/*', createRouteHandler(req => req.user.directories.extensions));
module.exports = {
KEY_PREFIX,
toKey,
toAvatarKey,
initUserStorage,
ensurePublicDirectoriesExist,
getUserDirectoriesList,
getAllUserHandles,
getUserDirectories,
setUserDataMiddleware,
requireLoginMiddleware,
requireAdminMiddleware,
migrateUserData,
migrateSystemPrompts,
getPasswordSalt,
getPasswordHash,
getCsrfSecret,
getCookieSecret,
getCookieSessionName,
getUserAvatar,
shouldRedirectToLogin,
createBackupArchive,
tryAutoLogin,
getAllUsers,
getAllEnabledUsers,
router,
};

View File

@@ -1,13 +1,18 @@
const path = require('path'); import path from 'node:path';
const fs = require('fs'); import fs from 'node:fs';
const commandExistsSync = require('command-exists').sync; import http2 from 'node:http2';
const writeFileAtomicSync = require('write-file-atomic').sync; import process from 'node:process';
const _ = require('lodash'); import { Readable } from 'node:stream';
const yauzl = require('yauzl'); import { createRequire } from 'node:module';
const mime = require('mime-types'); import { Buffer } from 'node:buffer';
const yaml = require('yaml');
const { default: simpleGit } = require('simple-git'); import yaml from 'yaml';
const { Readable } = require('stream'); import { sync as commandExistsSync } from 'command-exists';
import { sync as writeFileAtomicSync } from 'write-file-atomic';
import _ from 'lodash';
import yauzl from 'yauzl';
import mime from 'mime-types';
import { default as simpleGit } from 'simple-git';
/** /**
* Parsed config object. * Parsed config object.
@@ -18,7 +23,7 @@ let CACHED_CONFIG = null;
* Returns the config object from the config.yaml file. * Returns the config object from the config.yaml file.
* @returns {object} Config object * @returns {object} Config object
*/ */
function getConfig() { export function getConfig() {
if (CACHED_CONFIG) { if (CACHED_CONFIG) {
return CACHED_CONFIG; return CACHED_CONFIG;
} }
@@ -46,7 +51,7 @@ function getConfig() {
* @param {any} defaultValue - Default value to return if the key is not found * @param {any} defaultValue - Default value to return if the key is not found
* @returns {any} Value for the given key * @returns {any} Value for the given key
*/ */
function getConfigValue(key, defaultValue = null) { export function getConfigValue(key, defaultValue = null) {
const config = getConfig(); const config = getConfig();
return _.get(config, key, defaultValue); return _.get(config, key, defaultValue);
} }
@@ -56,7 +61,7 @@ function getConfigValue(key, defaultValue = null) {
* @param {string} key Key to set * @param {string} key Key to set
* @param {any} value Value to set * @param {any} value Value to set
*/ */
function setConfigValue(key, value) { export function setConfigValue(key, value) {
// Reset cache so that the next getConfig call will read the updated config file // Reset cache so that the next getConfig call will read the updated config file
CACHED_CONFIG = null; CACHED_CONFIG = null;
const config = getConfig(); const config = getConfig();
@@ -69,7 +74,7 @@ function setConfigValue(key, value) {
* @param {string} auth username:password * @param {string} auth username:password
* @returns {string} Basic Auth header value * @returns {string} Basic Auth header value
*/ */
function getBasicAuthHeader(auth) { export function getBasicAuthHeader(auth) {
const encoded = Buffer.from(`${auth}`).toString('base64'); const encoded = Buffer.from(`${auth}`).toString('base64');
return `Basic ${encoded}`; return `Basic ${encoded}`;
} }
@@ -79,7 +84,7 @@ function getBasicAuthHeader(auth) {
* Also returns the agent string for the Horde API. * Also returns the agent string for the Horde API.
* @returns {Promise<{agent: string, pkgVersion: string, gitRevision: string | null, gitBranch: string | null, commitDate: string | null, isLatest: boolean}>} Version info object * @returns {Promise<{agent: string, pkgVersion: string, gitRevision: string | null, gitBranch: string | null, commitDate: string | null, isLatest: boolean}>} Version info object
*/ */
async function getVersion() { export async function getVersion() {
let pkgVersion = 'UNKNOWN'; let pkgVersion = 'UNKNOWN';
let gitRevision = null; let gitRevision = null;
let gitBranch = null; let gitBranch = null;
@@ -87,9 +92,10 @@ async function getVersion() {
let isLatest = true; let isLatest = true;
try { try {
const require = createRequire(import.meta.url);
const pkgJson = require(path.join(process.cwd(), './package.json')); const pkgJson = require(path.join(process.cwd(), './package.json'));
pkgVersion = pkgJson.version; pkgVersion = pkgJson.version;
if (!process['pkg'] && commandExistsSync('git')) { if (commandExistsSync('git')) {
const git = simpleGit(); const git = simpleGit();
const cwd = process.cwd(); const cwd = process.cwd();
gitRevision = await git.cwd(cwd).revparse(['--short', 'HEAD']); gitRevision = await git.cwd(cwd).revparse(['--short', 'HEAD']);
@@ -117,7 +123,7 @@ async function getVersion() {
* @param {number} ms Milliseconds to wait * @param {number} ms Milliseconds to wait
* @returns {Promise<void>} Promise that resolves after the given amount of milliseconds * @returns {Promise<void>} Promise that resolves after the given amount of milliseconds
*/ */
function delay(ms) { export function delay(ms) {
return new Promise(resolve => setTimeout(resolve, ms)); return new Promise(resolve => setTimeout(resolve, ms));
} }
@@ -127,7 +133,7 @@ function delay(ms) {
* @returns {string} Random hex string * @returns {string} Random hex string
* @example getHexString(8) // 'a1b2c3d4' * @example getHexString(8) // 'a1b2c3d4'
*/ */
function getHexString(length) { export function getHexString(length) {
const chars = '0123456789abcdef'; const chars = '0123456789abcdef';
let result = ''; let result = '';
for (let i = 0; i < length; i++) { for (let i = 0; i < length; i++) {
@@ -142,7 +148,7 @@ function getHexString(length) {
* @param {string} fileExtension File extension to look for * @param {string} fileExtension File extension to look for
* @returns {Promise<Buffer|null>} Buffer containing the extracted file. Null if the file was not found. * @returns {Promise<Buffer|null>} Buffer containing the extracted file. Null if the file was not found.
*/ */
async function extractFileFromZipBuffer(archiveBuffer, fileExtension) { export async function extractFileFromZipBuffer(archiveBuffer, fileExtension) {
return await new Promise((resolve, reject) => yauzl.fromBuffer(Buffer.from(archiveBuffer), { lazyEntries: true }, (err, zipfile) => { return await new Promise((resolve, reject) => yauzl.fromBuffer(Buffer.from(archiveBuffer), { lazyEntries: true }, (err, zipfile) => {
if (err) { if (err) {
reject(err); reject(err);
@@ -181,7 +187,7 @@ async function extractFileFromZipBuffer(archiveBuffer, fileExtension) {
* @param {string} zipFilePath Path to the ZIP archive * @param {string} zipFilePath Path to the ZIP archive
* @returns {Promise<[string, Buffer][]>} Array of image buffers * @returns {Promise<[string, Buffer][]>} Array of image buffers
*/ */
async function getImageBuffers(zipFilePath) { export async function getImageBuffers(zipFilePath) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
// Check if the zip file exists // Check if the zip file exists
if (!fs.existsSync(zipFilePath)) { if (!fs.existsSync(zipFilePath)) {
@@ -237,7 +243,7 @@ async function getImageBuffers(zipFilePath) {
* @param {any} readableStream Readable stream to read from * @param {any} readableStream Readable stream to read from
* @returns {Promise<Buffer[]>} Array of chunks * @returns {Promise<Buffer[]>} Array of chunks
*/ */
async function readAllChunks(readableStream) { export async function readAllChunks(readableStream) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
// Consume the readable stream // Consume the readable stream
const chunks = []; const chunks = [];
@@ -261,7 +267,7 @@ function isObject(item) {
return (item && typeof item === 'object' && !Array.isArray(item)); return (item && typeof item === 'object' && !Array.isArray(item));
} }
function deepMerge(target, source) { export function deepMerge(target, source) {
let output = Object.assign({}, target); let output = Object.assign({}, target);
if (isObject(target) && isObject(source)) { if (isObject(target) && isObject(source)) {
Object.keys(source).forEach(key => { Object.keys(source).forEach(key => {
@@ -278,7 +284,7 @@ function deepMerge(target, source) {
return output; return output;
} }
const color = { export const color = {
byNum: (mess, fgNum) => { byNum: (mess, fgNum) => {
mess = mess || ''; mess = mess || '';
fgNum = fgNum === undefined ? 31 : fgNum; fgNum = fgNum === undefined ? 31 : fgNum;
@@ -298,7 +304,7 @@ const color = {
* Gets a random UUIDv4 string. * Gets a random UUIDv4 string.
* @returns {string} A UUIDv4 string * @returns {string} A UUIDv4 string
*/ */
function uuidv4() { export function uuidv4() {
if ('crypto' in global && 'randomUUID' in global.crypto) { if ('crypto' in global && 'randomUUID' in global.crypto) {
return global.crypto.randomUUID(); return global.crypto.randomUUID();
} }
@@ -309,7 +315,7 @@ function uuidv4() {
}); });
} }
function humanizedISO8601DateTime(date) { export function humanizedISO8601DateTime(date) {
let baseDate = typeof date === 'number' ? new Date(date) : new Date(); let baseDate = typeof date === 'number' ? new Date(date) : new Date();
let humanYear = baseDate.getFullYear(); let humanYear = baseDate.getFullYear();
let humanMonth = (baseDate.getMonth() + 1); let humanMonth = (baseDate.getMonth() + 1);
@@ -322,7 +328,7 @@ function humanizedISO8601DateTime(date) {
return HumanizedDateTime; return HumanizedDateTime;
} }
function tryParse(str) { export function tryParse(str) {
try { try {
return JSON.parse(str); return JSON.parse(str);
} catch { } catch {
@@ -337,7 +343,7 @@ function tryParse(str) {
* @param {string} inputPath The path to be converted. * @param {string} inputPath The path to be converted.
* @returns The relative URL path from which the client can access the file. * @returns The relative URL path from which the client can access the file.
*/ */
function clientRelativePath(root, inputPath) { export function clientRelativePath(root, inputPath) {
if (!inputPath.startsWith(root)) { if (!inputPath.startsWith(root)) {
throw new Error('Input path does not start with the root directory'); throw new Error('Input path does not start with the root directory');
} }
@@ -350,11 +356,11 @@ function clientRelativePath(root, inputPath) {
* @param {string} filename The file name to remove the extension from. * @param {string} filename The file name to remove the extension from.
* @returns The file name, sans extension * @returns The file name, sans extension
*/ */
function removeFileExtension(filename) { export function removeFileExtension(filename) {
return filename.replace(/\.[^.]+$/, ''); return filename.replace(/\.[^.]+$/, '');
} }
function generateTimestamp() { export function generateTimestamp() {
const now = new Date(); const now = new Date();
const year = now.getFullYear(); const year = now.getFullYear();
const month = String(now.getMonth() + 1).padStart(2, '0'); const month = String(now.getMonth() + 1).padStart(2, '0');
@@ -371,7 +377,7 @@ function generateTimestamp() {
* @param {string} directory The root directory to remove backups from. * @param {string} directory The root directory to remove backups from.
* @param {string} prefix File prefix to filter backups by. * @param {string} prefix File prefix to filter backups by.
*/ */
function removeOldBackups(directory, prefix) { export function removeOldBackups(directory, prefix) {
const MAX_BACKUPS = Number(getConfigValue('numberOfBackups', 50)); const MAX_BACKUPS = Number(getConfigValue('numberOfBackups', 50));
let files = fs.readdirSync(directory).filter(f => f.startsWith(prefix)); let files = fs.readdirSync(directory).filter(f => f.startsWith(prefix));
@@ -389,7 +395,7 @@ function removeOldBackups(directory, prefix) {
* @param {'name' | 'date'} sortBy Sort images by name or date * @param {'name' | 'date'} sortBy Sort images by name or date
* @returns {string[]} List of image file names * @returns {string[]} List of image file names
*/ */
function getImages(directoryPath, sortBy = 'name') { export function getImages(directoryPath, sortBy = 'name') {
function getSortFunction() { function getSortFunction() {
switch (sortBy) { switch (sortBy) {
case 'name': case 'name':
@@ -415,7 +421,7 @@ function getImages(directoryPath, sortBy = 'name') {
* @param {import('node-fetch').Response} from The Fetch API response to pipe from. * @param {import('node-fetch').Response} from The Fetch API response to pipe from.
* @param {import('express').Response} to The Express response to pipe to. * @param {import('express').Response} to The Express response to pipe to.
*/ */
function forwardFetchResponse(from, to) { export function forwardFetchResponse(from, to) {
let statusCode = from.status; let statusCode = from.status;
let statusText = from.statusText; let statusText = from.statusText;
@@ -434,17 +440,22 @@ function forwardFetchResponse(from, to) {
to.statusCode = statusCode; to.statusCode = statusCode;
to.statusMessage = statusText; to.statusMessage = statusText;
from.body.pipe(to);
to.socket.on('close', function () { if (from.body && to.socket) {
if (from.body instanceof Readable) from.body.destroy(); // Close the remote stream from.body.pipe(to);
to.end(); // End the Express response
});
from.body.on('end', function () { to.socket.on('close', function () {
console.log('Streaming request finished'); if (from.body instanceof Readable) from.body.destroy(); // Close the remote stream
to.end(); // End the Express response
});
from.body.on('end', function () {
console.log('Streaming request finished');
to.end();
});
} else {
to.end(); to.end();
}); }
} }
/** /**
@@ -457,10 +468,9 @@ function forwardFetchResponse(from, to) {
* @param {object} headers Request headers * @param {object} headers Request headers
* @returns {Promise<string>} Response body * @returns {Promise<string>} Response body
*/ */
function makeHttp2Request(endpoint, method, body, headers) { export function makeHttp2Request(endpoint, method, body, headers) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
try { try {
const http2 = require('http2');
const url = new URL(endpoint); const url = new URL(endpoint);
const client = http2.connect(url.origin); const client = http2.connect(url.origin);
@@ -511,7 +521,7 @@ function makeHttp2Request(endpoint, method, body, headers) {
* @param {string} yamlString YAML-serialized object * @param {string} yamlString YAML-serialized object
* @returns * @returns
*/ */
function mergeObjectWithYaml(obj, yamlString) { export function mergeObjectWithYaml(obj, yamlString) {
if (!yamlString) { if (!yamlString) {
return; return;
} }
@@ -540,7 +550,7 @@ function mergeObjectWithYaml(obj, yamlString) {
* @param {string} yamlString YAML-serialized array * @param {string} yamlString YAML-serialized array
* @returns {void} Nothing * @returns {void} Nothing
*/ */
function excludeKeysByYaml(obj, yamlString) { export function excludeKeysByYaml(obj, yamlString) {
if (!yamlString) { if (!yamlString) {
return; return;
} }
@@ -569,14 +579,14 @@ function excludeKeysByYaml(obj, yamlString) {
* @param {string} str Input string * @param {string} str Input string
* @returns {string} Trimmed string * @returns {string} Trimmed string
*/ */
function trimV1(str) { export function trimV1(str) {
return String(str ?? '').replace(/\/$/, '').replace(/\/v1$/, ''); return String(str ?? '').replace(/\/$/, '').replace(/\/v1$/, '');
} }
/** /**
* Simple TTL memory cache. * Simple TTL memory cache.
*/ */
class Cache { export class Cache {
/** /**
* @param {number} ttl Time to live in milliseconds * @param {number} ttl Time to live in milliseconds
*/ */
@@ -633,7 +643,7 @@ class Cache {
* @param {string} text Text with color formatting * @param {string} text Text with color formatting
* @returns {string} Text without color formatting * @returns {string} Text without color formatting
*/ */
function removeColorFormatting(text) { export function removeColorFormatting(text) {
// ANSI escape codes for colors are usually in the format \x1b[<codes>m // ANSI escape codes for colors are usually in the format \x1b[<codes>m
return text.replace(/\x1b\[\d{1,2}(;\d{1,2})*m/g, ''); return text.replace(/\x1b\[\d{1,2}(;\d{1,2})*m/g, '');
} }
@@ -643,7 +653,7 @@ function removeColorFormatting(text) {
* @param {number} n Number of times to repeat the separator * @param {number} n Number of times to repeat the separator
* @returns {string} Separator string * @returns {string} Separator string
*/ */
function getSeparator(n) { export function getSeparator(n) {
return '='.repeat(n); return '='.repeat(n);
} }
@@ -652,7 +662,7 @@ function getSeparator(n) {
* @param {string} url String to check * @param {string} url String to check
* @returns {boolean} If the URL is valid * @returns {boolean} If the URL is valid
*/ */
function isValidUrl(url) { export function isValidUrl(url) {
try { try {
new URL(url); new URL(url);
return true; return true;
@@ -660,35 +670,3 @@ function isValidUrl(url) {
return false; return false;
} }
} }
module.exports = {
getConfig,
getConfigValue,
setConfigValue,
getVersion,
getBasicAuthHeader,
extractFileFromZipBuffer,
getImageBuffers,
readAllChunks,
delay,
deepMerge,
color,
uuidv4,
humanizedISO8601DateTime,
tryParse,
clientRelativePath,
removeFileExtension,
generateTimestamp,
removeOldBackups,
getImages,
forwardFetchResponse,
getHexString,
mergeObjectWithYaml,
excludeKeysByYaml,
trimV1,
Cache,
makeHttp2Request,
removeColorFormatting,
getSeparator,
isValidUrl,
};

View File

@@ -5,7 +5,7 @@
* *
* @link https://github.com/malfoyslastname/character-card-spec-v2 * @link https://github.com/malfoyslastname/character-card-spec-v2
*/ */
class TavernCardValidator { export class TavernCardValidator {
/** /**
* @type {string|null} * @type {string|null}
*/ */
@@ -167,5 +167,3 @@ class TavernCardValidator {
return true; return true;
} }
} }
module.exports = { TavernCardValidator };

View File

@@ -1,15 +1,15 @@
const fetch = require('node-fetch').default; import fetch from 'node-fetch';
const { SECRET_KEYS, readSecret } = require('../endpoints/secrets'); import { SECRET_KEYS, readSecret } from '../endpoints/secrets.js';
/** /**
* Gets the vector for the given text batch from an OpenAI compatible endpoint. * Gets the vector for the given text batch from an OpenAI compatible endpoint.
* @param {string[]} texts - The array of texts to get the vector for * @param {string[]} texts - The array of texts to get the vector for
* @param {boolean} isQuery - If the text is a query for embedding search * @param {boolean} isQuery - If the text is a query for embedding search
* @param {import('../users').UserDirectoryList} directories - The directories object for the user * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
* @param {string} model - The model to use for the embedding * @param {string} model - The model to use for the embedding
* @returns {Promise<number[][]>} - The array of vectors for the texts * @returns {Promise<number[][]>} - The array of vectors for the texts
*/ */
async function getCohereBatchVector(texts, isQuery, directories, model) { export async function getCohereBatchVector(texts, isQuery, directories, model) {
const key = readSecret(directories, SECRET_KEYS.COHERE); const key = readSecret(directories, SECRET_KEYS.COHERE);
if (!key) { if (!key) {
@@ -38,6 +38,7 @@ async function getCohereBatchVector(texts, isQuery, directories, model) {
throw new Error('API request failed'); throw new Error('API request failed');
} }
/** @type {any} */
const data = await response.json(); const data = await response.json();
if (!Array.isArray(data?.embeddings?.float)) { if (!Array.isArray(data?.embeddings?.float)) {
console.log('API response was not an array'); console.log('API response was not an array');
@@ -51,16 +52,12 @@ async function getCohereBatchVector(texts, isQuery, directories, model) {
* Gets the vector for the given text from an OpenAI compatible endpoint. * Gets the vector for the given text from an OpenAI compatible endpoint.
* @param {string} text - The text to get the vector for * @param {string} text - The text to get the vector for
* @param {boolean} isQuery - If the text is a query for embedding search * @param {boolean} isQuery - If the text is a query for embedding search
* @param {import('../users').UserDirectoryList} directories - The directories object for the user * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
* @param {string} model - The model to use for the embedding * @param {string} model - The model to use for the embedding
* @returns {Promise<number[]>} - The vector for the text * @returns {Promise<number[]>} - The vector for the text
*/ */
async function getCohereVector(text, isQuery, directories, model) { export async function getCohereVector(text, isQuery, directories, model) {
const vectors = await getCohereBatchVector([text], isQuery, directories, model); const vectors = await getCohereBatchVector([text], isQuery, directories, model);
return vectors[0]; return vectors[0];
} }
module.exports = {
getCohereBatchVector,
getCohereVector,
};

View File

@@ -1,3 +1,4 @@
import { getPipeline } from '../transformers.mjs';
const TASK = 'feature-extraction'; const TASK = 'feature-extraction';
/** /**
@@ -5,9 +6,8 @@ const TASK = 'feature-extraction';
* @param {string} text - The text to vectorize * @param {string} text - The text to vectorize
* @returns {Promise<number[]>} - The vectorized text in form of an array of numbers * @returns {Promise<number[]>} - The vectorized text in form of an array of numbers
*/ */
async function getTransformersVector(text) { export async function getTransformersVector(text) {
const module = await import('../transformers.mjs'); const pipe = await getPipeline(TASK);
const pipe = await module.default.getPipeline(TASK);
const result = await pipe(text, { pooling: 'mean', normalize: true }); const result = await pipe(text, { pooling: 'mean', normalize: true });
const vector = Array.from(result.data); const vector = Array.from(result.data);
return vector; return vector;
@@ -18,15 +18,10 @@ async function getTransformersVector(text) {
* @param {string[]} texts - The texts to vectorize * @param {string[]} texts - The texts to vectorize
* @returns {Promise<number[][]>} - The vectorized texts in form of an array of arrays of numbers * @returns {Promise<number[][]>} - The vectorized texts in form of an array of arrays of numbers
*/ */
async function getTransformersBatchVector(texts) { export async function getTransformersBatchVector(texts) {
const result = []; const result = [];
for (const text of texts) { for (const text of texts) {
result.push(await getTransformersVector(text)); result.push(await getTransformersVector(text));
} }
return result; return result;
} }
module.exports = {
getTransformersVector,
getTransformersBatchVector,
};

View File

@@ -1,4 +1,4 @@
const fetch = require('node-fetch').default; import fetch from 'node-fetch';
/** /**
* Gets the vector for the given text from SillyTavern-extras * Gets the vector for the given text from SillyTavern-extras
@@ -7,7 +7,7 @@ const fetch = require('node-fetch').default;
* @param {string} apiKey - The Extras API key, or empty string if API key not enabled * @param {string} apiKey - The Extras API key, or empty string if API key not enabled
* @returns {Promise<number[][]>} - The array of vectors for the texts * @returns {Promise<number[][]>} - The array of vectors for the texts
*/ */
async function getExtrasBatchVector(texts, apiUrl, apiKey) { export async function getExtrasBatchVector(texts, apiUrl, apiKey) {
return getExtrasVectorImpl(texts, apiUrl, apiKey); return getExtrasVectorImpl(texts, apiUrl, apiKey);
} }
@@ -18,7 +18,7 @@ async function getExtrasBatchVector(texts, apiUrl, apiKey) {
* @param {string} apiKey - The Extras API key, or empty string if API key not enabled * @param {string} apiKey - The Extras API key, or empty string if API key not enabled
* @returns {Promise<number[]>} - The vector for the text * @returns {Promise<number[]>} - The vector for the text
*/ */
async function getExtrasVector(text, apiUrl, apiKey) { export async function getExtrasVector(text, apiUrl, apiKey) {
return getExtrasVectorImpl(text, apiUrl, apiKey); return getExtrasVectorImpl(text, apiUrl, apiKey);
} }
@@ -66,13 +66,9 @@ async function getExtrasVectorImpl(text, apiUrl, apiKey) {
throw new Error('Extras request failed'); throw new Error('Extras request failed');
} }
/** @type {any} */
const data = await response.json(); const data = await response.json();
const vector = data.embedding; // `embedding`: number[] (one text item), or number[][] (multiple text items). const vector = data.embedding; // `embedding`: number[] (one text item), or number[][] (multiple text items).
return vector; return vector;
} }
module.exports = {
getExtrasVector,
getExtrasBatchVector,
};

View File

@@ -1,15 +1,15 @@
const fetch = require('node-fetch').default; import fetch from 'node-fetch';
const { setAdditionalHeadersByType } = require('../additional-headers'); import { setAdditionalHeadersByType } from '../additional-headers.js';
const { TEXTGEN_TYPES } = require('../constants'); import { TEXTGEN_TYPES } from '../constants.js';
/** /**
* Gets the vector for the given text from LlamaCpp * Gets the vector for the given text from LlamaCpp
* @param {string[]} texts - The array of texts to get the vectors for * @param {string[]} texts - The array of texts to get the vectors for
* @param {string} apiUrl - The API URL * @param {string} apiUrl - The API URL
* @param {import('../users').UserDirectoryList} directories - The directories object for the user * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
* @returns {Promise<number[][]>} - The array of vectors for the texts * @returns {Promise<number[][]>} - The array of vectors for the texts
*/ */
async function getLlamaCppBatchVector(texts, apiUrl, directories) { export async function getLlamaCppBatchVector(texts, apiUrl, directories) {
const url = new URL(apiUrl); const url = new URL(apiUrl);
url.pathname = '/v1/embeddings'; url.pathname = '/v1/embeddings';
@@ -30,6 +30,7 @@ async function getLlamaCppBatchVector(texts, apiUrl, directories) {
throw new Error(`LlamaCpp: Failed to get vector for text: ${response.statusText} ${responseText}`); throw new Error(`LlamaCpp: Failed to get vector for text: ${response.statusText} ${responseText}`);
} }
/** @type {any} */
const data = await response.json(); const data = await response.json();
if (!Array.isArray(data?.data)) { if (!Array.isArray(data?.data)) {
@@ -47,15 +48,10 @@ async function getLlamaCppBatchVector(texts, apiUrl, directories) {
* Gets the vector for the given text from LlamaCpp * Gets the vector for the given text from LlamaCpp
* @param {string} text - The text to get the vector for * @param {string} text - The text to get the vector for
* @param {string} apiUrl - The API URL * @param {string} apiUrl - The API URL
* @param {import('../users').UserDirectoryList} directories - The directories object for the user * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
* @returns {Promise<number[]>} - The vector for the text * @returns {Promise<number[]>} - The vector for the text
*/ */
async function getLlamaCppVector(text, apiUrl, directories) { export async function getLlamaCppVector(text, apiUrl, directories) {
const vectors = await getLlamaCppBatchVector([text], apiUrl, directories); const vectors = await getLlamaCppBatchVector([text], apiUrl, directories);
return vectors[0]; return vectors[0];
} }
module.exports = {
getLlamaCppBatchVector,
getLlamaCppVector,
};

View File

@@ -1,14 +1,14 @@
const fetch = require('node-fetch').default; import fetch from 'node-fetch';
const { SECRET_KEYS, readSecret } = require('../endpoints/secrets'); import { SECRET_KEYS, readSecret } from '../endpoints/secrets.js';
const API_MAKERSUITE = 'https://generativelanguage.googleapis.com'; const API_MAKERSUITE = 'https://generativelanguage.googleapis.com';
/** /**
* Gets the vector for the given text from gecko model * Gets the vector for the given text from gecko model
* @param {string[]} texts - The array of texts to get the vector for * @param {string[]} texts - The array of texts to get the vector for
* @param {import('../users').UserDirectoryList} directories - The directories object for the user * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
* @returns {Promise<number[][]>} - The array of vectors for the texts * @returns {Promise<number[][]>} - The array of vectors for the texts
*/ */
async function getMakerSuiteBatchVector(texts, directories) { export async function getMakerSuiteBatchVector(texts, directories) {
const promises = texts.map(text => getMakerSuiteVector(text, directories)); const promises = texts.map(text => getMakerSuiteVector(text, directories));
return await Promise.all(promises); return await Promise.all(promises);
} }
@@ -16,10 +16,10 @@ async function getMakerSuiteBatchVector(texts, directories) {
/** /**
* Gets the vector for the given text from Gemini API text-embedding-004 model * Gets the vector for the given text from Gemini API text-embedding-004 model
* @param {string} text - The text to get the vector for * @param {string} text - The text to get the vector for
* @param {import('../users').UserDirectoryList} directories - The directories object for the user * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
* @returns {Promise<number[]>} - The vector for the text * @returns {Promise<number[]>} - The vector for the text
*/ */
async function getMakerSuiteVector(text, directories) { export async function getMakerSuiteVector(text, directories) {
const key = readSecret(directories, SECRET_KEYS.MAKERSUITE); const key = readSecret(directories, SECRET_KEYS.MAKERSUITE);
if (!key) { if (!key) {
@@ -52,12 +52,8 @@ async function getMakerSuiteVector(text, directories) {
throw new Error('Google AI Studio request failed'); throw new Error('Google AI Studio request failed');
} }
/** @type {any} */
const data = await response.json(); const data = await response.json();
// noinspection JSValidateTypes // noinspection JSValidateTypes
return data['embedding']['values']; return data['embedding']['values'];
} }
module.exports = {
getMakerSuiteVector,
getMakerSuiteBatchVector,
};

View File

@@ -1,5 +1,5 @@
const fetch = require('node-fetch').default; import fetch from 'node-fetch';
const { SECRET_KEYS, readSecret } = require('../endpoints/secrets'); import { SECRET_KEYS, readSecret } from '../endpoints/secrets.js';
const SOURCES = { const SOURCES = {
'nomicai': { 'nomicai': {
@@ -13,10 +13,10 @@ const SOURCES = {
* Gets the vector for the given text batch from an OpenAI compatible endpoint. * Gets the vector for the given text batch from an OpenAI compatible endpoint.
* @param {string[]} texts - The array of texts to get the vector for * @param {string[]} texts - The array of texts to get the vector for
* @param {string} source - The source of the vector * @param {string} source - The source of the vector
* @param {import('../users').UserDirectoryList} directories - The directories object for the user * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
* @returns {Promise<number[][]>} - The array of vectors for the texts * @returns {Promise<number[][]>} - The array of vectors for the texts
*/ */
async function getNomicAIBatchVector(texts, source, directories) { export async function getNomicAIBatchVector(texts, source, directories) {
const config = SOURCES[source]; const config = SOURCES[source];
if (!config) { if (!config) {
@@ -51,6 +51,7 @@ async function getNomicAIBatchVector(texts, source, directories) {
throw new Error('API request failed'); throw new Error('API request failed');
} }
/** @type {any} */
const data = await response.json(); const data = await response.json();
if (!Array.isArray(data?.embeddings)) { if (!Array.isArray(data?.embeddings)) {
console.log('API response was not an array'); console.log('API response was not an array');
@@ -64,15 +65,10 @@ async function getNomicAIBatchVector(texts, source, directories) {
* Gets the vector for the given text from an OpenAI compatible endpoint. * Gets the vector for the given text from an OpenAI compatible endpoint.
* @param {string} text - The text to get the vector for * @param {string} text - The text to get the vector for
* @param {string} source - The source of the vector * @param {string} source - The source of the vector
* @param {import('../users').UserDirectoryList} directories - The directories object for the user * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
* @returns {Promise<number[]>} - The vector for the text * @returns {Promise<number[]>} - The vector for the text
*/ */
async function getNomicAIVector(text, source, directories) { export async function getNomicAIVector(text, source, directories) {
const vectors = await getNomicAIBatchVector([text], source, directories); const vectors = await getNomicAIBatchVector([text], source, directories);
return vectors[0]; return vectors[0];
} }
module.exports = {
getNomicAIVector,
getNomicAIBatchVector,
};

View File

@@ -1,6 +1,6 @@
const fetch = require('node-fetch').default; import fetch from 'node-fetch';
const { setAdditionalHeadersByType } = require('../additional-headers'); import { setAdditionalHeadersByType } from '../additional-headers.js';
const { TEXTGEN_TYPES } = require('../constants'); import { TEXTGEN_TYPES } from '../constants.js';
/** /**
* Gets the vector for the given text from Ollama * Gets the vector for the given text from Ollama
@@ -8,10 +8,10 @@ const { TEXTGEN_TYPES } = require('../constants');
* @param {string} apiUrl - The API URL * @param {string} apiUrl - The API URL
* @param {string} model - The model to use * @param {string} model - The model to use
* @param {boolean} keep - Keep the model loaded in memory * @param {boolean} keep - Keep the model loaded in memory
* @param {import('../users').UserDirectoryList} directories - The directories object for the user * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
* @returns {Promise<number[][]>} - The array of vectors for the texts * @returns {Promise<number[][]>} - The array of vectors for the texts
*/ */
async function getOllamaBatchVector(texts, apiUrl, model, keep, directories) { export async function getOllamaBatchVector(texts, apiUrl, model, keep, directories) {
const result = []; const result = [];
for (const text of texts) { for (const text of texts) {
const vector = await getOllamaVector(text, apiUrl, model, keep, directories); const vector = await getOllamaVector(text, apiUrl, model, keep, directories);
@@ -26,10 +26,10 @@ async function getOllamaBatchVector(texts, apiUrl, model, keep, directories) {
* @param {string} apiUrl - The API URL * @param {string} apiUrl - The API URL
* @param {string} model - The model to use * @param {string} model - The model to use
* @param {boolean} keep - Keep the model loaded in memory * @param {boolean} keep - Keep the model loaded in memory
* @param {import('../users').UserDirectoryList} directories - The directories object for the user * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
* @returns {Promise<number[]>} - The vector for the text * @returns {Promise<number[]>} - The vector for the text
*/ */
async function getOllamaVector(text, apiUrl, model, keep, directories) { export async function getOllamaVector(text, apiUrl, model, keep, directories) {
const url = new URL(apiUrl); const url = new URL(apiUrl);
url.pathname = '/api/embeddings'; url.pathname = '/api/embeddings';
@@ -54,6 +54,7 @@ async function getOllamaVector(text, apiUrl, model, keep, directories) {
throw new Error(`Ollama: Failed to get vector for text: ${response.statusText} ${responseText}`); throw new Error(`Ollama: Failed to get vector for text: ${response.statusText} ${responseText}`);
} }
/** @type {any} */
const data = await response.json(); const data = await response.json();
if (!Array.isArray(data?.embedding)) { if (!Array.isArray(data?.embedding)) {
@@ -62,8 +63,3 @@ async function getOllamaVector(text, apiUrl, model, keep, directories) {
return data.embedding; return data.embedding;
} }
module.exports = {
getOllamaBatchVector,
getOllamaVector,
};

View File

@@ -1,5 +1,5 @@
const fetch = require('node-fetch').default; import fetch from 'node-fetch';
const { SECRET_KEYS, readSecret } = require('../endpoints/secrets'); import { SECRET_KEYS, readSecret } from '../endpoints/secrets.js';
const SOURCES = { const SOURCES = {
'togetherai': { 'togetherai': {
@@ -23,11 +23,11 @@ const SOURCES = {
* Gets the vector for the given text batch from an OpenAI compatible endpoint. * Gets the vector for the given text batch from an OpenAI compatible endpoint.
* @param {string[]} texts - The array of texts to get the vector for * @param {string[]} texts - The array of texts to get the vector for
* @param {string} source - The source of the vector * @param {string} source - The source of the vector
* @param {import('../users').UserDirectoryList} directories - The directories object for the user * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
* @param {string} model - The model to use for the embedding * @param {string} model - The model to use for the embedding
* @returns {Promise<number[][]>} - The array of vectors for the texts * @returns {Promise<number[][]>} - The array of vectors for the texts
*/ */
async function getOpenAIBatchVector(texts, source, directories, model = '') { export async function getOpenAIBatchVector(texts, source, directories, model = '') {
const config = SOURCES[source]; const config = SOURCES[source];
if (!config) { if (!config) {
@@ -61,6 +61,7 @@ async function getOpenAIBatchVector(texts, source, directories, model = '') {
throw new Error('API request failed'); throw new Error('API request failed');
} }
/** @type {any} */
const data = await response.json(); const data = await response.json();
if (!Array.isArray(data?.data)) { if (!Array.isArray(data?.data)) {
@@ -79,16 +80,11 @@ async function getOpenAIBatchVector(texts, source, directories, model = '') {
* Gets the vector for the given text from an OpenAI compatible endpoint. * Gets the vector for the given text from an OpenAI compatible endpoint.
* @param {string} text - The text to get the vector for * @param {string} text - The text to get the vector for
* @param {string} source - The source of the vector * @param {string} source - The source of the vector
* @param {import('../users').UserDirectoryList} directories - The directories object for the user * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
* @param {string} model - The model to use for the embedding * @param {string} model - The model to use for the embedding
* @returns {Promise<number[]>} - The vector for the text * @returns {Promise<number[]>} - The vector for the text
*/ */
async function getOpenAIVector(text, source, directories, model = '') { export async function getOpenAIVector(text, source, directories, model = '') {
const vectors = await getOpenAIBatchVector([text], source, directories, model); const vectors = await getOpenAIBatchVector([text], source, directories, model);
return vectors[0]; return vectors[0];
} }
module.exports = {
getOpenAIVector,
getOpenAIBatchVector,
};

View File

@@ -1,16 +1,16 @@
const fetch = require('node-fetch').default; import fetch from 'node-fetch';
const { setAdditionalHeadersByType } = require('../additional-headers'); import { setAdditionalHeadersByType } from '../additional-headers.js';
const { TEXTGEN_TYPES } = require('../constants'); import { TEXTGEN_TYPES } from '../constants.js';
/** /**
* Gets the vector for the given text from VLLM * Gets the vector for the given text from VLLM
* @param {string[]} texts - The array of texts to get the vectors for * @param {string[]} texts - The array of texts to get the vectors for
* @param {string} apiUrl - The API URL * @param {string} apiUrl - The API URL
* @param {string} model - The model to use * @param {string} model - The model to use
* @param {import('../users').UserDirectoryList} directories - The directories object for the user * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
* @returns {Promise<number[][]>} - The array of vectors for the texts * @returns {Promise<number[][]>} - The array of vectors for the texts
*/ */
async function getVllmBatchVector(texts, apiUrl, model, directories) { export async function getVllmBatchVector(texts, apiUrl, model, directories) {
const url = new URL(apiUrl); const url = new URL(apiUrl);
url.pathname = '/v1/embeddings'; url.pathname = '/v1/embeddings';
@@ -31,6 +31,7 @@ async function getVllmBatchVector(texts, apiUrl, model, directories) {
throw new Error(`VLLM: Failed to get vector for text: ${response.statusText} ${responseText}`); throw new Error(`VLLM: Failed to get vector for text: ${response.statusText} ${responseText}`);
} }
/** @type {any} */
const data = await response.json(); const data = await response.json();
if (!Array.isArray(data?.data)) { if (!Array.isArray(data?.data)) {
@@ -49,15 +50,10 @@ async function getVllmBatchVector(texts, apiUrl, model, directories) {
* @param {string} text - The text to get the vector for * @param {string} text - The text to get the vector for
* @param {string} apiUrl - The API URL * @param {string} apiUrl - The API URL
* @param {string} model - The model to use * @param {string} model - The model to use
* @param {import('../users').UserDirectoryList} directories - The directories object for the user * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
* @returns {Promise<number[]>} - The vector for the text * @returns {Promise<number[]>} - The vector for the text
*/ */
async function getVllmVector(text, apiUrl, model, directories) { export async function getVllmVector(text, apiUrl, model, directories) {
const vectors = await getVllmBatchVector([text], apiUrl, model, directories); const vectors = await getVllmBatchVector([text], apiUrl, model, directories);
return vectors[0]; return vectors[0];
} }
module.exports = {
getVllmBatchVector,
getVllmVector,
};