#!/usr/bin/env node

// native node modules
const child_process = require('child_process');
const fs = require('fs');
const path = require('path');

// cli/fs related library imports
const yargs = require('yargs/yargs');
const { hideBin } = require('yargs/helpers');
const simpleGit = require('simple-git');
const writeFileAtomicSync = require('write-file-atomic').sync;
const sanitize = require('sanitize-filename');

// express related library imports
const express = require('express');
const compression = require('compression');
const responseTime = require('response-time');
const multer = require("multer");

// image processing related library imports
const extract = require('png-chunks-extract');
const encode = require('png-chunks-encode');
const PNGtext = require('png-chunk-text');
const jimp = require('jimp');
const mime = require('mime-types');
const exif = require('piexifjs');
const webp = require('webp-converter');

createDefaultFiles();

function createDefaultFiles() {
    const files = {
        settings: 'public/settings.json',
        bg_load: 'public/css/bg_load.css',
        config: 'config.conf',
    };

    for (const file of Object.values(files)) {
        try {
            if (!fs.existsSync(file)) {
                const defaultFilePath = path.join('default', path.parse(file).base);
                fs.copyFileSync(defaultFilePath, file);
                console.log(`Created default file: ${file}`);
            }
        } catch (error) {
            console.error(`FATAL: Could not write default file: ${file}`, error);
        }
    }
}

const net = require("net");

// work around a node v20 bug: https://github.com/nodejs/node/issues/47822#issuecomment-1564708870
if (net.setDefaultAutoSelectFamily) {
    net.setDefaultAutoSelectFamily(false);
}

const cliArguments = yargs(hideBin(process.argv))
    .option('disableCsrf', {
        type: 'boolean',
        default: false,
        describe: 'Disables CSRF protection'
    }).option('ssl', {
        type: 'boolean',
        default: false,
        describe: 'Enables SSL'
    }).option('certPath', {
        type: 'string',
        default: 'certs/cert.pem',
        describe: 'Path to your certificate file.'
    }).option('keyPath', {
        type: 'string',
        default: 'certs/privkey.pem',
        describe: 'Path to your private key file.'
    }).argv;

// change all relative paths
const directory = process.pkg ? path.dirname(process.execPath) : __dirname;
console.log(process.pkg ? 'Running from binary' : 'Running from source');
process.chdir(directory);

const app = express();
app.use(compression());
app.use(responseTime());

const readline = require('readline');
const open = require('open');
const http = require("http");
const https = require('https');
const basicAuthMiddleware = require('./src/middleware/basicAuthMiddleware');
const contentManager = require('./src/content-manager');

const cookieParser = require('cookie-parser');
const crypto = require('crypto');
const ipaddr = require('ipaddr.js');
const json5 = require('json5');
const DeviceDetector = require("device-detector-js");
const { TextEncoder, TextDecoder } = require('util');
const utf8Encode = new TextEncoder();
const commandExistsSync = require('command-exists').sync;

// import from statsHelpers.js
const statsHelpers = require('./statsHelpers.js');
const characterCardParser = require('./src/character-card-parser.js');
const config = require(path.join(process.cwd(), './config.conf'));

const server_port = process.env.SILLY_TAVERN_PORT || config.port;

const whitelistPath = path.join(process.cwd(), "./whitelist.txt");
let whitelist = config.whitelist;

if (fs.existsSync(whitelistPath)) {
    try {
        let whitelistTxt = fs.readFileSync(whitelistPath, 'utf-8');
        whitelist = whitelistTxt.split("\n").filter(ip => ip).map(ip => ip.trim());
    } catch (e) { }
}

const whitelistMode = config.whitelistMode;
const autorun = config.autorun && !cliArguments.ssl;
const enableExtensions = config.enableExtensions;
const listen = config.listen;
const allowKeysExposure = config.allowKeysExposure;

const axios = require('axios');
const tiktoken = require('@dqbd/tiktoken');
const WebSocket = require('ws');

function getHordeClient() {
    const AIHorde = require("./src/horde");
    const ai_horde = new AIHorde({
        client_agent: getVersion()?.agent || 'SillyTavern:UNKNOWN:Cohee#1207',
    });
    return ai_horde;
}

const ipMatching = require('ip-matching');
const yauzl = require('yauzl');

const Client = require('node-rest-client').Client;
const client = new Client();

client.on('error', (err) => {
    console.error('An error occurred:', err);
});

let api_server = "http://0.0.0.0:5000";
let api_novelai = "https://api.novelai.net";
let api_openai = "https://api.openai.com/v1";
let api_claude = "https://api.anthropic.com/v1";

let main_api = "kobold";

let characters = {};
let response_dw_bg;

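// Minimal ANSI color helpers: wrap a message in the escape code for the given foreground color.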
let color = {
    byNum: (mess, fgNum) => {
        mess = mess || '';
        fgNum = fgNum === undefined ? 31 : fgNum;
        return '\u001b[' + fgNum + 'm' + mess + '\u001b[39m';
    },
    black: (mess) => color.byNum(mess, 30),
    red: (mess) => color.byNum(mess, 31),
    green: (mess) => color.byNum(mess, 32),
    yellow: (mess) => color.byNum(mess, 33),
    blue: (mess) => color.byNum(mess, 34),
    magenta: (mess) => color.byNum(mess, 35),
    cyan: (mess) => color.byNum(mess, 36),
    white: (mess) => color.byNum(mess, 37)
};

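// Returns the Mancer API key header if one is stored via readSecret, otherwise an empty object.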
function get_mancer_headers() {
    const api_key_mancer = readSecret(SECRET_KEYS.MANCER);
    return api_key_mancer ? { "X-API-KEY": api_key_mancer } : {};
}

//RossAscends: Added function to format dates used in files and chat timestamps to a humanized format.
//Mostly I wanted this to be for file names, but couldn't figure out exactly where the filename save code was as everything seemed to be connected.
//During testing, this performs the same as the previous Date.now() structure.
//It also does not break old characters/chats, as the code just uses whatever timestamp exists in the chat.
//New chats made with characters will use this new formatting.
//Usable variable is (( humanizedISO8601Datetime ))

const delay = ms => new Promise(resolve => setTimeout(resolve, ms));

const { SentencePieceProcessor } = require("@agnai/sentencepiece-js");
const { Tokenizer } = require('@agnai/web-tokenizers');
const CHARS_PER_TOKEN = 3.35;

let spp_llama;
let spp_nerd;
let spp_nerd_v2;
let claude_tokenizer;

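// Loads a SentencePiece model from disk; returns null if loading fails so callers can fall back to estimation.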
async function loadSentencepieceTokenizer(modelPath) {
    try {
        const spp = new SentencePieceProcessor();
        await spp.load(modelPath);
        return spp;
    } catch (error) {
        console.error("Sentencepiece tokenizer failed to load: " + modelPath, error);
        return null;
    }
}

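// Counts tokens with the given SentencePiece processor; falls back to a character-length estimate (CHARS_PER_TOKEN) when no processor is available.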
async function countSentencepieceTokens(spp, text) {
    // Fallback to strlen estimation
    if (!spp) {
        return {
            ids: [],
            count: Math.ceil(text.length / CHARS_PER_TOKEN)
        };
    }

    let cleaned = text; // cleanText(text); <-- cleaning text can result in an incorrect tokenization

    let ids = spp.encodeIds(cleaned);
    return {
        ids,
        count: ids.length
    };
}

async function loadClaudeTokenizer(modelPath) {
    try {
        const arrayBuffer = fs.readFileSync(modelPath).buffer;
        const instance = await Tokenizer.fromJSON(arrayBuffer);
        return instance;
    } catch (error) {
        console.error("Claude tokenizer failed to load: " + modelPath, error);
        return null;
    }
}

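// Counts tokens in a Claude-formatted prompt; falls back to a character-length estimate when the tokenizer is unavailable.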
function countClaudeTokens(tokenizer, messages) {
    const convertedPrompt = convertClaudePrompt(messages, false, false);

    // Fallback to strlen estimation
    if (!tokenizer) {
        return Math.ceil(convertedPrompt.length / CHARS_PER_TOKEN);
    }

    const count = tokenizer.encode(convertedPrompt).length;
    return count;
}

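// Cache of instantiated tiktoken tokenizers, plus a helper to resolve which tokenizer a given API model name should use.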
const tokenizersCache = {};

function getTokenizerModel(requestModel) {
    if (requestModel.includes('claude')) {
        return 'claude';
    }

    if (requestModel.includes('gpt-4-32k')) {
        return 'gpt-4-32k';
    }

    if (requestModel.includes('gpt-4')) {
        return 'gpt-4';
    }

    if (requestModel.includes('gpt-3.5-turbo')) {
        return 'gpt-3.5-turbo';
    }

    if (requestModel.startsWith('text-') || requestModel.startsWith('code-')) {
        return requestModel;
    }

    // default
    return 'gpt-3.5-turbo';
}

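// Returns a cached tiktoken encoder for the model, creating and caching it on first use.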
function getTiktokenTokenizer(model) {
    if (tokenizersCache[model]) {
        return tokenizersCache[model];
    }

    const tokenizer = tiktoken.encoding_for_model(model);
    console.log('Instantiated the tokenizer for', model);
    tokenizersCache[model] = tokenizer;
    return tokenizer;
}

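// Example output for 2023-08-29 23:05:18.123 local time: "2023-8-29 @23h 05m 18s 123ms" (month and day are not zero-padded).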
function humanizedISO8601DateTime() {
    let baseDate = new Date(Date.now());
    let humanYear = baseDate.getFullYear();
    let humanMonth = (baseDate.getMonth() + 1);
    let humanDate = baseDate.getDate();
    let humanHour = (baseDate.getHours() < 10 ? '0' : '') + baseDate.getHours();
    let humanMinute = (baseDate.getMinutes() < 10 ? '0' : '') + baseDate.getMinutes();
    let humanSecond = (baseDate.getSeconds() < 10 ? '0' : '') + baseDate.getSeconds();
    let humanMillisecond = (baseDate.getMilliseconds() < 10 ? '0' : '') + baseDate.getMilliseconds();
    let HumanizedDateTime = (humanYear + "-" + humanMonth + "-" + humanDate + " @" + humanHour + "h " + humanMinute + "m " + humanSecond + "s " + humanMillisecond + "ms");
    return HumanizedDateTime;
}

var is_colab = process.env.colaburl !== undefined;
var charactersPath = 'public/characters/';
var chatsPath = 'public/chats/';
const UPLOADS_PATH = './uploads';
const AVATAR_WIDTH = 400;
const AVATAR_HEIGHT = 600;
const jsonParser = express.json({ limit: '100mb' });
const urlencodedParser = express.urlencoded({ extended: true, limit: '100mb' });
const baseRequestArgs = { headers: { "Content-Type": "application/json" } };

const directories = {
    worlds: 'public/worlds/',
    avatars: 'public/User Avatars',
    images: 'public/img/',
    userImages: 'public/user/images/',
    groups: 'public/groups/',
    groupChats: 'public/group chats',
    chats: 'public/chats/',
    characters: 'public/characters/',
    backgrounds: 'public/backgrounds',
    novelAI_Settings: 'public/NovelAI Settings',
    koboldAI_Settings: 'public/KoboldAI Settings',
    openAI_Settings: 'public/OpenAI Settings',
    textGen_Settings: 'public/TextGen Settings',
    thumbnails: 'thumbnails/',
    thumbnailsBg: 'thumbnails/bg/',
    thumbnailsAvatar: 'thumbnails/avatar/',
    themes: 'public/themes',
    movingUI: 'public/movingUI',
    extensions: 'public/scripts/extensions',
    instruct: 'public/instruct',
    context: 'public/context',
    backups: 'backups/',
    quickreplies: 'public/QuickReplies',
    assets: 'public/assets',
};

// CSRF Protection //
if (cliArguments.disableCsrf === false) {
    const doubleCsrf = require('csrf-csrf').doubleCsrf;

    const CSRF_SECRET = crypto.randomBytes(8).toString('hex');
    const COOKIES_SECRET = crypto.randomBytes(8).toString('hex');

    const { generateToken, doubleCsrfProtection } = doubleCsrf({
        getSecret: () => CSRF_SECRET,
        cookieName: "X-CSRF-Token",
        cookieOptions: {
            httpOnly: true,
            sameSite: "strict",
            secure: false
        },
        size: 64,
        getTokenFromRequest: (req) => req.headers["x-csrf-token"]
    });

    app.get("/csrf-token", (req, res) => {
        res.json({
            "token": generateToken(res)
        });
    });

    app.use(cookieParser(COOKIES_SECRET));
    app.use(doubleCsrfProtection);
} else {
    console.warn("\nCSRF protection is disabled. This will make your server vulnerable to CSRF attacks.\n");
    app.get("/csrf-token", (req, res) => {
        res.json({
            "token": 'disabled'
        });
    });
}

// CORS Settings //
const cors = require('cors');
const CORS = cors({
    origin: 'null',
    methods: ['OPTIONS']
});

app.use(CORS);

if (listen && config.basicAuthMode) app.use(basicAuthMiddleware);

// IP Whitelist //
let knownIPs = new Set();

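// Resolves the client IP for a request, unwrapping IPv4-mapped IPv6 addresses (e.g. ::ffff:127.0.0.1 -> 127.0.0.1).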
function getIpFromRequest(req) {
    let clientIp = req.connection.remoteAddress;
    let ip = ipaddr.parse(clientIp);
    // Check if the IP address is an IPv4-mapped IPv6 address
    if (ip.kind() === 'ipv6' && ip.isIPv4MappedAddress()) {
        const ipv4 = ip.toIPv4Address().toString();
        clientIp = ipv4;
    } else {
        clientIp = ip;
        clientIp = clientIp.toString();
    }
    return clientIp;
}

app.use(function (req, res, next) {
    const clientIp = getIpFromRequest(req);

    if (listen && !knownIPs.has(clientIp)) {
        const userAgent = req.headers['user-agent'];
        console.log(color.yellow(`New connection from ${clientIp}; User Agent: ${userAgent}\n`));
        knownIPs.add(clientIp);

        // Write access log
        const timestamp = new Date().toISOString();
        const log = `${timestamp} ${clientIp} ${userAgent}\n`;
        fs.appendFile('access.log', log, (err) => {
            if (err) {
                console.error('Failed to write access log:', err);
            }
        });
    }

    //clientIp = req.connection.remoteAddress.split(':').pop();
    if (whitelistMode === true && !whitelist.some(x => ipMatching.matches(clientIp, ipMatching.getMatch(x)))) {
        console.log(color.red('Forbidden: Connection attempt from ' + clientIp + '. If you are attempting to connect, please add your IP address in whitelist or disable whitelist mode in config.conf in root of SillyTavern folder.\n'));
        return res.status(403).send('<b>Forbidden</b>: Connection attempt from <b>' + clientIp + '</b>. If you are attempting to connect, please add your IP address in whitelist or disable whitelist mode in config.conf in root of SillyTavern folder.');
    }
    next();
});

app.use(express.static(process.cwd() + "/public", { refresh: true }));

app.use('/backgrounds', (req, res) => {
    const filePath = decodeURIComponent(path.join(process.cwd(), 'public/backgrounds', req.url.replace(/%20/g, ' ')));
    fs.readFile(filePath, (err, data) => {
        if (err) {
            res.status(404).send('File not found');
            return;
        }
        //res.contentType('image/jpeg');
        res.send(data);
    });
});

app.use('/characters', (req, res) => {
    const filePath = decodeURIComponent(path.join(process.cwd(), charactersPath, req.url.replace(/%20/g, ' ')));
    fs.readFile(filePath, (err, data) => {
        if (err) {
            res.status(404).send('File not found');
            return;
        }
        res.send(data);
    });
});

app.use(multer({ dest: UPLOADS_PATH, limits: { fieldSize: 10 * 1024 * 1024 } }).single("avatar"));

app.get("/", function (request, response) {
    response.sendFile(process.cwd() + "/public/index.html");
});

app.get("/notes/*", function (request, response) {
    response.sendFile(process.cwd() + "/public" + request.url + ".html");
});

app.get('/deviceinfo', function (request, response) {
    const userAgent = request.header('user-agent');
    const deviceDetector = new DeviceDetector();
    const deviceInfo = deviceDetector.parse(userAgent);
    return response.send(deviceInfo);
});

app.get('/version', function (_, response) {
    const data = getVersion();
    response.send(data);
})

//**************Kobold api
app.post("/generate", jsonParser, async function (request, response_generate = response) {
    if (!request.body) return response_generate.sendStatus(400);

    const request_prompt = request.body.prompt;
    const controller = new AbortController();
    request.socket.removeAllListeners('close');
    request.socket.on('close', async function () {
        if (request.body.can_abort && !response_generate.writableEnded) {
            try {
                console.log('Aborting Kobold generation...');
                // send abort signal to koboldcpp
                const abortResponse = await fetch(`${api_server}/extra/abort`, {
                    method: 'POST',
                });

                if (!abortResponse.ok) {
                    console.log('Error sending abort request to Kobold:', abortResponse.status);
                }
            } catch (error) {
                console.log(error);
            }
        }
        controller.abort();
    });

    let this_settings = {
        prompt: request_prompt,
        use_story: false,
        use_memory: false,
        use_authors_note: false,
        use_world_info: false,
        max_context_length: request.body.max_context_length,
        singleline: !!request.body.singleline,
    };

    if (request.body.gui_settings == false) {
        const sampler_order = [request.body.s1, request.body.s2, request.body.s3, request.body.s4, request.body.s5, request.body.s6, request.body.s7];
        this_settings = {
            prompt: request_prompt,
            use_story: false,
            use_memory: false,
            use_authors_note: false,
            use_world_info: false,
            max_context_length: request.body.max_context_length,
            max_length: request.body.max_length,
            rep_pen: request.body.rep_pen,
            rep_pen_range: request.body.rep_pen_range,
            rep_pen_slope: request.body.rep_pen_slope,
            temperature: request.body.temperature,
            tfs: request.body.tfs,
            top_a: request.body.top_a,
            top_k: request.body.top_k,
            top_p: request.body.top_p,
            typical: request.body.typical,
            sampler_order: sampler_order,
            singleline: !!request.body.singleline,
        };
        if (!!request.body.stop_sequence) {
            this_settings['stop_sequence'] = request.body.stop_sequence;
        }
    }

    console.log(this_settings);
    const args = {
        body: JSON.stringify(this_settings),
        headers: { "Content-Type": "application/json" },
        signal: controller.signal,
    };

    const MAX_RETRIES = 50;
    const delayAmount = 2500;
    let fetch, url, response;
    for (let i = 0; i < MAX_RETRIES; i++) {
        try {
            fetch = require('node-fetch').default;
            url = request.body.streaming ? `${api_server}/extra/generate/stream` : `${api_server}/v1/generate`;
            response = await fetch(url, { method: 'POST', timeout: 0, ...args });

            if (request.body.streaming) {
                request.socket.on('close', function () {
                    response.body.destroy(); // Close the remote stream
                    response_generate.end(); // End the Express response
                });

                response.body.on('end', function () {
                    console.log("Streaming request finished");
                    response_generate.end();
                });

                // Pipe remote SSE stream to Express response
                return response.body.pipe(response_generate);
            } else {
                if (!response.ok) {
                    const errorText = await response.text();
                    console.log(`Kobold returned error: ${response.status} ${response.statusText} ${errorText}`);

                    try {
                        const errorJson = JSON.parse(errorText);
                        const message = errorJson?.detail?.msg || errorText;
                        return response_generate.status(400).send({ error: { message } });
                    } catch {
                        return response_generate.status(400).send({ error: { message: errorText } });
                    }
                }

                const data = await response.json();
                console.log("Endpoint response:", data);
                return response_generate.send(data);
            }
        } catch (error) {
            // response
            switch (error?.status) {
                case 403:
                case 503: // retry in case of temporary service issue, possibly caused by a queue failure?
                    console.debug(`KoboldAI is busy. Retry attempt ${i + 1} of ${MAX_RETRIES}...`);
                    await delay(delayAmount);
                    break;
                default:
                    if ('status' in error) {
                        console.log('Status Code from Kobold:', error.status);
                    }
                    return response_generate.send({ error: true });
            }
        }
    }

    console.log('Max retries exceeded. Giving up.');
    return response_generate.send({ error: true });
});

//************** Text generation web UI
app.post("/generate_textgenerationwebui", jsonParser, async function (request, response_generate = response) {
    if (!request.body) return response_generate.sendStatus(400);

    console.log(request.body);

    const controller = new AbortController();
    let isGenerationStopped = false;
    request.socket.removeAllListeners('close');
    request.socket.on('close', function () {
        isGenerationStopped = true;
        controller.abort();
    });

    if (request.header('X-Response-Streaming')) {
        response_generate.writeHead(200, {
            'Content-Type': 'text/plain;charset=utf-8',
            'Transfer-Encoding': 'chunked',
            'Cache-Control': 'no-transform',
        });

        async function* readWebsocket() {
            const streamingUrl = request.header('X-Streaming-URL').replace("localhost", "127.0.0.1");
            const websocket = new WebSocket(streamingUrl);

            websocket.on('open', async function () {
                console.log('WebSocket opened');
                const combined_args = Object.assign(request.body.use_mancer ? get_mancer_headers() : {}, request.body);
                websocket.send(JSON.stringify(combined_args));
            });

            websocket.on('close', (code, buffer) => {
                const reason = new TextDecoder().decode(buffer);
                console.log("WebSocket closed (reason: %o)", reason);
            });

            while (true) {
                if (isGenerationStopped) {
                    console.error('Streaming stopped by user. Closing websocket...');
                    websocket.close();
                    return;
                }

                let rawMessage = null;
                try {
                    // This lunacy is because the websocket can fail to connect AFTER we're awaiting 'message'... so 'message' never triggers.
                    // So instead we need to look for 'error' at the same time to reject the promise. And then remove the listener if we resolve.
                    // This is awful.
                    // Welcome to the shenanigan shack.
                    rawMessage = await new Promise(function (resolve, reject) {
                        websocket.once('error', reject);
                        websocket.once('message', (data, isBinary) => {
                            websocket.removeListener('error', reject);
                            resolve(data, isBinary);
                        });
                    });
                } catch (err) {
                    console.error("Socket error:", err);
                    websocket.close();
                    yield "[SillyTavern] Streaming failed:\n" + err;
                    return;
                }

                const message = json5.parse(rawMessage);

                switch (message.event) {
                    case 'text_stream':
                        yield message.text;
                        break;
                    case 'stream_end':
                        websocket.close();
                        return;
                }
            }
        }

        let reply = '';

        try {
            for await (const text of readWebsocket()) {
                if (typeof text !== 'string') {
                    break;
                }

                let newText = text;

                if (!newText) {
                    continue;
                }

                reply += text;
                response_generate.write(newText);
            }

            console.log(reply);
        }
        finally {
            response_generate.end();
        }
    }
    else {
        const args = {
            body: JSON.stringify(request.body),
            headers: { "Content-Type": "application/json" },
            signal: controller.signal,
        };

        if (request.body.use_mancer) {
            args.headers = Object.assign(args.headers, get_mancer_headers());
        }

        try {
            const data = await postAsync(api_server + "/v1/generate", args);
            console.log("Endpoint response:", data);
            return response_generate.send(data);
        } catch (error) {
            let retval = { error: true, status: error.status, response: error.statusText };
            console.log("Endpoint error:", error);
            try {
                retval.response = await error.json();
                retval.response = retval.response.result;
            } catch { }
            return response_generate.send(retval);
        }
    }
});

app . post ( "/savechat" , jsonParser , function ( request , response ) {
try {
var dir _name = String ( request . body . avatar _url ) . replace ( '.png' , '' ) ;
let chat _data = request . body . chat ;
let jsonlData = chat _data . map ( JSON . stringify ) . join ( '\n' ) ;
2023-08-17 14:20:02 +02:00
writeFileAtomicSync ( ` ${ chatsPath + sanitize ( dir _name ) } / ${ sanitize ( String ( request . body . file _name ) ) } .jsonl ` , jsonlData , 'utf8' ) ;
2023-07-20 19:32:15 +02:00
return response . send ( { result : "ok" } ) ;
} catch ( error ) {
response . send ( error ) ;
return console . log ( error ) ;
}
} ) ;
app . post ( "/getchat" , jsonParser , function ( request , response ) {
try {
const dirName = String ( request . body . avatar _url ) . replace ( '.png' , '' ) ;
const chatDirExists = fs . existsSync ( chatsPath + dirName ) ;
//if no chat dir for the character is found, make one with the character name
if ( ! chatDirExists ) {
fs . mkdirSync ( chatsPath + dirName ) ;
return response . send ( { } ) ;
}
if ( ! request . body . file _name ) {
return response . send ( { } ) ;
}
const fileName = ` ${ chatsPath + dirName } / ${ sanitize ( String ( request . body . file _name ) ) } .jsonl ` ;
const chatFileExists = fs . existsSync ( fileName ) ;
if ( ! chatFileExists ) {
return response . send ( { } ) ;
}
const data = fs . readFileSync ( fileName , 'utf8' ) ;
const lines = data . split ( '\n' ) ;
// Iterate through the array of strings and parse each line as JSON
const jsonData = lines . map ( tryParse ) . filter ( x => x ) ;
return response . send ( jsonData ) ;
} catch ( error ) {
console . error ( error ) ;
return response . send ( { } ) ;
}
} ) ;
app.post("/getstatus", jsonParser, async function (request, response) {
    if (!request.body) return response.sendStatus(400);
    api_server = request.body.api_server;
    main_api = request.body.main_api;
    if (api_server.indexOf('localhost') != -1) {
        api_server = api_server.replace('localhost', '127.0.0.1');
    }

    const args = {
        headers: { "Content-Type": "application/json" }
    };

    if (main_api == 'textgenerationwebui' && request.body.use_mancer) {
        args.headers = Object.assign(args.headers, get_mancer_headers());
    }

    const url = api_server + "/v1/model";
    let version = '';
    let koboldVersion = {};

    if (main_api == "kobold") {
        try {
            version = (await getAsync(api_server + "/v1/info/version")).result;
        }
        catch {
            version = '0.0.0';
        }
        try {
            koboldVersion = (await getAsync(api_server + "/extra/version"));
        }
        catch {
            koboldVersion = {
                result: 'Kobold',
                version: '0.0',
            };
        }
    }

    try {
        let data = await getAsync(url, args);

        if (!data || typeof data !== 'object') {
            data = {};
        }

        if (data.result == "ReadOnly") {
            data.result = "no_connection";
        }

        data.version = version;
        data.koboldVersion = koboldVersion;

        return response.send(data);
    } catch (error) {
        console.log(error);
        return response.send({ result: "no_connection" });
    }
});

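// Reads the app version from package.json and, when running from source with git available, the current git revision and branch.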
function getVersion() {
    let pkgVersion = 'UNKNOWN';
    let gitRevision = null;
    let gitBranch = null;
    try {
        const pkgJson = require('./package.json');
        pkgVersion = pkgJson.version;
        if (!process.pkg && commandExistsSync('git')) {
            gitRevision = child_process
                .execSync('git rev-parse --short HEAD', { cwd: process.cwd(), stdio: ['ignore', 'pipe', 'ignore'] })
                .toString().trim();
            gitBranch = child_process
                .execSync('git rev-parse --abbrev-ref HEAD', { cwd: process.cwd(), stdio: ['ignore', 'pipe', 'ignore'] })
                .toString().trim();
        }
    }
    catch {
        // suppress exception
    }

    const agent = `SillyTavern:${pkgVersion}:Cohee#1207`;
    return { agent, pkgVersion, gitRevision, gitBranch };
}

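// JSON5-parses a string, returning undefined instead of throwing on invalid input.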
function tryParse(str) {
    try {
        return json5.parse(str);
    } catch {
        return undefined;
    }
}

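// Upgrades a Spec V1 character card to Spec V2 by re-running it through charaFormatData.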
function convertToV2(char) {
    // Simulate incoming data from frontend form
    const result = charaFormatData({
        json_data: JSON.stringify(char),
        ch_name: char.name,
        description: char.description,
        personality: char.personality,
        scenario: char.scenario,
        first_mes: char.first_mes,
        mes_example: char.mes_example,
        creator_notes: char.creatorcomment,
        talkativeness: char.talkativeness,
        fav: char.fav,
        creator: char.creator,
        tags: char.tags,
    });

    result.chat = char.chat ?? humanizedISO8601DateTime();
    result.create_date = char.create_date;
    return result;
}

function unsetFavFlag(char) {
    const _ = require('lodash');
    _.set(char, 'fav', false);
    _.set(char, 'data.extensions.fav', false);
}

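// Copies Spec V2 fields back onto the V1 top-level fields, backfilling defaults for missing ST extension data.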
function readFromV2(char) {
    const _ = require('lodash');
    if (_.isUndefined(char.data)) {
        console.warn('Spec v2 data missing');
        return char;
    }

    const fieldMappings = {
        name: 'name',
        description: 'description',
        personality: 'personality',
        scenario: 'scenario',
        first_mes: 'first_mes',
        mes_example: 'mes_example',
        talkativeness: 'extensions.talkativeness',
        fav: 'extensions.fav',
        tags: 'tags',
    };

    _.forEach(fieldMappings, (v2Path, charField) => {
        //console.log(`Migrating field: ${charField} from ${v2Path}`);
        const v2Value = _.get(char.data, v2Path);
        if (_.isUndefined(v2Value)) {
            let defaultValue = undefined;

            // Backfill default values for missing ST extension fields
            if (v2Path === 'extensions.talkativeness') {
                defaultValue = 0.5;
            }

            if (v2Path === 'extensions.fav') {
                defaultValue = false;
            }

            if (!_.isUndefined(defaultValue)) {
                //console.debug(`Spec v2 extension data missing for field: ${charField}, using default value: ${defaultValue}`);
                char[charField] = defaultValue;
            } else {
                console.debug(`Spec v2 data missing for unknown field: ${charField}`);
                return;
            }
        }

        if (!_.isUndefined(char[charField]) && !_.isUndefined(v2Value) && String(char[charField]) !== String(v2Value)) {
            console.debug(`Spec v2 data mismatch with Spec v1 for field: ${charField}`, char[charField], v2Value);
        }

        char[charField] = v2Value;
    });

    char['chat'] = char['chat'] ?? humanizedISO8601DateTime();

    return char;
}

//***************** Main functions
function charaFormatData(data) {
    // This is supposed to save all the foreign keys that ST doesn't care about
    const _ = require('lodash');
    const char = tryParse(data.json_data) || {};

    // This function uses _.cond() to create a series of conditional checks that return the desired output based on the input data.
    // It checks if data.alternate_greetings is an array, a string, or neither, and acts accordingly.
    const getAlternateGreetings = data => _.cond([
        [d => Array.isArray(d.alternate_greetings), d => d.alternate_greetings],
        [d => typeof d.alternate_greetings === 'string', d => [d.alternate_greetings]],
        [_.stubTrue, _.constant([])]
    ])(data);

    // Spec V1 fields
    _.set(char, 'name', data.ch_name);
    _.set(char, 'description', data.description || '');
    _.set(char, 'personality', data.personality || '');
    _.set(char, 'scenario', data.scenario || '');
    _.set(char, 'first_mes', data.first_mes || '');
    _.set(char, 'mes_example', data.mes_example || '');
    // Old ST extension fields (for backward compatibility, will be deprecated)
    _.set(char, 'creatorcomment', data.creator_notes);
    _.set(char, 'avatar', 'none');
    _.set(char, 'chat', data.ch_name + ' - ' + humanizedISO8601DateTime());
    _.set(char, 'talkativeness', data.talkativeness);
    _.set(char, 'fav', data.fav == 'true');
    _.set(char, 'create_date', humanizedISO8601DateTime());
    // Spec V2 fields
    _.set(char, 'spec', 'chara_card_v2');
    _.set(char, 'spec_version', '2.0');
    _.set(char, 'data.name', data.ch_name);
    _.set(char, 'data.description', data.description || '');
    _.set(char, 'data.personality', data.personality || '');
    _.set(char, 'data.scenario', data.scenario || '');
    _.set(char, 'data.first_mes', data.first_mes || '');
    _.set(char, 'data.mes_example', data.mes_example || '');
    // New V2 fields
    _.set(char, 'data.creator_notes', data.creator_notes || '');
    _.set(char, 'data.system_prompt', data.system_prompt || '');
    _.set(char, 'data.post_history_instructions', data.post_history_instructions || '');
    _.set(char, 'data.tags', typeof data.tags == 'string' ? (data.tags.split(',').map(x => x.trim()).filter(x => x)) : data.tags || []);
    _.set(char, 'data.creator', data.creator || '');
    _.set(char, 'data.character_version', data.character_version || '');
    _.set(char, 'data.alternate_greetings', getAlternateGreetings(data));
    // ST extension fields to V2 object
    _.set(char, 'data.extensions.talkativeness', data.talkativeness);
    _.set(char, 'data.extensions.fav', data.fav == 'true');
    _.set(char, 'data.extensions.world', data.world || '');
    //_.set(char, 'data.extensions.create_date', humanizedISO8601DateTime());
    //_.set(char, 'data.extensions.avatar', 'none');
    //_.set(char, 'data.extensions.chat', data.ch_name + ' - ' + humanizedISO8601DateTime());

    if (data.world) {
        try {
            const file = readWorldInfoFile(data.world);

            // File was imported - save it to the character book
            if (file && file.originalData) {
                _.set(char, 'data.character_book', file.originalData);
            }

            // File was not imported - convert the world info to the character book
            if (file && file.entries) {
                _.set(char, 'data.character_book', convertWorldInfoToCharacterBook(data.world, file.entries));
            }
        } catch {
            console.debug(`Failed to read world info file: ${data.world}. Character book will not be available.`);
        }
    }

    return char;
}

app.post("/createcharacter", urlencodedParser, async function (request, response) {
    if (!request.body) return response.sendStatus(400);

    request.body.ch_name = sanitize(request.body.ch_name);

    const char = JSON.stringify(charaFormatData(request.body));
    const internalName = getPngName(request.body.ch_name);
    const avatarName = `${internalName}.png`;
    const defaultAvatar = './public/img/ai4.png';
    const chatsPath = directories.chats + internalName; //path.join(chatsPath, internalName);

    if (!fs.existsSync(chatsPath)) fs.mkdirSync(chatsPath);

    if (!request.file) {
        charaWrite(defaultAvatar, char, internalName, response, avatarName);
    } else {
        const crop = tryParse(request.query.crop);
        const uploadPath = path.join(UPLOADS_PATH, request.file.filename);
        await charaWrite(uploadPath, char, internalName, response, avatarName, crop);
        fs.unlinkSync(uploadPath);
    }
});

app.post('/renamechat', jsonParser, async function (request, response) {
    if (!request.body || !request.body.original_file || !request.body.renamed_file) {
        return response.sendStatus(400);
    }

    const pathToFolder = request.body.is_group
        ? directories.groupChats
        : path.join(directories.chats, String(request.body.avatar_url).replace('.png', ''));
    const pathToOriginalFile = path.join(pathToFolder, request.body.original_file);
    const pathToRenamedFile = path.join(pathToFolder, request.body.renamed_file);
    console.log('Old chat name', pathToOriginalFile);
    console.log('New chat name', pathToRenamedFile);

    if (!fs.existsSync(pathToOriginalFile) || fs.existsSync(pathToRenamedFile)) {
        console.log('Either Source or Destination files are not available');
        return response.status(400).send({ error: true });
    }

    console.log('Successfully renamed.');
    fs.renameSync(pathToOriginalFile, pathToRenamedFile);
    return response.send({ ok: true });
});

app . post ( "/renamecharacter" , jsonParser , async function ( request , response ) {
if ( ! request . body . avatar _url || ! request . body . new _name ) {
return response . sendStatus ( 400 ) ;
}
const oldAvatarName = request . body . avatar _url ;
const newName = sanitize ( request . body . new _name ) ;
const oldInternalName = path . parse ( request . body . avatar _url ) . name ;
const newInternalName = getPngName ( newName ) ;
const newAvatarName = ` ${ newInternalName } .png ` ;
const oldAvatarPath = path . join ( charactersPath , oldAvatarName ) ;
const oldChatsPath = path . join ( chatsPath , oldInternalName ) ;
const newChatsPath = path . join ( chatsPath , newInternalName ) ;
try {
const _ = require ( 'lodash' ) ;
// Read old file, replace name int it
const rawOldData = await charaRead ( oldAvatarPath ) ;
const oldData = getCharaCardV2 ( json5 . parse ( rawOldData ) ) ;
_ . set ( oldData , 'data.name' , newName ) ;
_ . set ( oldData , 'name' , newName ) ;
const newData = JSON . stringify ( oldData ) ;
// Write data to new location
await charaWrite ( oldAvatarPath , newData , newInternalName ) ;
// Rename chats folder
if ( fs . existsSync ( oldChatsPath ) && ! fs . existsSync ( newChatsPath ) ) {
fs . renameSync ( oldChatsPath , newChatsPath ) ;
}
// Remove the old character file
fs . rmSync ( oldAvatarPath ) ;
// Return new avatar name to ST
return response . send ( { 'avatar' : newAvatarName } ) ;
}
catch ( err ) {
console . error ( err ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( "/editcharacter" , urlencodedParser , async function ( request , response ) {
if ( ! request . body ) {
console . error ( 'Error: no response body detected' ) ;
response . status ( 400 ) . send ( 'Error: no response body detected' ) ;
return ;
}
if ( request . body . ch _name === '' || request . body . ch _name === undefined || request . body . ch _name === '.' ) {
console . error ( 'Error: invalid name.' ) ;
response . status ( 400 ) . send ( 'Error: invalid name.' ) ;
return ;
}
let char = charaFormatData ( request . body ) ;
char . chat = request . body . chat ;
char . create _date = request . body . create _date ;
char = JSON . stringify ( char ) ;
let target _img = ( request . body . avatar _url ) . replace ( '.png' , '' ) ;
try {
if ( ! request . file ) {
const avatarPath = path . join ( charactersPath , request . body . avatar _url ) ;
await charaWrite ( avatarPath , char , target _img , response , 'Character saved' ) ;
} else {
const crop = tryParse ( request . query . crop ) ;
2023-08-19 16:43:56 +02:00
const newAvatarPath = path . join ( UPLOADS _PATH , request . file . filename ) ;
2023-07-20 19:32:15 +02:00
invalidateThumbnail ( 'avatar' , request . body . avatar _url ) ;
await charaWrite ( newAvatarPath , char , target _img , response , 'Character saved' , crop ) ;
2023-08-19 16:43:56 +02:00
fs . unlinkSync ( newAvatarPath ) ;
2023-07-20 19:32:15 +02:00
}
}
catch {
console . error ( 'An error occured, character edit invalidated.' ) ;
}
} ) ;
/**
 * Handle a POST request to edit a character attribute.
 *
 * This function reads the character data from a file, updates the specified attribute,
 * and writes the updated data back to the file.
 *
 * @param {Object} request - The HTTP request object.
 * @param {Object} response - The HTTP response object.
 * @returns {void}
 */
app . post ( "/editcharacterattribute" , jsonParser , async function ( request , response ) {
console . log ( request . body ) ;
if ( ! request . body ) {
console . error ( 'Error: no response body detected' ) ;
response . status ( 400 ) . send ( 'Error: no response body detected' ) ;
return ;
}
if ( request . body . ch _name === '' || request . body . ch _name === undefined || request . body . ch _name === '.' ) {
console . error ( 'Error: invalid name.' ) ;
response . status ( 400 ) . send ( 'Error: invalid name.' ) ;
return ;
}
try {
const avatarPath = path . join ( charactersPath , request . body . avatar _url ) ;
charaRead ( avatarPath ) . then ( ( char ) => {
char = JSON . parse ( char ) ;
//check if the field exists
if ( char [ request . body . field ] === undefined && char . data [ request . body . field ] === undefined ) {
console . error ( 'Error: invalid field.' ) ;
response . status ( 400 ) . send ( 'Error: invalid field.' ) ;
return ;
}
char [ request . body . field ] = request . body . value ;
char . data [ request . body . field ] = request . body . value ;
char = JSON . stringify ( char ) ;
return { char } ;
} ) . then ( ( { char } ) => {
charaWrite ( avatarPath , char , ( request . body . avatar _url ) . replace ( '.png' , '' ) , response , 'Character saved' ) ;
} ) . catch ( ( err ) => {
console . error ( 'An error occured, character edit invalidated.' , err ) ;
} ) ;
}
catch {
console . error ( 'An error occured, character edit invalidated.' ) ;
}
} ) ;
app.post("/deletecharacter", jsonParser, async function (request, response) {
    if (!request.body || !request.body.avatar_url) {
        return response.sendStatus(400);
    }

    if (request.body.avatar_url !== sanitize(request.body.avatar_url)) {
        console.error('Malicious filename prevented');
        return response.sendStatus(403);
    }

    const avatarPath = charactersPath + request.body.avatar_url;
    if (!fs.existsSync(avatarPath)) {
        return response.sendStatus(400);
    }

    fs.rmSync(avatarPath);
    invalidateThumbnail('avatar', request.body.avatar_url);
    let dir_name = (request.body.avatar_url.replace('.png', ''));

    if (!dir_name.length) {
        console.error('Malicious dirname prevented');
        return response.sendStatus(403);
    }

    if (request.body.delete_chats == true) {
        try {
            await fs.promises.rm(path.join(chatsPath, sanitize(dir_name)), { recursive: true, force: true });
        } catch (err) {
            console.error(err);
            return response.sendStatus(500);
        }
    }

    return response.sendStatus(200);
});

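/**
 * Writes a character card: embeds the JSON payload as a base64-encoded 'chara' tEXt chunk
 * inside the character's PNG avatar, replacing any existing tEXt chunks.
 */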
async function charaWrite(img_url, data, target_img, response = undefined, mes = 'ok', crop = undefined) {
    try {
        // Read the image, resize, and save it as a PNG into the buffer
        const image = await tryReadImage(img_url, crop);

        // Get the chunks
        const chunks = extract(image);
        const tEXtChunks = chunks.filter(chunk => chunk.name === 'tEXt');

        // Remove all existing tEXt chunks
        for (let tEXtChunk of tEXtChunks) {
            chunks.splice(chunks.indexOf(tEXtChunk), 1);
        }
        // Add new chunks before the IEND chunk
        const base64EncodedData = Buffer.from(data, 'utf8').toString('base64');
        chunks.splice(-1, 0, PNGtext.encode('chara', base64EncodedData));
        //chunks.splice(-1, 0, text.encode('lorem', 'ipsum'));

        writeFileAtomicSync(charactersPath + target_img + '.png', Buffer.from(encode(chunks)));
        if (response !== undefined) response.send(mes);
        return true;
    } catch (err) {
        console.log(err);
        if (response !== undefined) response.status(500).send(err);
        return false;
    }
}

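// Reads and crops/resizes an avatar image to PNG; falls back to returning the raw file buffer for formats jimp can't handle (e.g. APNG).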
async function tryReadImage(img_url, crop) {
    try {
        let rawImg = await jimp.read(img_url);
        let final_width = rawImg.bitmap.width, final_height = rawImg.bitmap.height;

        // Apply crop if defined
        if (typeof crop == 'object' && [crop.x, crop.y, crop.width, crop.height].every(x => typeof x === 'number')) {
            rawImg = rawImg.crop(crop.x, crop.y, crop.width, crop.height);
            // Apply standard resize if requested
            if (crop.want_resize) {
                final_width = AVATAR_WIDTH;
                final_height = AVATAR_HEIGHT;
            }
        }

        const image = await rawImg.cover(final_width, final_height).getBufferAsync(jimp.MIME_PNG);
        return image;
    }
    // If it's an unsupported type of image (APNG) - just read the file as buffer
    catch {
        return fs.readFileSync(img_url);
    }
}

async function charaRead(img_url, input_format) {
    return characterCardParser.parse(img_url, input_format);
}

/**
 * calculateChatSize - Calculates the total chat size for a given character.
 *
 * @param {string} charDir The directory where the chats are stored.
 * @return {number} The total chat size.
 */
const calculateChatSize = (charDir) => {
    let chatSize = 0;
    let dateLastChat = 0;

    if (fs.existsSync(charDir)) {
        const chats = fs.readdirSync(charDir);
        if (Array.isArray(chats) && chats.length) {
            for (const chat of chats) {
                const chatStat = fs.statSync(path.join(charDir, chat));
                chatSize += chatStat.size;
                dateLastChat = Math.max(dateLastChat, chatStat.mtimeMs);
            }
        }
    }

    return { chatSize, dateLastChat };
}

// Calculate the total string length of the data object
const calculateDataSize = (data) => {
    return typeof data === 'object' ? Object.values(data).reduce((acc, val) => acc + new String(val).length, 0) : 0;
}

/**
 * processCharacter - Process a given character, read its data and calculate its statistics.
 *
 * @param {string} item The name of the character.
 * @param {number} i The index of the character in the characters list.
 * @return {Promise} A Promise that resolves when the character processing is done.
 */
const processCharacter = async (item, i) => {
    try {
        const img_data = await charaRead(charactersPath + item);
        let jsonObject = getCharaCardV2(json5.parse(img_data));
        jsonObject.avatar = item;
        characters[i] = jsonObject;
        characters[i]['json_data'] = img_data;
        const charStat = fs.statSync(path.join(charactersPath, item));
        characters[i]['date_added'] = charStat.birthtimeMs;
        const char_dir = path.join(chatsPath, item.replace('.png', ''));

        const { chatSize, dateLastChat } = calculateChatSize(char_dir);
        characters[i]['chat_size'] = chatSize;
        characters[i]['date_last_chat'] = dateLastChat;
        characters[i]['data_size'] = calculateDataSize(jsonObject?.data);
    }
    catch (err) {
        characters[i] = {
            date_added: 0,
            date_last_chat: 0,
            chat_size: 0
        };

        console.log(`Could not process character: ${item}`);

        if (err instanceof SyntaxError) {
            console.log("String [" + i + "] is not valid JSON!");
        } else {
            console.log("An unexpected error occurred: ", err);
        }
    }
}

/**
 * HTTP POST endpoint for the "/getcharacters" route.
 *
 * This endpoint is responsible for reading character files from the `charactersPath` directory,
 * parsing character data, calculating stats for each character and responding with the data.
 * Stats are calculated only on the first run, on subsequent runs the stats are fetched from
 * the `charStats` variable.
 * The stats are calculated by the `calculateStats` function.
 * The characters are processed by the `processCharacter` function.
 *
 * @param {object} request The HTTP request object.
 * @param {object} response The HTTP response object.
 * @return {undefined} Does not return a value.
 */
app.post("/getcharacters", jsonParser, function (request, response) {
    fs.readdir(charactersPath, async (err, files) => {
        if (err) {
            console.error(err);
            return;
        }

        const pngFiles = files.filter(file => file.endsWith('.png'));
        characters = {};

        let processingPromises = pngFiles.map((file, index) => processCharacter(file, index));
        await Promise.all(processingPromises); performance.mark('B');

        response.send(JSON.stringify(characters));
    });
});

app.post("/getonecharacter", jsonParser, async function (request, response) {
    if (!request.body) return response.sendStatus(400);
    const item = request.body.avatar_url;
    const filePath = path.join(charactersPath, item);

    if (!fs.existsSync(filePath)) {
        return response.sendStatus(404);
    }

    characters = {};
    await processCharacter(item, 0);

    return response.send(characters[0]);
});

/**
 * Handle a POST request to get the stats object
 *
 * This function returns the stats object that was calculated by the `calculateStats` function.
 *
 * @param {Object} request - The HTTP request object.
 * @param {Object} response - The HTTP response object.
 * @returns {void}
 */
app.post("/getstats", jsonParser, function (request, response) {
    response.send(JSON.stringify(statsHelpers.getCharStats()));
});

/**
 * Handle a POST request to update the stats object
 *
 * This function updates the stats object with the data from the request body.
 *
 * @param {Object} request - The HTTP request object.
 * @param {Object} response - The HTTP response object.
 * @returns {void}
 */
app.post("/updatestats", jsonParser, function (request, response) {
    if (!request.body) return response.sendStatus(400);
    statsHelpers.setCharStats(request.body);
    return response.sendStatus(200);
});

app . post ( "/getbackgrounds" , jsonParser , function ( request , response ) {
var images = getImages ( "public/backgrounds" ) ;
response . send ( JSON . stringify ( images ) ) ;
} ) ;
app . post ( "/iscolab" , jsonParser , function ( request , response ) {
let send _data = false ;
if ( is _colab ) {
send _data = String ( process . env . colaburl ) . trim ( ) ;
}
response . send ( { colaburl : send _data } ) ;
} ) ;
app . post ( "/getuseravatars" , jsonParser , function ( request , response ) {
var images = getImages ( "public/User Avatars" ) ;
response . send ( JSON . stringify ( images ) ) ;
} ) ;
app . post ( '/deleteuseravatar' , jsonParser , function ( request , response ) {
if ( ! request . body ) return response . sendStatus ( 400 ) ;
if ( request . body . avatar !== sanitize ( request . body . avatar ) ) {
console . error ( 'Malicious avatar name prevented' ) ;
return response . sendStatus ( 403 ) ;
}
const fileName = path . join ( directories . avatars , sanitize ( request . body . avatar ) ) ;
if ( fs . existsSync ( fileName ) ) {
fs . rmSync ( fileName ) ;
return response . send ( { result : 'ok' } ) ;
}
return response . sendStatus ( 404 ) ;
} ) ;
app . post ( "/setbackground" , jsonParser , function ( request , response ) {
2023-08-18 11:11:18 +02:00
try {
const bg = ` #bg1 {background-image: url('../backgrounds/ ${ request . body . bg } ');} ` ;
writeFileAtomicSync ( 'public/css/bg_load.css' , bg , 'utf8' ) ;
response . send ( { result : 'ok' } ) ;
} catch ( err ) {
console . log ( err ) ;
response . send ( err ) ;
}
2023-07-20 19:32:15 +02:00
} ) ;
app.post("/delbackground", jsonParser, function (request, response) {
    if (!request.body) return response.sendStatus(400);

    if (request.body.bg !== sanitize(request.body.bg)) {
        console.error('Malicious bg name prevented');
        return response.sendStatus(403);
    }

    const fileName = path.join('public/backgrounds/', sanitize(request.body.bg));

    if (!fs.existsSync(fileName)) {
        console.log('BG file not found');
        return response.sendStatus(400);
    }

    fs.rmSync(fileName);
    invalidateThumbnail('bg', request.body.bg);
    return response.send('ok');
});

app . post ( "/delchat" , jsonParser , function ( request , response ) {
console . log ( '/delchat entered' ) ;
if ( ! request . body ) {
console . log ( 'no request body seen' ) ;
return response . sendStatus ( 400 ) ;
}
if ( request . body . chatfile !== sanitize ( request . body . chatfile ) ) {
console . error ( 'Malicious chat name prevented' ) ;
return response . sendStatus ( 403 ) ;
}
const dirName = String ( request . body . avatar _url ) . replace ( '.png' , '' ) ;
const fileName = ` ${ chatsPath + dirName } / ${ sanitize ( String ( request . body . chatfile ) ) } ` ;
const chatFileExists = fs . existsSync ( fileName ) ;
if ( ! chatFileExists ) {
console . log ( ` Chat file not found ' ${ fileName } ' ` ) ;
return response . sendStatus ( 400 ) ;
} else {
console . log ( 'found the chat file: ' + fileName ) ;
/* fs.unlinkSync(fileName); */
fs . rmSync ( fileName ) ;
console . log ( 'deleted chat file: ' + fileName ) ;
}
return response . send ( 'ok' ) ;
} ) ;
app.post('/renamebackground', jsonParser, function (request, response) {
    if (!request.body) return response.sendStatus(400);

    const oldFileName = path.join('public/backgrounds/', sanitize(request.body.old_bg));
    const newFileName = path.join('public/backgrounds/', sanitize(request.body.new_bg));

    if (!fs.existsSync(oldFileName)) {
        console.log('BG file not found');
        return response.sendStatus(400);
    }

    if (fs.existsSync(newFileName)) {
        console.log('New BG file already exists');
        return response.sendStatus(400);
    }

    fs.renameSync(oldFileName, newFileName);
    invalidateThumbnail('bg', request.body.old_bg);
    return response.send('ok');
});

app . post ( "/downloadbackground" , urlencodedParser , function ( request , response ) {
response _dw _bg = response ;
if ( ! request . body || ! request . file ) return response . sendStatus ( 400 ) ;
2023-08-19 16:43:56 +02:00
const img _path = path . join ( UPLOADS _PATH , request . file . filename ) ;
2023-07-20 19:32:15 +02:00
const filename = request . file . originalname ;
try {
fs . copyFileSync ( img _path , path . join ( 'public/backgrounds/' , filename ) ) ;
invalidateThumbnail ( 'bg' , filename ) ;
response _dw _bg . send ( filename ) ;
2023-08-19 16:43:56 +02:00
fs . unlinkSync ( img _path ) ;
2023-07-20 19:32:15 +02:00
} catch ( err ) {
console . error ( err ) ;
response _dw _bg . sendStatus ( 500 ) ;
}
} ) ;
app . post ( "/savesettings" , jsonParser , function ( request , response ) {
2023-08-18 11:11:18 +02:00
try {
writeFileAtomicSync ( 'public/settings.json' , JSON . stringify ( request . body , null , 4 ) , 'utf8' ) ;
response . send ( { result : "ok" } ) ;
} catch ( err ) {
console . log ( err ) ;
response . send ( err ) ;
}
2023-07-20 19:32:15 +02:00
} ) ;
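// Normalizes a character object to Spec V2: converts V1 cards and backfills V1 fields from V2 data.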
function getCharaCardV2 ( jsonObject ) {
if ( jsonObject . spec === undefined ) {
jsonObject = convertToV2 ( jsonObject ) ;
} else {
jsonObject = readFromV2 ( jsonObject ) ;
}
return jsonObject ;
}
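/**
 * Reads every file with the given extension from a directory and returns the parsed contents.
 * JSON files are parsed with json5; other extensions are returned as raw text. Files that fail
 * to read or parse are silently skipped.
 * @param {string} directoryPath - Directory to scan.
 * @param {string} [fileExtension='.json'] - Extension filter, including the leading dot.
 * @returns {Array} Parsed file contents in name order.
 */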
function readAndParseFromDirectory ( directoryPath , fileExtension = '.json' ) {
const files = fs
. readdirSync ( directoryPath )
. filter ( x => path . parse ( x ) . ext == fileExtension )
. sort ( ) ;
const parsedFiles = [ ] ;
files . forEach ( item => {
try {
const file = fs . readFileSync ( path . join ( directoryPath , item ) , 'utf-8' ) ;
parsedFiles . push ( fileExtension == '.json' ? json5 . parse ( file ) : file ) ;
}
catch {
// skip
}
} ) ;
return parsedFiles ;
}
function sortByModifiedDate ( directory ) {
return ( a , b ) => new Date ( fs . statSync ( ` ${ directory } / ${ b } ` ) . mtime ) - new Date ( fs . statSync ( ` ${ directory } / ${ a } ` ) . mtime ) ;
}
function sortByName ( _ ) {
return ( a , b ) => a . localeCompare ( b ) ;
}
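/**
 * Reads preset files from a directory, keeping only files that contain valid JSON.
 * @param {string} directoryPath - Directory to scan.
 * @param {Object} [options] - Optional settings.
 * @param {Function} [options.sortFunction] - Comparator passed to Array.prototype.sort for the file list.
 * @param {boolean} [options.removeFileExtension=false] - Strip the extension from the returned names.
 * @returns {{fileContents: string[], fileNames: string[]}} Raw file texts and their (optionally trimmed) names.
 *
 * Illustrative call (mirrors how /getsettings uses it below):
 *   const { fileContents, fileNames } = readPresetsFromDirectory(directories.openAI_Settings,
 *       { sortFunction: sortByModifiedDate(directories.openAI_Settings), removeFileExtension: true });
 */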
function readPresetsFromDirectory ( directoryPath , options = { } ) {
const {
sortFunction ,
removeFileExtension = false
} = options ;
const files = fs . readdirSync ( directoryPath ) . sort ( sortFunction ) ;
const fileContents = [ ] ;
const fileNames = [ ] ;
files . forEach ( item => {
try {
const file = fs . readFileSync ( path . join ( directoryPath , item ) , 'utf8' ) ;
json5 . parse ( file ) ;
fileContents . push ( file ) ;
fileNames . push ( removeFileExtension ? item . replace ( /\.[^/.]+$/ , '' ) : item ) ;
} catch {
// skip
console . log ( ` ${ item } is not a valid JSON ` ) ;
}
} ) ;
return { fileContents , fileNames } ;
}
// Wintermute's code
app . post ( '/getsettings' , jsonParser , ( request , response ) => {
const settings = fs . readFileSync ( 'public/settings.json' , 'utf8' ) ;
// NovelAI Settings
const { fileContents : novelai _settings , fileNames : novelai _setting _names }
= readPresetsFromDirectory ( directories . novelAI _Settings , {
sortFunction : sortByName ( directories . novelAI _Settings ) ,
removeFileExtension : true
} ) ;
// OpenAI Settings
const { fileContents : openai _settings , fileNames : openai _setting _names }
= readPresetsFromDirectory ( directories . openAI _Settings , {
sortFunction : sortByModifiedDate ( directories . openAI _Settings ) , removeFileExtension : true
} ) ;
// TextGenerationWebUI Settings
const { fileContents : textgenerationwebui _presets , fileNames : textgenerationwebui _preset _names }
= readPresetsFromDirectory ( directories . textGen _Settings , {
sortFunction : sortByName ( directories . textGen _Settings ) , removeFileExtension : true
} ) ;
//Kobold
const { fileContents : koboldai _settings , fileNames : koboldai _setting _names }
= readPresetsFromDirectory ( directories . koboldAI _Settings , {
sortFunction : sortByName ( directories . koboldAI _Settings ) , removeFileExtension : true
} )
const worldFiles = fs
. readdirSync ( directories . worlds )
. filter ( file => path . extname ( file ) . toLowerCase ( ) === '.json' )
. sort ( ( a , b ) => a . localeCompare ( b ) ) ;
const world _names = worldFiles . map ( item => path . parse ( item ) . name ) ;
const themes = readAndParseFromDirectory ( directories . themes ) ;
const movingUIPresets = readAndParseFromDirectory ( directories . movingUI ) ;
const quickReplyPresets = readAndParseFromDirectory ( directories . quickreplies ) ;
const instruct = readAndParseFromDirectory ( directories . instruct ) ;
const context = readAndParseFromDirectory ( directories . context ) ;
response . send ( {
settings ,
koboldai _settings ,
koboldai _setting _names ,
world _names ,
novelai _settings ,
novelai _setting _names ,
openai _settings ,
openai _setting _names ,
textgenerationwebui _presets ,
textgenerationwebui _preset _names ,
themes ,
movingUIPresets ,
quickReplyPresets ,
instruct ,
context ,
enable _extensions : enableExtensions ,
} ) ;
} ) ;
app . post ( '/getworldinfo' , jsonParser , ( request , response ) => {
if ( ! request . body ? . name ) {
return response . sendStatus ( 400 ) ;
}
const file = readWorldInfoFile ( request . body . name ) ;
return response . send ( file ) ;
} ) ;
app . post ( '/deleteworldinfo' , jsonParser , ( request , response ) => {
if ( ! request . body ? . name ) {
return response . sendStatus ( 400 ) ;
}
const worldInfoName = request . body . name ;
const filename = sanitize ( ` ${ worldInfoName } .json ` ) ;
const pathToWorldInfo = path . join ( directories . worlds , filename ) ;
if ( ! fs . existsSync ( pathToWorldInfo ) ) {
throw new Error ( ` World info file ${ filename } doesn't exist. ` ) ;
}
fs . rmSync ( pathToWorldInfo ) ;
return response . sendStatus ( 200 ) ;
} ) ;
app . post ( '/savetheme' , jsonParser , ( request , response ) => {
if ( ! request . body || ! request . body . name ) {
return response . sendStatus ( 400 ) ;
}
const filename = path . join ( directories . themes , sanitize ( request . body . name ) + '.json' ) ;
writeFileAtomicSync ( filename , JSON . stringify ( request . body , null , 4 ) , 'utf8' ) ;
return response . sendStatus ( 200 ) ;
} ) ;
app . post ( '/savemovingui' , jsonParser , ( request , response ) => {
if ( ! request . body || ! request . body . name ) {
return response . sendStatus ( 400 ) ;
}
const filename = path . join ( directories . movingUI , sanitize ( request . body . name ) + '.json' ) ;
writeFileAtomicSync ( filename , JSON . stringify ( request . body , null , 4 ) , 'utf8' ) ;
return response . sendStatus ( 200 ) ;
} ) ;
app . post ( '/savequickreply' , jsonParser , ( request , response ) => {
if ( ! request . body || ! request . body . name ) {
return response . sendStatus ( 400 ) ;
}
const filename = path . join ( directories . quickreplies , sanitize ( request . body . name ) + '.json' ) ;
writeFileAtomicSync ( filename , JSON . stringify ( request . body , null , 4 ) , 'utf8' ) ;
return response . sendStatus ( 200 ) ;
} ) ;
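/**
 * Converts an internal World Info entry map into the V2 card "character_book" structure.
 * Entry fields that have no direct V2 equivalent are carried along under `extensions`.
 * @param {string} name - Name of the resulting character book.
 * @param {Object} entries - World Info entries keyed by UID.
 * @returns {{name: string, entries: Object[]}} The character book object.
 */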
function convertWorldInfoToCharacterBook ( name , entries ) {
const result = { entries : [ ] , name } ;
for ( const index in entries ) {
const entry = entries [ index ] ;
const originalEntry = {
id : entry . uid ,
keys : entry . key ,
secondary _keys : entry . keysecondary ,
comment : entry . comment ,
content : entry . content ,
constant : entry . constant ,
selective : entry . selective ,
insertion _order : entry . order ,
enabled : ! entry . disable ,
position : entry . position == 0 ? 'before_char' : 'after_char' ,
extensions : {
position : entry . position ,
exclude _recursion : entry . excludeRecursion ,
display _index : entry . displayIndex ,
probability : entry . probability ? ? null ,
useProbability : entry . useProbability ? ? false ,
}
} ;
result . entries . push ( originalEntry ) ;
}
return result ;
}
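/**
 * Loads a World Info file from the worlds directory and parses it with json5.
 * @param {string} worldInfoName - File name without the .json extension.
 * @returns {Object} Parsed world info, or an empty entry map when no name is given.
 * @throws {Error} If the file does not exist.
 */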
function readWorldInfoFile ( worldInfoName ) {
if ( ! worldInfoName ) {
return { entries : { } } ;
}
const filename = ` ${ worldInfoName } .json ` ;
const pathToWorldInfo = path . join ( directories . worlds , filename ) ;
if ( ! fs . existsSync ( pathToWorldInfo ) ) {
throw new Error ( ` World info file ${ filename } doesn't exist. ` ) ;
}
const worldInfoText = fs . readFileSync ( pathToWorldInfo , 'utf8' ) ;
const worldInfo = json5 . parse ( worldInfoText ) ;
return worldInfo ;
}
function getImages ( path ) {
return fs
. readdirSync ( path )
. filter ( file => {
const type = mime . lookup ( file ) ;
return type && type . startsWith ( 'image/' ) ;
} )
. sort ( Intl . Collator ( ) . compare ) ;
}
//***********Novel.ai API
app . post ( "/getstatus_novelai" , jsonParser , function ( request , response _getstatus _novel = response ) {
if ( ! request . body ) return response _getstatus _novel . sendStatus ( 400 ) ;
const api _key _novel = readSecret ( SECRET _KEYS . NOVEL ) ;
if ( ! api _key _novel ) {
return response _getstatus _novel . sendStatus ( 401 ) ;
}
var data = { } ;
var args = {
data : data ,
headers : { "Content-Type" : "application/json" , "Authorization" : "Bearer " + api _key _novel }
} ;
client . get ( api _novelai + "/user/subscription" , args , function ( data , response ) {
if ( response . statusCode == 200 ) {
//console.log(data);
response _getstatus _novel . send ( data ) ; //data);
}
else {
if ( response . statusCode == 401 ) {
console . log ( 'Access Token is incorrect.' ) ;
}
console . log ( data ) ;
response _getstatus _novel . send ( { error : true } ) ;
}
} ) . on ( 'error' , function ( ) {
response _getstatus _novel . send ( { error : true } ) ;
} ) ;
} ) ;
app . post ( "/generate_novelai" , jsonParser , async function ( request , response _generate _novel = response ) {
if ( ! request . body ) return response _generate _novel . sendStatus ( 400 ) ;
const api _key _novel = readSecret ( SECRET _KEYS . NOVEL ) ;
if ( ! api _key _novel ) {
return response _generate _novel . sendStatus ( 401 ) ;
}
const controller = new AbortController ( ) ;
request . socket . removeAllListeners ( 'close' ) ;
request . socket . on ( 'close' , function ( ) {
controller . abort ( ) ;
} ) ;
const novelai = require ( './src/novelai' ) ;
const isNewModel = ( request . body . model . includes ( 'clio' ) || request . body . model . includes ( 'kayra' ) ) ;
const badWordsList = novelai . getBadWordsList ( request . body . model ) ;
// Add customized bad words for Clio and Kayra
if ( isNewModel && Array . isArray ( request . body . bad _words _ids ) ) {
for ( const badWord of request . body . bad _words _ids ) {
if ( Array . isArray ( badWord ) && badWord . every ( x => Number . isInteger ( x ) ) ) {
badWordsList . push ( badWord ) ;
}
}
}
// Add default biases for dinkus and asterism
const logit _bias _exp = isNewModel ? novelai . logitBiasExp . slice ( ) : [ ] ;
if ( Array . isArray ( logit _bias _exp ) && Array . isArray ( request . body . logit _bias _exp ) ) {
logit _bias _exp . push ( ... request . body . logit _bias _exp ) ;
}
const data = {
"input" : request . body . input ,
"model" : request . body . model ,
"parameters" : {
"use_string" : request . body . use _string ,
"temperature" : request . body . temperature ,
"max_length" : request . body . max _length ,
"min_length" : request . body . min _length ,
"tail_free_sampling" : request . body . tail _free _sampling ,
"repetition_penalty" : request . body . repetition _penalty ,
"repetition_penalty_range" : request . body . repetition _penalty _range ,
"repetition_penalty_slope" : request . body . repetition _penalty _slope ,
"repetition_penalty_frequency" : request . body . repetition _penalty _frequency ,
"repetition_penalty_presence" : request . body . repetition _penalty _presence ,
"repetition_penalty_whitelist" : isNewModel ? novelai . repPenaltyAllowList : null ,
"top_a" : request . body . top _a ,
"top_p" : request . body . top _p ,
"top_k" : request . body . top _k ,
"typical_p" : request . body . typical _p ,
"mirostat_lr" : request . body . mirostat _lr ,
"mirostat_tau" : request . body . mirostat _tau ,
"cfg_scale" : request . body . cfg _scale ,
"cfg_uc" : request . body . cfg _uc ,
"phrase_rep_pen" : request . body . phrase _rep _pen ,
"stop_sequences" : request . body . stop _sequences ,
"bad_words_ids" : badWordsList ,
"logit_bias_exp" : logit _bias _exp ,
"generate_until_sentence" : request . body . generate _until _sentence ,
"use_cache" : request . body . use _cache ,
"use_string" : request . body . use _string ? ? true ,
"return_full_text" : request . body . return _full _text ,
"prefix" : request . body . prefix ,
"order" : request . body . order
}
} ;
const util = require ( 'util' ) ;
console . log ( util . inspect ( data , { depth : 4 } ) )
const args = {
body : JSON . stringify ( data ) ,
headers : { "Content-Type" : "application/json" , "Authorization" : "Bearer " + api _key _novel } ,
signal : controller . signal ,
} ;
try {
const fetch = require ( 'node-fetch' ) . default ;
const url = request . body . streaming ? ` ${ api _novelai } /ai/generate-stream ` : ` ${ api _novelai } /ai/generate ` ;
const response = await fetch ( url , { method : 'POST' , timeout : 0 , ... args } ) ;
if ( request . body . streaming ) {
// Pipe remote SSE stream to Express response
response . body . pipe ( response _generate _novel ) ;
request . socket . on ( 'close' , function ( ) {
response . body . destroy ( ) ; // Close the remote stream
response _generate _novel . end ( ) ; // End the Express response
} ) ;
response . body . on ( 'end' , function ( ) {
console . log ( "Streaming request finished" ) ;
response _generate _novel . end ( ) ;
} ) ;
} else {
if ( ! response . ok ) {
const text = await response . text ( ) ;
let message = text ;
console . log ( ` Novel API returned error: ${ response . status } ${ response . statusText } ${ text } ` ) ;
try {
const data = JSON . parse ( text ) ;
message = data . message ;
}
catch {
// ignore
}
return response _generate _novel . status ( response . status ) . send ( { error : { message } } ) ;
}
const data = await response . json ( ) ;
console . log ( data ) ;
return response _generate _novel . send ( data ) ;
}
} catch ( error ) {
return response _generate _novel . send ( { error : true } ) ;
}
} ) ;
app . post ( "/getallchatsofcharacter" , jsonParser , function ( request , response ) {
if ( ! request . body ) return response . sendStatus ( 400 ) ;
var char _dir = ( request . body . avatar _url ) . replace ( '.png' , '' )
fs . readdir ( chatsPath + char _dir , ( err , files ) => {
if ( err ) {
console . log ( 'found error in history loading' ) ;
console . error ( err ) ;
response . send ( { error : true } ) ;
return ;
}
// filter for JSON files
const jsonFiles = files . filter ( file => path . extname ( file ) === '.jsonl' ) ;
// sort the files by name
//jsonFiles.sort().reverse();
// print the sorted file names
var chatData = { } ;
let ii = jsonFiles . length ; //this is the number of files belonging to the character
if ( ii !== 0 ) {
//console.log('found '+ii+' chat logs to load');
for ( let i = jsonFiles . length - 1 ; i >= 0 ; i -- ) {
const file = jsonFiles [ i ] ;
const fileStream = fs . createReadStream ( chatsPath + char _dir + '/' + file ) ;
const fullPathAndFile = chatsPath + char _dir + '/' + file
const stats = fs . statSync ( fullPathAndFile ) ;
const fileSizeInKB = ( stats . size / 1024 ) . toFixed ( 2 ) + "kb" ;
//console.log(fileSizeInKB);
const rl = readline . createInterface ( {
input : fileStream ,
crlfDelay : Infinity
} ) ;
let lastLine ;
let itemCounter = 0 ;
rl . on ( 'line' , ( line ) => {
itemCounter ++ ;
lastLine = line ;
} ) ;
rl . on ( 'close' , ( ) => {
ii -- ;
if ( lastLine ) {
let jsonData = tryParse ( lastLine ) ;
if ( jsonData && ( jsonData . name !== undefined || jsonData . character _name !== undefined ) ) {
chatData [ i ] = { } ;
chatData [ i ] [ 'file_name' ] = file ;
chatData [ i ] [ 'file_size' ] = fileSizeInKB ;
chatData [ i ] [ 'chat_items' ] = itemCounter - 1 ;
chatData [ i ] [ 'mes' ] = jsonData [ 'mes' ] || '[The chat is empty]' ;
chatData [ i ] [ 'last_mes' ] = jsonData [ 'send_date' ] || Date . now ( ) ;
} else {
console . log ( 'Found an invalid or corrupted chat file: ' + fullPathAndFile ) ;
}
}
if ( ii === 0 ) {
//console.log('ii count went to zero, responding with chatData');
response . send ( chatData ) ;
}
//console.log('successfully closing getallchatsofcharacter');
rl . close ( ) ;
} ) ;
} ;
} else {
//console.log('Found No Chats. Exiting Load Routine.');
response . send ( { error : true } ) ;
} ;
} )
} ) ;
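/**
 * Picks a PNG base name that does not collide with an existing character card.
 * For example, if "Alice.png" already exists, the function returns "Alice1" (then "Alice2", and so on).
 * @param {string} file - Desired base name without the .png extension.
 * @returns {string} A non-colliding base name.
 */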
function getPngName ( file ) {
let i = 1 ;
let base _name = file ;
while ( fs . existsSync ( charactersPath + file + '.png' ) ) {
file = base _name + i ;
i ++ ;
}
return file ;
}
app . post ( "/importcharacter" , urlencodedParser , async function ( request , response ) {
if ( ! request . body ) return response . sendStatus ( 400 ) ;
let png _name = '' ;
let filedata = request . file ;
let uploadPath = path . join ( UPLOADS _PATH , filedata . filename ) ;
var format = request . body . file _type ;
const defaultAvatarPath = './public/img/ai4.png' ;
//console.log(format);
if ( filedata ) {
if ( format == 'json' ) {
fs . readFile ( uploadPath , 'utf8' , async ( err , data ) => {
fs . unlinkSync ( uploadPath ) ;
if ( err ) {
console . log ( err ) ;
response . send ( { error : true } ) ;
}
let jsonData = json5 . parse ( data ) ;
if ( jsonData . spec !== undefined ) {
console . log ( 'importing from v2 json' ) ;
importRisuSprites ( jsonData ) ;
unsetFavFlag ( jsonData ) ;
jsonData = readFromV2 ( jsonData ) ;
png _name = getPngName ( jsonData . data ? . name || jsonData . name ) ;
let char = JSON . stringify ( jsonData ) ;
charaWrite ( defaultAvatarPath , char , png _name , response , { file _name : png _name } ) ;
} else if ( jsonData . name !== undefined ) {
console . log ( 'importing from v1 json' ) ;
jsonData . name = sanitize ( jsonData . name ) ;
if ( jsonData . creator _notes ) {
jsonData . creator _notes = jsonData . creator _notes . replace ( "Creator's notes go here." , "" ) ;
}
png _name = getPngName ( jsonData . name ) ;
let char = {
"name" : jsonData . name ,
"description" : jsonData . description ? ? '' ,
"creatorcomment" : jsonData . creatorcomment ? ? jsonData . creator _notes ? ? '' ,
"personality" : jsonData . personality ? ? '' ,
"first_mes" : jsonData . first _mes ? ? '' ,
"avatar" : 'none' ,
"chat" : jsonData . name + " - " + humanizedISO8601DateTime ( ) ,
"mes_example" : jsonData . mes _example ? ? '' ,
"scenario" : jsonData . scenario ? ? '' ,
"create_date" : humanizedISO8601DateTime ( ) ,
"talkativeness" : jsonData . talkativeness ? ? 0.5 ,
"creator" : jsonData . creator ? ? '' ,
"tags" : jsonData . tags ? ? '' ,
} ;
char = convertToV2 ( char ) ;
char = JSON . stringify ( char ) ;
charaWrite ( defaultAvatarPath , char , png _name , response , { file _name : png _name } ) ;
} else if ( jsonData . char _name !== undefined ) { //json Pygmalion notepad
console . log ( 'importing from gradio json' ) ;
jsonData . char _name = sanitize ( jsonData . char _name ) ;
if ( jsonData . creator _notes ) {
jsonData . creator _notes = jsonData . creator _notes . replace ( "Creator's notes go here." , "" ) ;
}
png _name = getPngName ( jsonData . char _name ) ;
let char = {
"name" : jsonData . char _name ,
"description" : jsonData . char _persona ? ? '' ,
"creatorcomment" : jsonData . creatorcomment ? ? jsonData . creator _notes ? ? '' ,
"personality" : '' ,
"first_mes" : jsonData . char _greeting ? ? '' ,
"avatar" : 'none' ,
"chat" : jsonData . name + " - " + humanizedISO8601DateTime ( ) ,
"mes_example" : jsonData . example _dialogue ? ? '' ,
"scenario" : jsonData . world _scenario ? ? '' ,
"create_date" : humanizedISO8601DateTime ( ) ,
"talkativeness" : jsonData . talkativeness ? ? 0.5 ,
"creator" : jsonData . creator ? ? '' ,
"tags" : jsonData . tags ? ? '' ,
} ;
char = convertToV2 ( char ) ;
char = JSON . stringify ( char ) ;
charaWrite ( defaultAvatarPath , char , png _name , response , { file _name : png _name } ) ;
} else {
console . log ( 'Incorrect character format .json' ) ;
response . send ( { error : true } ) ;
}
} ) ;
} else {
try {
var img _data = await charaRead ( uploadPath , format ) ;
let jsonData = json5 . parse ( img _data ) ;
jsonData . name = sanitize ( jsonData . data ? . name || jsonData . name ) ;
png _name = getPngName ( jsonData . name ) ;
if ( format == 'webp' ) {
try {
let convertedPath = path . join ( UPLOADS _PATH , path . basename ( uploadPath , ".webp" ) + ".png" )
await webp . dwebp ( uploadPath , convertedPath , "-o" ) ;
fs . unlinkSync ( uploadPath ) ;
uploadPath = convertedPath ;
}
catch {
console . error ( 'WEBP image conversion failed. Using the default character image.' ) ;
uploadPath = defaultAvatarPath ;
}
}
if ( jsonData . spec !== undefined ) {
console . log ( 'Found a v2 character file.' ) ;
importRisuSprites ( jsonData ) ;
unsetFavFlag ( jsonData ) ;
jsonData = readFromV2 ( jsonData ) ;
let char = JSON . stringify ( jsonData ) ;
await charaWrite ( uploadPath , char , png _name , response , { file _name : png _name } ) ;
fs . unlinkSync ( uploadPath ) ;
} else if ( jsonData . name !== undefined ) {
console . log ( 'Found a v1 character file.' ) ;
if ( jsonData . creator _notes ) {
jsonData . creator _notes = jsonData . creator _notes . replace ( "Creator's notes go here." , "" ) ;
}
let char = {
"name" : jsonData . name ,
"description" : jsonData . description ? ? '' ,
"creatorcomment" : jsonData . creatorcomment ? ? jsonData . creator _notes ? ? '' ,
"personality" : jsonData . personality ? ? '' ,
"first_mes" : jsonData . first _mes ? ? '' ,
"avatar" : 'none' ,
"chat" : jsonData . name + " - " + humanizedISO8601DateTime ( ) ,
"mes_example" : jsonData . mes _example ? ? '' ,
"scenario" : jsonData . scenario ? ? '' ,
"create_date" : humanizedISO8601DateTime ( ) ,
"talkativeness" : jsonData . talkativeness ? ? 0.5 ,
"creator" : jsonData . creator ? ? '' ,
"tags" : jsonData . tags ? ? '' ,
} ;
char = convertToV2 ( char ) ;
char = JSON . stringify ( char ) ;
await charaWrite ( uploadPath , char , png _name , response , { file _name : png _name } ) ;
fs . unlinkSync ( uploadPath ) ;
} else {
console . log ( 'Unknown character card format' ) ;
response . send ( { error : true } ) ;
}
} catch ( err ) {
console . log ( err ) ;
response . send ( { error : true } ) ;
}
}
}
} ) ;
app . post ( "/dupecharacter" , jsonParser , async function ( request , response ) {
try {
if ( ! request . body . avatar _url ) {
console . log ( "avatar URL not found in request body" ) ;
console . log ( request . body ) ;
return response . sendStatus ( 400 ) ;
}
let filename = path . join ( directories . characters , sanitize ( request . body . avatar _url ) ) ;
if ( ! fs . existsSync ( filename ) ) {
console . log ( 'file for dupe not found' ) ;
console . log ( filename ) ;
return response . sendStatus ( 404 ) ;
}
let suffix = 1 ;
let newFilename = filename ;
while ( fs . existsSync ( newFilename ) ) {
let suffixStr = "_" + suffix ;
let ext = path . extname ( filename ) ;
newFilename = filename . slice ( 0 , - ext . length ) + suffixStr + ext ;
suffix ++ ;
}
fs . copyFile ( filename , newFilename , ( err ) => {
if ( err ) throw err ;
console . log ( ` ${ filename } was copied to ${ newFilename } ` ) ;
response . sendStatus ( 200 ) ;
} ) ;
}
catch ( error ) {
console . error ( error ) ;
return response . send ( { error : true } ) ;
}
} ) ;
app . post ( "/exportchat" , jsonParser , async function ( request , response ) {
if ( ! request . body . file || ( ! request . body . avatar _url && request . body . is _group === false ) ) {
return response . sendStatus ( 400 ) ;
}
const pathToFolder = request . body . is _group
? directories . groupChats
: path . join ( directories . chats , String ( request . body . avatar _url ) . replace ( '.png' , '' ) ) ;
let filename = path . join ( pathToFolder , request . body . file ) ;
let exportfilename = request . body . exportfilename
if ( ! fs . existsSync ( filename ) ) {
const errorMessage = {
message : ` Could not find JSONL file to export. Source chat file: ${ filename } . `
}
console . log ( errorMessage . message ) ;
return response . status ( 404 ) . json ( errorMessage ) ;
}
try {
// Short path for JSONL files
if ( request . body . format == 'jsonl' ) {
try {
const rawFile = fs . readFileSync ( filename , 'utf8' ) ;
const successMessage = {
message : ` Chat saved to ${ exportfilename } ` ,
result : rawFile ,
}
console . log ( ` Chat exported as ${ exportfilename } ` ) ;
return response . status ( 200 ) . json ( successMessage ) ;
}
catch ( err ) {
console . error ( err ) ;
const errorMessage = {
message : ` Could not read JSONL file to export. Source chat file: ${ filename } . `
}
console . log ( errorMessage . message ) ;
return response . status ( 500 ) . json ( errorMessage ) ;
}
}
const readline = require ( 'readline' ) ;
const readStream = fs . createReadStream ( filename ) ;
const rl = readline . createInterface ( {
input : readStream ,
} ) ;
let buffer = '' ;
rl . on ( 'line' , ( line ) => {
const data = JSON . parse ( line ) ;
if ( data . mes ) {
const name = data . name ;
const message = ( data ? . extra ? . display _text || data ? . mes || '' ) . replace ( /\r?\n/g , '\n' ) ;
buffer += ( ` ${ name } : ${ message } \n \n ` ) ;
}
} ) ;
rl . on ( 'close' , ( ) => {
const successMessage = {
message : ` Chat saved to ${ exportfilename } ` ,
result : buffer ,
}
console . log ( ` Chat exported as ${ exportfilename } ` ) ;
return response . status ( 200 ) . json ( successMessage ) ;
} ) ;
}
catch ( err ) {
console . log ( "chat export failed." )
console . log ( err ) ;
return response . sendStatus ( 400 ) ;
}
} )
app . post ( "/exportcharacter" , jsonParser , async function ( request , response ) {
if ( ! request . body . format || ! request . body . avatar _url ) {
return response . sendStatus ( 400 ) ;
}
let filename = path . join ( directories . characters , sanitize ( request . body . avatar _url ) ) ;
if ( ! fs . existsSync ( filename ) ) {
return response . sendStatus ( 404 ) ;
}
switch ( request . body . format ) {
case 'png' :
return response . sendFile ( filename , { root : process . cwd ( ) } ) ;
case 'json' : {
try {
let json = await charaRead ( filename ) ;
let jsonObject = getCharaCardV2 ( json5 . parse ( json ) ) ;
return response . type ( 'json' ) . send ( jsonObject )
}
catch {
return response . sendStatus ( 400 ) ;
}
}
case 'webp' : {
try {
let json = await charaRead ( filename ) ;
let stringByteArray = utf8Encode . encode ( json ) . toString ( ) ;
let inputWebpPath = path . join ( UPLOADS _PATH , ` ${ Date . now ( ) } _input.webp ` ) ;
let outputWebpPath = path . join ( UPLOADS _PATH , ` ${ Date . now ( ) } _output.webp ` ) ;
let metadataPath = path . join ( UPLOADS _PATH , ` ${ Date . now ( ) } _metadata.exif ` ) ;
let metadata =
{
"Exif" : {
[ exif . ExifIFD . UserComment ] : stringByteArray ,
} ,
} ;
const exifString = exif . dump ( metadata ) ;
writeFileAtomicSync ( metadataPath , exifString , 'binary' ) ;
await webp . cwebp ( filename , inputWebpPath , '-q 95' ) ;
await webp . webpmux _add ( inputWebpPath , outputWebpPath , metadataPath , 'exif' ) ;
response . sendFile ( outputWebpPath , { root : process . cwd ( ) } , ( ) => {
fs . rmSync ( inputWebpPath ) ;
fs . rmSync ( metadataPath ) ;
fs . rmSync ( outputWebpPath ) ;
} ) ;
return ;
}
catch ( err ) {
console . log ( err ) ;
return response . sendStatus ( 400 ) ;
}
}
}
return response . sendStatus ( 400 ) ;
} ) ;
app . post ( "/importgroupchat" , urlencodedParser , function ( request , response ) {
try {
const filedata = request . file ;
const chatname = humanizedISO8601DateTime ( ) ;
const pathToUpload = path . join ( UPLOADS _PATH , filedata . filename ) ;
const pathToNewFile = path . join ( directories . groupChats , ` ${ chatname } .jsonl ` ) ;
fs . copyFileSync ( pathToUpload , pathToNewFile ) ;
fs . unlinkSync ( pathToUpload ) ;
return response . send ( { res : chatname } ) ;
} catch ( error ) {
console . error ( error ) ;
return response . send ( { error : true } ) ;
}
} ) ;
app . post ( "/importchat" , urlencodedParser , function ( request , response ) {
if ( ! request . body ) return response . sendStatus ( 400 ) ;
var format = request . body . file _type ;
let filedata = request . file ;
let avatar _url = ( request . body . avatar _url ) . replace ( '.png' , '' ) ;
let ch _name = request . body . character _name ;
let user _name = request . body . user _name || 'You' ;
if ( filedata ) {
if ( format === 'json' ) {
fs . readFile ( path . join ( UPLOADS _PATH , filedata . filename ) , 'utf8' , ( err , data ) => {
if ( err ) {
console . log ( err ) ;
response . send ( { error : true } ) ;
}
const jsonData = json5 . parse ( data ) ;
if ( jsonData . histories !== undefined ) {
//console.log('/importchat confirms JSON histories are defined');
const chat = {
from ( history ) {
return [
{
user _name : user _name ,
character _name : ch _name ,
create _date : humanizedISO8601DateTime ( ) ,
} ,
... history . msgs . map (
( message ) => ( {
name : message . src . is _human ? user _name : ch _name ,
is _user : message . src . is _human ,
is _name : true ,
send _date : humanizedISO8601DateTime ( ) ,
mes : message . text ,
} )
) ] ;
}
}
const newChats = [ ] ;
( jsonData . histories . histories ? ? [ ] ) . forEach ( ( history ) => {
newChats . push ( chat . from ( history ) ) ;
} ) ;
const errors = [ ] ;
for ( const chat of newChats ) {
const filePath = ` ${ chatsPath + avatar _url } / ${ ch _name } - ${ humanizedISO8601DateTime ( ) } imported.jsonl ` ;
const fileContent = chat . map ( JSON . stringify ) . join ( '\n' ) ;
try {
writeFileAtomicSync ( filePath , fileContent , 'utf8' ) ;
} catch ( err ) {
errors . push ( err ) ;
}
}
if ( 0 < errors . length ) {
response . send ( 'Errors occurred while writing character files. Errors: ' + JSON . stringify ( errors ) ) ;
}
response . send ( { res : true } ) ;
} else if ( Array . isArray ( jsonData . data _visible ) ) {
// oobabooga's format
const chat = [ {
user _name : user _name ,
character _name : ch _name ,
create _date : humanizedISO8601DateTime ( ) ,
} ] ;
for ( const arr of jsonData . data _visible ) {
if ( arr [ 0 ] ) {
const userMessage = {
name : user _name ,
is _user : true ,
is _name : true ,
send _date : humanizedISO8601DateTime ( ) ,
mes : arr [ 0 ] ,
} ;
chat . push ( userMessage ) ;
}
if ( arr [ 1 ] ) {
const charMessage = {
name : ch _name ,
is _user : false ,
is _name : true ,
send _date : humanizedISO8601DateTime ( ) ,
mes : arr [ 1 ] ,
} ;
chat . push ( charMessage ) ;
}
}
writeFileAtomicSync ( ` ${ chatsPath + avatar _url } / ${ ch _name } - ${ humanizedISO8601DateTime ( ) } imported.jsonl ` , chat . map ( JSON . stringify ) . join ( '\n' ) , 'utf8' ) ;
response . send ( { res : true } ) ;
} else {
response . send ( { error : true } ) ;
}
} ) ;
}
if ( format === 'jsonl' ) {
//console.log(humanizedISO8601DateTime()+':imported chat format is JSONL');
const fileStream = fs . createReadStream ( path . join ( UPLOADS _PATH , filedata . filename ) ) ;
const rl = readline . createInterface ( {
input : fileStream ,
crlfDelay : Infinity
} ) ;
rl . once ( 'line' , ( line ) => {
let jsonData = json5 . parse ( line ) ;
if ( jsonData . user _name !== undefined || jsonData . name !== undefined ) {
fs . copyFile ( path . join ( UPLOADS _PATH , filedata . filename ) , ( ` ${ chatsPath + avatar _url } / ${ ch _name } - ${ humanizedISO8601DateTime ( ) } .jsonl ` ) , ( err ) => {
if ( err ) {
response . send ( { error : true } ) ;
return console . log ( err ) ;
} else {
response . send ( { res : true } ) ;
return ;
}
} ) ;
} else {
response . send ( { error : true } ) ;
return ;
}
rl . close ( ) ;
} ) ;
}
}
} ) ;
app . post ( '/importworldinfo' , urlencodedParser , ( request , response ) => {
if ( ! request . file ) return response . sendStatus ( 400 ) ;
const filename = ` ${ path . parse ( sanitize ( request . file . originalname ) ) . name } .json ` ;
let fileContents = null ;
if ( request . body . convertedData ) {
fileContents = request . body . convertedData ;
} else {
const pathToUpload = path . join ( UPLOADS _PATH , request . file . filename ) ;
fileContents = fs . readFileSync ( pathToUpload , 'utf8' ) ;
fs . unlinkSync ( pathToUpload ) ;
}
try {
const worldContent = json5 . parse ( fileContents ) ;
if ( ! ( 'entries' in worldContent ) ) {
throw new Error ( 'File must contain a world info entries list' ) ;
}
} catch ( err ) {
return response . status ( 400 ) . send ( 'Is not a valid world info file' ) ;
}
const pathToNewFile = path . join ( directories . worlds , filename ) ;
const worldName = path . parse ( pathToNewFile ) . name ;
if ( ! worldName ) {
return response . status ( 400 ) . send ( 'World file must have a name' ) ;
}
writeFileAtomicSync ( pathToNewFile , fileContents ) ;
return response . send ( { name : worldName } ) ;
} ) ;
app . post ( '/editworldinfo' , jsonParser , ( request , response ) => {
if ( ! request . body ) {
return response . sendStatus ( 400 ) ;
}
if ( ! request . body . name ) {
return response . status ( 400 ) . send ( 'World file must have a name' ) ;
}
try {
if ( ! ( 'entries' in request . body . data ) ) {
throw new Error ( 'World info must contain an entries list' ) ;
}
} catch ( err ) {
return response . status ( 400 ) . send ( 'Is not a valid world info file' ) ;
}
const filename = ` ${ sanitize ( request . body . name ) } .json ` ;
const pathToFile = path . join ( directories . worlds , filename ) ;
writeFileAtomicSync ( pathToFile , JSON . stringify ( request . body . data , null , 4 ) ) ;
return response . send ( { ok : true } ) ;
} ) ;
app . post ( '/uploaduseravatar' , urlencodedParser , async ( request , response ) => {
if ( ! request . file ) return response . sendStatus ( 400 ) ;
try {
const pathToUpload = path . join ( UPLOADS _PATH , request . file . filename ) ;
const crop = tryParse ( request . query . crop ) ;
let rawImg = await jimp . read ( pathToUpload ) ;
if ( typeof crop == 'object' && [ crop . x , crop . y , crop . width , crop . height ] . every ( x => typeof x === 'number' ) ) {
rawImg = rawImg . crop ( crop . x , crop . y , crop . width , crop . height ) ;
}
const image = await rawImg . cover ( AVATAR _WIDTH , AVATAR _HEIGHT ) . getBufferAsync ( jimp . MIME _PNG ) ;
const filename = request . body . overwrite _name || ` ${ Date . now ( ) } .png ` ;
const pathToNewFile = path . join ( directories . avatars , filename ) ;
writeFileAtomicSync ( pathToNewFile , image ) ;
fs . rmSync ( pathToUpload ) ;
return response . send ( { path : filename } ) ;
} catch ( err ) {
return response . status ( 400 ) . send ( 'Is not a valid image' ) ;
}
} ) ;
/ * *
* Ensure the directory for the provided file path exists .
* If not , it will recursively create the directory .
*
* @ param { string } filePath - The full path of the file for which the directory should be ensured .
* /
function ensureDirectoryExistence ( filePath ) {
const dirname = path . dirname ( filePath ) ;
if ( fs . existsSync ( dirname ) ) {
return true ;
}
ensureDirectoryExistence ( dirname ) ;
fs . mkdirSync ( dirname ) ;
}
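// Example: ensureDirectoryExistence(path.join(directories.userImages, 'Alice', '1.png')) creates
// the 'Alice' sub-folder (and any missing parents) before the image file itself is written;
// this is how the /uploadimage handler below uses it.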
/ * *
* Endpoint to handle image uploads .
* The image should be provided in the request body in base64 format .
* Optionally , a character name can be provided to save the image in a sub - folder .
*
* @ route POST / uploadimage
* @ param { Object } request . body - The request payload .
* @ param { string } request . body . image - The base64 encoded image data .
* @ param { string } [ request . body . ch _name ] - Optional character name to determine the sub - directory .
* @ returns { Object } response - The response object containing the path where the image was saved .
* /
app . post ( '/uploadimage' , jsonParser , async ( request , response ) => {
// Check for image data
if ( ! request . body || ! request . body . image ) {
return response . status ( 400 ) . send ( { error : "No image data provided" } ) ;
}
// Extracting the base64 data and the image format
const match = request . body . image . match ( /^data:image\/(png|jpg|webp);base64,(.+)$/ ) ;
if ( ! match ) {
return response . status ( 400 ) . send ( { error : "Invalid image format" } ) ;
}
const [ , format , base64Data ] = match ;
// Constructing filename and path
let filename = ` ${ Date . now ( ) } . ${ format } ` ;
if ( request . body . filename ) {
filename = ` ${ request . body . filename } . ${ format } ` ;
}
// if character is defined, save to a sub folder for that character
let pathToNewFile = path . join ( directories . userImages , filename ) ;
if ( request . body . ch _name ) {
pathToNewFile = path . join ( directories . userImages , request . body . ch _name , filename ) ;
}
try {
ensureDirectoryExistence ( pathToNewFile ) ;
const imageBuffer = Buffer . from ( base64Data , 'base64' ) ;
await fs . promises . writeFile ( pathToNewFile , imageBuffer ) ;
// send the path to the image, relative to the client folder, which means removing the first folder from the path which is 'public'
pathToNewFile = pathToNewFile . split ( path . sep ) . slice ( 1 ) . join ( path . sep ) ;
response . send ( { path : pathToNewFile } ) ;
} catch ( error ) {
console . log ( error ) ;
response . status ( 500 ) . send ( { error : "Failed to save the image" } ) ;
}
} ) ;
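// Illustrative client-side call for /uploadimage (not part of the server; assumes the frontend's
// usual headers such as the CSRF token are attached elsewhere):
//   fetch('/uploadimage', {
//       method: 'POST',
//       headers: { 'Content-Type': 'application/json' },
//       body: JSON.stringify({ image: 'data:image/png;base64,....', ch_name: 'Alice' }),
//   }).then(r => r.json()).then(({ path }) => console.log('saved to', path));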
app . post ( '/getgroups' , jsonParser , ( _ , response ) => {
const groups = [ ] ;
if ( ! fs . existsSync ( directories . groups ) ) {
fs . mkdirSync ( directories . groups ) ;
}
const files = fs . readdirSync ( directories . groups ) . filter ( x => path . extname ( x ) === '.json' ) ;
const chats = fs . readdirSync ( directories . groupChats ) . filter ( x => path . extname ( x ) === '.jsonl' ) ;
files . forEach ( function ( file ) {
try {
const filePath = path . join ( directories . groups , file ) ;
const fileContents = fs . readFileSync ( filePath , 'utf8' ) ;
const group = json5 . parse ( fileContents ) ;
const groupStat = fs . statSync ( filePath ) ;
group [ 'date_added' ] = groupStat . birthtimeMs ;
let chat _size = 0 ;
let date _last _chat = 0 ;
if ( Array . isArray ( group . chats ) && Array . isArray ( chats ) ) {
for ( const chat of chats ) {
if ( group . chats . includes ( path . parse ( chat ) . name ) ) {
const chatStat = fs . statSync ( path . join ( directories . groupChats , chat ) ) ;
chat _size += chatStat . size ;
date _last _chat = Math . max ( date _last _chat , chatStat . mtimeMs ) ;
}
}
}
group [ 'date_last_chat' ] = date _last _chat ;
group [ 'chat_size' ] = chat _size ;
groups . push ( group ) ;
}
catch ( error ) {
console . error ( error ) ;
}
} ) ;
return response . send ( groups ) ;
} ) ;
app . post ( '/creategroup' , jsonParser , ( request , response ) => {
if ( ! request . body ) {
return response . sendStatus ( 400 ) ;
}
const id = String ( Date . now ( ) ) ;
const groupMetadata = {
id : id ,
name : request . body . name ? ? 'New Group' ,
members : request . body . members ? ? [ ] ,
avatar _url : request . body . avatar _url ,
allow _self _responses : ! ! request . body . allow _self _responses ,
activation _strategy : request . body . activation _strategy ? ? 0 ,
disabled _members : request . body . disabled _members ? ? [ ] ,
chat _metadata : request . body . chat _metadata ? ? { } ,
fav : request . body . fav ,
chat _id : request . body . chat _id ? ? id ,
chats : request . body . chats ? ? [ id ] ,
} ;
const pathToFile = path . join ( directories . groups , ` ${ id } .json ` ) ;
const fileData = JSON . stringify ( groupMetadata ) ;
if ( ! fs . existsSync ( directories . groups ) ) {
fs . mkdirSync ( directories . groups ) ;
}
writeFileAtomicSync ( pathToFile , fileData ) ;
return response . send ( groupMetadata ) ;
} ) ;
app . post ( '/editgroup' , jsonParser , ( request , response ) => {
if ( ! request . body || ! request . body . id ) {
return response . sendStatus ( 400 ) ;
}
const id = request . body . id ;
const pathToFile = path . join ( directories . groups , ` ${ id } .json ` ) ;
const fileData = JSON . stringify ( request . body ) ;
writeFileAtomicSync ( pathToFile , fileData ) ;
return response . send ( { ok : true } ) ;
} ) ;
app . post ( '/getgroupchat' , jsonParser , ( request , response ) => {
if ( ! request . body || ! request . body . id ) {
return response . sendStatus ( 400 ) ;
}
const id = request . body . id ;
const pathToFile = path . join ( directories . groupChats , ` ${ id } .jsonl ` ) ;
if ( fs . existsSync ( pathToFile ) ) {
const data = fs . readFileSync ( pathToFile , 'utf8' ) ;
const lines = data . split ( '\n' ) ;
// Iterate through the array of strings and parse each line as JSON
const jsonData = lines . map ( json5 . parse ) ;
return response . send ( jsonData ) ;
} else {
return response . send ( [ ] ) ;
}
} ) ;
app . post ( '/deletegroupchat' , jsonParser , ( request , response ) => {
if ( ! request . body || ! request . body . id ) {
return response . sendStatus ( 400 ) ;
}
const id = request . body . id ;
const pathToFile = path . join ( directories . groupChats , ` ${ id } .jsonl ` ) ;
if ( fs . existsSync ( pathToFile ) ) {
fs . rmSync ( pathToFile ) ;
return response . send ( { ok : true } ) ;
}
return response . send ( { error : true } ) ;
} ) ;
app . post ( '/savegroupchat' , jsonParser , ( request , response ) => {
if ( ! request . body || ! request . body . id ) {
return response . sendStatus ( 400 ) ;
}
const id = request . body . id ;
const pathToFile = path . join ( directories . groupChats , ` ${ id } .jsonl ` ) ;
if ( ! fs . existsSync ( directories . groupChats ) ) {
fs . mkdirSync ( directories . groupChats ) ;
}
let chat _data = request . body . chat ;
let jsonlData = chat _data . map ( JSON . stringify ) . join ( '\n' ) ;
writeFileAtomicSync ( pathToFile , jsonlData , 'utf8' ) ;
return response . send ( { ok : true } ) ;
} ) ;
app . post ( '/deletegroup' , jsonParser , async ( request , response ) => {
if ( ! request . body || ! request . body . id ) {
return response . sendStatus ( 400 ) ;
}
const id = request . body . id ;
const pathToGroup = path . join ( directories . groups , sanitize ( ` ${ id } .json ` ) ) ;
try {
// Delete group chats
const group = json5 . parse ( fs . readFileSync ( pathToGroup ) ) ;
if ( group && Array . isArray ( group . chats ) ) {
for ( const chat of group . chats ) {
console . log ( 'Deleting group chat' , chat ) ;
const pathToFile = path . join ( directories . groupChats , ` ${ id } .jsonl ` ) ;
if ( fs . existsSync ( pathToFile ) ) {
fs . rmSync ( pathToFile ) ;
}
}
}
} catch ( error ) {
console . error ( 'Could not delete group chats. Clean them up manually.' , error ) ;
}
if ( fs . existsSync ( pathToGroup ) ) {
fs . rmSync ( pathToGroup ) ;
}
return response . send ( { ok : true } ) ;
} ) ;
/ * *
* Discover the extension folders
* If the folder is called third - party , search for subfolders instead
* /
app . get ( '/discover_extensions' , jsonParser , function ( _ , response ) {
// get all folders in the extensions folder, except third-party
const extensions = fs
. readdirSync ( directories . extensions )
. filter ( f => fs . statSync ( path . join ( directories . extensions , f ) ) . isDirectory ( ) )
. filter ( f => f !== 'third-party' ) ;
// get all folders in the third-party folder, if it exists
if ( ! fs . existsSync ( path . join ( directories . extensions , 'third-party' ) ) ) {
return response . send ( extensions ) ;
}
const thirdPartyExtensions = fs
. readdirSync ( path . join ( directories . extensions , 'third-party' ) )
. filter ( f => fs . statSync ( path . join ( directories . extensions , 'third-party' , f ) ) . isDirectory ( ) ) ;
// add the third-party extensions to the extensions array
extensions . push ( ... thirdPartyExtensions . map ( f => ` third-party/ ${ f } ` ) ) ;
console . log ( extensions ) ;
return response . send ( extensions ) ;
} ) ;
app . get ( '/get_sprites' , jsonParser , function ( request , response ) {
const name = request . query . name ;
const spritesPath = path . join ( directories . characters , name ) ;
let sprites = [ ] ;
try {
if ( fs . existsSync ( spritesPath ) && fs . statSync ( spritesPath ) . isDirectory ( ) ) {
sprites = fs . readdirSync ( spritesPath )
. filter ( file => {
const mimeType = mime . lookup ( file ) ;
return mimeType && mimeType . startsWith ( 'image/' ) ;
} )
. map ( ( file ) => {
const pathToSprite = path . join ( spritesPath , file ) ;
return {
label : path . parse ( pathToSprite ) . name . toLowerCase ( ) ,
path : ` /characters/ ${ name } / ${ file } ` ,
} ;
} ) ;
}
}
catch ( err ) {
console . log ( err ) ;
}
finally {
return response . send ( sprites ) ;
}
} ) ;
function getThumbnailFolder ( type ) {
let thumbnailFolder ;
switch ( type ) {
case 'bg' :
thumbnailFolder = directories . thumbnailsBg ;
break ;
case 'avatar' :
thumbnailFolder = directories . thumbnailsAvatar ;
break ;
}
return thumbnailFolder ;
}
function getOriginalFolder ( type ) {
let originalFolder ;
switch ( type ) {
case 'bg' :
originalFolder = directories . backgrounds ;
break ;
case 'avatar' :
originalFolder = directories . characters ;
break ;
}
return originalFolder ;
}
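/**
 * Removes the cached thumbnail for a background or avatar so it gets regenerated on the next request.
 * @param {'bg'|'avatar'} type - Thumbnail category.
 * @param {string} file - File name of the original image.
 */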
function invalidateThumbnail ( type , file ) {
const folder = getThumbnailFolder ( type ) ;
const pathToThumbnail = path . join ( folder , file ) ;
if ( fs . existsSync ( pathToThumbnail ) ) {
fs . rmSync ( pathToThumbnail ) ;
}
}
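/**
 * Empties the uploads folder, removing any temporary files left over from previous runs.
 * Errors are logged but not rethrown.
 */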
function cleanUploads ( ) {
try {
if ( fs . existsSync ( UPLOADS _PATH ) ) {
const uploads = fs . readdirSync ( UPLOADS _PATH ) ;
if ( ! uploads . length ) {
return ;
}
console . debug ( ` Cleaning uploads folder ( ${ uploads . length } files) ` ) ;
uploads . forEach ( file => {
const pathToFile = path . join ( UPLOADS _PATH , file ) ;
fs . unlinkSync ( pathToFile ) ;
} ) ;
}
} catch ( err ) {
console . error ( err ) ;
}
}
async function ensureThumbnailCache ( ) {
const cacheFiles = fs . readdirSync ( directories . thumbnailsBg ) ;
// files exist, all ok
if ( cacheFiles . length ) {
return ;
}
console . log ( 'Generating thumbnails cache. Please wait...' ) ;
const bgFiles = fs . readdirSync ( directories . backgrounds ) ;
const tasks = [ ] ;
for ( const file of bgFiles ) {
tasks . push ( generateThumbnail ( 'bg' , file ) ) ;
}
await Promise . all ( tasks ) ;
console . log ( ` Done! Generated: ${ bgFiles . length } preview images ` ) ;
}
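/**
 * Returns the path to a cached thumbnail for the given file, generating it with jimp when it is
 * missing or older than the original image. Falls back to the original file contents if the image
 * cannot be processed, and returns null when the original does not exist.
 * @param {'bg'|'avatar'} type - Thumbnail category ('bg' is 160x90, 'avatar' is 96x144).
 * @param {string} file - File name of the original image.
 * @returns {Promise<string|null>} Path to the cached thumbnail, or null on failure.
 */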
async function generateThumbnail ( type , file ) {
const pathToCachedFile = path . join ( getThumbnailFolder ( type ) , file ) ;
const pathToOriginalFile = path . join ( getOriginalFolder ( type ) , file ) ;
const cachedFileExists = fs . existsSync ( pathToCachedFile ) ;
const originalFileExists = fs . existsSync ( pathToOriginalFile ) ;
// to handle cases when original image was updated after thumb creation
let shouldRegenerate = false ;
if ( cachedFileExists && originalFileExists ) {
const originalStat = fs . statSync ( pathToOriginalFile ) ;
const cachedStat = fs . statSync ( pathToCachedFile ) ;
if ( originalStat . mtimeMs > cachedStat . ctimeMs ) {
//console.log('Original file changed. Regenerating thumbnail...');
shouldRegenerate = true ;
}
}
if ( cachedFileExists && ! shouldRegenerate ) {
return pathToCachedFile ;
}
if ( ! originalFileExists ) {
return null ;
}
const imageSizes = { 'bg' : [ 160 , 90 ] , 'avatar' : [ 96 , 144 ] } ;
const mySize = imageSizes [ type ] ;
try {
let buffer ;
try {
const image = await jimp . read ( pathToOriginalFile ) ;
buffer = await image . cover ( mySize [ 0 ] , mySize [ 1 ] ) . quality ( 95 ) . getBufferAsync ( mime . lookup ( 'jpg' ) ) ;
}
catch ( inner ) {
console . warn ( ` Thumbnailer can not process the image: ${ pathToOriginalFile } . Using original size ` ) ;
buffer = fs . readFileSync ( pathToOriginalFile ) ;
}
writeFileAtomicSync ( pathToCachedFile , buffer ) ;
}
catch ( outer ) {
return null ;
}
return pathToCachedFile ;
}
app . get ( '/thumbnail' , jsonParser , async function ( request , response ) {
const type = request . query . type ;
const file = sanitize ( request . query . file ) ;
if ( ! type || ! file ) {
return response . sendStatus ( 400 ) ;
}
if ( ! ( type == 'bg' || type == 'avatar' ) ) {
return response . sendStatus ( 400 ) ;
}
if ( sanitize ( file ) !== file ) {
console . error ( 'Malicious filename prevented' ) ;
return response . sendStatus ( 403 ) ;
}
if ( config . disableThumbnails == true ) {
const pathToOriginalFile = path . join ( getOriginalFolder ( type ) , file ) ;
return response . sendFile ( pathToOriginalFile , { root : process . cwd ( ) } ) ;
}
const pathToCachedFile = await generateThumbnail ( type , file ) ;
if ( ! pathToCachedFile ) {
return response . sendStatus ( 404 ) ;
}
return response . sendFile ( pathToCachedFile , { root : process . cwd ( ) } ) ;
} ) ;
/* OpenAI */
app . post ( "/getstatus_openai" , jsonParser , function ( request , response _getstatus _openai = response ) {
if ( ! request . body ) return response _getstatus _openai . sendStatus ( 400 ) ;
let api _url ;
let api _key _openai ;
let headers ;
if ( request . body . use _openrouter == false ) {
api _url = new URL ( request . body . reverse _proxy || api _openai ) . toString ( ) ;
api _key _openai = request . body . reverse _proxy ? request . body . proxy _password : readSecret ( SECRET _KEYS . OPENAI ) ;
headers = { } ;
} else {
api _url = 'https://openrouter.ai/api/v1' ;
api _key _openai = readSecret ( SECRET _KEYS . OPENROUTER ) ;
// OpenRouter needs to pass the referer: https://openrouter.ai/docs
headers = { 'HTTP-Referer' : request . headers . referer } ;
}
if ( ! api _key _openai && ! request . body . reverse _proxy ) {
return response _getstatus _openai . status ( 401 ) . send ( { error : true } ) ;
}
const args = {
headers : {
"Authorization" : "Bearer " + api _key _openai ,
... headers ,
} ,
} ;
client . get ( api _url + "/models" , args , function ( data , response ) {
if ( response . statusCode == 200 ) {
response _getstatus _openai . send ( data ) ;
if ( request . body . use _openrouter ) {
let models = [ ] ;
data . data . forEach ( model => {
const context _length = model . context _length ;
const tokens _dollar = parseFloat ( 1 / ( 1000 * model . pricing . prompt ) ) ;
const tokens _rounded = ( Math . round ( tokens _dollar * 1000 ) / 1000 ) . toFixed ( 0 ) ;
models [ model . id ] = {
tokens _per _dollar : tokens _rounded + 'k' ,
context _length : context _length ,
} ;
} ) ;
console . log ( 'Available OpenRouter models:' , models ) ;
} else {
const modelIds = data ? . data ? . map ( x => x . id ) ? . sort ( ) ;
console . log ( 'Available OpenAI models:' , modelIds ) ;
}
}
if ( response . statusCode == 401 ) {
console . log ( 'Access Token is incorrect.' ) ;
response _getstatus _openai . send ( { error : true } ) ;
}
if ( response . statusCode == 404 ) {
console . log ( 'Endpoint not found.' ) ;
response _getstatus _openai . send ( { error : true } ) ;
}
if ( response . statusCode == 500 || response . statusCode == 501 || response . statusCode == 503 || response . statusCode == 507 ) {
console . log ( data ) ;
response _getstatus _openai . send ( { error : true } ) ;
}
} ) . on ( 'error' , function ( ) {
response _getstatus _openai . send ( { error : true } ) ;
} ) ;
} ) ;
app . post ( "/openai_bias" , jsonParser , async function ( request , response ) {
if ( ! request . body || ! Array . isArray ( request . body ) )
return response . sendStatus ( 400 ) ;
let result = { } ;
const model = getTokenizerModel ( String ( request . query . model || '' ) ) ;
// no bias for claude
if ( model == 'claude' ) {
return response . send ( result ) ;
}
const tokenizer = getTiktokenTokenizer ( model ) ;
for ( const entry of request . body ) {
if ( ! entry || ! entry . text ) {
continue ;
}
try {
const tokens = tokenizer . encode ( entry . text ) ;
for ( const token of tokens ) {
result [ token ] = entry . value ;
}
} catch {
console . warn ( 'Tokenizer failed to encode:' , entry . text ) ;
}
}
// not needed for cached tokenizers
//tokenizer.free();
return response . send ( result ) ;
} ) ;
app . post ( "/deletepreset_openai" , jsonParser , function ( request , response ) {
if ( ! request . body || ! request . body . name ) {
return response . sendStatus ( 400 ) ;
}
const name = request . body . name ;
const pathToFile = path . join ( directories . openAI _Settings , ` ${ name } .settings ` ) ;
if ( fs . existsSync ( pathToFile ) ) {
fs . rmSync ( pathToFile ) ;
return response . send ( { ok : true } ) ;
}
return response . send ( { error : true } ) ;
} ) ;
function convertChatMLPrompt ( messages ) {
const messageStrings = [ ] ;
messages . forEach ( m => {
if ( m . role === 'system' && m . name === undefined ) {
messageStrings . push ( "System: " + m . content ) ;
}
else if ( m . role === 'system' && m . name !== undefined ) {
messageStrings . push ( m . name + ": " + m . content ) ;
}
else {
messageStrings . push ( m . role + ": " + m . content ) ;
}
} ) ;
return messageStrings . join ( "\n" ) ;
}
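// Illustrative input/output for convertChatMLPrompt (hypothetical messages, not from a real request):
//   convertChatMLPrompt([
//       { role: 'system', content: 'You are a helpful bot.' },
//       { role: 'system', name: 'Alice', content: 'Hi!' },
//       { role: 'user', content: 'Hello' },
//   ])
//   => "System: You are a helpful bot.\nAlice: Hi!\nuser: Hello"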
// Prompt Conversion script taken from RisuAI by @kwaroran (GPLv3).
function convertClaudePrompt ( messages , addHumanPrefix , addAssistantPostfix ) {
// Claude doesn't support message names, so we'll just add them to the message content.
for ( const message of messages ) {
if ( message . name && message . role !== "system" ) {
message . content = message . name + ": " + message . content ;
delete message . name ;
}
}
let requestPrompt = messages . map ( ( v ) => {
let prefix = '' ;
switch ( v . role ) {
case "assistant" :
prefix = "\n\nAssistant: " ;
break
case "user" :
prefix = "\n\nHuman: " ;
break
case "system" :
// According to the Claude docs, H: and A: should be used for example conversations.
if ( v . name === "example_assistant" ) {
prefix = "\n\nA: " ;
} else if ( v . name === "example_user" ) {
prefix = "\n\nH: " ;
} else {
prefix = "\n\n" ;
}
break
}
return prefix + v . content ;
} ) . join ( '' ) ;
if ( addHumanPrefix ) {
requestPrompt = "\n\nHuman: " + requestPrompt ;
}
if ( addAssistantPostfix ) {
requestPrompt = requestPrompt + '\n\nAssistant: ' ;
}
return requestPrompt ;
}
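// Illustrative transformation (hypothetical messages): with addHumanPrefix and addAssistantPostfix set,
//   convertClaudePrompt([{ role: 'user', content: 'Hello' }], true, true)
//   => "\n\nHuman: " + "\n\nHuman: Hello" + "\n\nAssistant: "
// i.e. the user turn gets a "\n\nHuman:" prefix, an extra Human prefix is prepended to the whole prompt,
// and it ends with an empty Assistant turn for the model to complete.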
async function sendScaleRequest ( request , response ) {
const fetch = require ( 'node-fetch' ) . default ;
const api _url = new URL ( request . body . api _url _scale ) . toString ( ) ;
const api _key _scale = readSecret ( SECRET _KEYS . SCALE ) ;
if ( ! api _key _scale ) {
return response . status ( 401 ) . send ( { error : true } ) ;
}
const requestPrompt = convertChatMLPrompt ( request . body . messages ) ;
console . log ( 'Scale request:' , requestPrompt ) ;
try {
const controller = new AbortController ( ) ;
request . socket . removeAllListeners ( 'close' ) ;
request . socket . on ( 'close' , function ( ) {
controller . abort ( ) ;
} ) ;
const generateResponse = await fetch ( api _url , {
method : "POST" ,
body : JSON . stringify ( { input : { input : requestPrompt } } ) ,
headers : {
'Content-Type' : 'application/json' ,
'Authorization' : ` Basic ${ api _key _scale } ` ,
} ,
timeout : 0 ,
} ) ;
if ( ! generateResponse . ok ) {
console . log ( ` Scale API returned error: ${ generateResponse . status } ${ generateResponse . statusText } ${ await generateResponse . text ( ) } ` ) ;
return response . status ( generateResponse . status ) . send ( { error : true } ) ;
}
const generateResponseJson = await generateResponse . json ( ) ;
console . log ( 'Scale response:' , generateResponseJson ) ;
const reply = { choices : [ { "message" : { "content" : generateResponseJson . output , } } ] } ;
return response . send ( reply ) ;
} catch ( error ) {
console . log ( error ) ;
if ( ! response . headersSent ) {
return response . status ( 500 ) . send ( { error : true } ) ;
}
}
}
app . post ( "/generate_altscale" , jsonParser , function ( request , response _generate _scale ) {
if ( ! request . body ) return response _generate _scale . sendStatus ( 400 ) ;
fetch ( 'https://dashboard.scale.com/spellbook/api/trpc/v2.variant.run' , {
method : 'POST' ,
headers : {
'Content-Type' : 'application/json' ,
'cookie' : ` _jwt= ${ readSecret ( SECRET _KEYS . SCALE _COOKIE ) } ` ,
} ,
body : JSON . stringify ( {
json : {
variant : {
name : 'New Variant' ,
appId : '' ,
taxonomy : null
} ,
prompt : {
id : '' ,
template : '{{input}}\n' ,
exampleVariables : { } ,
variablesSourceDataId : null ,
systemMessage : request . body . sysprompt
} ,
modelParameters : {
id : '' ,
modelId : 'GPT4' ,
modelType : 'OpenAi' ,
maxTokens : request . body . max _tokens ,
temperature : request . body . temp ,
stop : "user:" ,
suffix : null ,
topP : request . body . top _p ,
logprobs : null ,
logitBias : request . body . logit _bias
} ,
inputs : [
{
index : '-1' ,
valueByName : {
input : request . body . prompt
}
}
]
} ,
meta : {
values : {
'variant.taxonomy' : [ 'undefined' ] ,
'prompt.variablesSourceDataId' : [ 'undefined' ] ,
'modelParameters.suffix' : [ 'undefined' ] ,
'modelParameters.logprobs' : [ 'undefined' ] ,
}
}
} )
} )
. then ( response => response . json ( ) )
. then ( data => {
console . log ( data . result . data . json . outputs [ 0 ] )
return response _generate _scale . send ( { output : data . result . data . json . outputs [ 0 ] } ) ;
} )
. catch ( ( error ) => {
console . error ( 'Error:' , error )
return response _generate _scale . send ( { error : true } )
} ) ;
} ) ;
async function sendClaudeRequest ( request , response ) {
const fetch = require ( 'node-fetch' ) . default ;
const api _url = new URL ( request . body . reverse _proxy || api _claude ) . toString ( ) ;
const api _key _claude = request . body . reverse _proxy ? request . body . proxy _password : readSecret ( SECRET _KEYS . CLAUDE ) ;
if ( ! api _key _claude ) {
return response . status ( 401 ) . send ( { error : true } ) ;
}
try {
const controller = new AbortController ( ) ;
request . socket . removeAllListeners ( 'close' ) ;
request . socket . on ( 'close' , function ( ) {
controller . abort ( ) ;
} ) ;
let requestPrompt = convertClaudePrompt ( request . body . messages , true , ! request . body . exclude _assistant ) ;
if ( request . body . assistant _prefill && ! request . body . exclude _assistant ) {
requestPrompt += request . body . assistant _prefill ;
}
console . log ( 'Claude request:' , requestPrompt ) ;
const stop _sequences = [ "\n\nHuman:" , "\n\nSystem:" , "\n\nAssistant:" ] ;
// Add custom stop sequences
if ( Array . isArray ( request . body . stop ) ) {
stop _sequences . push ( ... request . body . stop ) ;
}
const generateResponse = await fetch ( api _url + '/complete' , {
method : "POST" ,
signal : controller . signal ,
body : JSON . stringify ( {
prompt : requestPrompt ,
model : request . body . model ,
max _tokens _to _sample : request . body . max _tokens ,
stop _sequences : stop _sequences ,
temperature : request . body . temperature ,
top _p : request . body . top _p ,
top _k : request . body . top _k ,
stream : request . body . stream ,
} ) ,
headers : {
"Content-Type" : "application/json" ,
"anthropic-version" : '2023-06-01' ,
"x-api-key" : api _key _claude ,
} ,
timeout : 0 ,
} ) ;
if ( request . body . stream ) {
// Pipe remote SSE stream to Express response
generateResponse . body . pipe ( response ) ;
request . socket . on ( 'close' , function ( ) {
generateResponse . body . destroy ( ) ; // Close the remote stream
response . end ( ) ; // End the Express response
} ) ;
generateResponse . body . on ( 'end' , function ( ) {
console . log ( "Streaming request finished" ) ;
response . end ( ) ;
} ) ;
} else {
if ( ! generateResponse . ok ) {
console . log ( ` Claude API returned error: ${ generateResponse . status } ${ generateResponse . statusText } ${ await generateResponse . text ( ) } ` ) ;
return response . status ( generateResponse . status ) . send ( { error : true } ) ;
}
const generateResponseJson = await generateResponse . json ( ) ;
const responseText = generateResponseJson . completion ;
console . log ( 'Claude response:' , responseText ) ;
// Wrap it back to OAI format
const reply = { choices : [ { "message" : { "content" : responseText , } } ] } ;
return response . send ( reply ) ;
}
} catch ( error ) {
console . log ( 'Error communicating with Claude: ' , error ) ;
if ( ! response . headersSent ) {
return response . status ( 500 ) . send ( { error : true } ) ;
}
}
}
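// The /generate_openai route below is the single chat-completion entry point: depending on the
// request flags it is dispatched to Claude (use_claude), Scale (use_scale) or AI21 (use_ai21),
// and otherwise goes to the OpenAI-compatible endpoint (or OpenRouter when use_openrouter is set).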
app . post ( "/generate_openai" , jsonParser , function ( request , response _generate _openai ) {
if ( ! request . body ) return response _generate _openai . status ( 400 ) . send ( { error : true } ) ;
if ( request . body . use _claude ) {
return sendClaudeRequest ( request , response _generate _openai ) ;
}
if ( request . body . use _scale ) {
return sendScaleRequest ( request , response _generate _openai ) ;
}
if ( request . body . use _ai21 ) {
return sendAI21Request ( request , response _generate _openai ) ;
}
let api _url ;
let api _key _openai ;
let headers ;
let bodyParams ;
if ( ! request . body . use _openrouter ) {
api _url = new URL ( request . body . reverse _proxy || api _openai ) . toString ( ) ;
api _key _openai = request . body . reverse _proxy ? request . body . proxy _password : readSecret ( SECRET _KEYS . OPENAI ) ;
headers = { } ;
bodyParams = { } ;
} else {
api _url = 'https://openrouter.ai/api/v1' ;
api _key _openai = readSecret ( SECRET _KEYS . OPENROUTER ) ;
// OpenRouter needs to pass the referer: https://openrouter.ai/docs
headers = { 'HTTP-Referer' : request . headers . referer } ;
bodyParams = { 'transforms' : [ "middle-out" ] } ;
if ( request . body . use _fallback ) {
bodyParams [ 'route' ] = 'fallback' ;
}
}
if ( ! api _key _openai && ! request . body . reverse _proxy ) {
return response _generate _openai . status ( 401 ) . send ( { error : true } ) ;
}
// Add custom stop sequences
if ( Array . isArray ( request . body . stop ) && request . body . stop . length > 0 ) {
bodyParams [ 'stop' ] = request . body . stop ;
}
const isTextCompletion = Boolean ( request . body . model && ( request . body . model . startsWith ( 'text-' ) || request . body . model . startsWith ( 'code-' ) ) ) ;
const textPrompt = isTextCompletion ? convertChatMLPrompt ( request . body . messages ) : '' ;
const endpointUrl = isTextCompletion ? ` ${ api _url } /completions ` : ` ${ api _url } /chat/completions ` ;
const controller = new AbortController ( ) ;
request . socket . removeAllListeners ( 'close' ) ;
request . socket . on ( 'close' , function ( ) {
controller . abort ( ) ;
} ) ;
const config = {
method : 'post' ,
url : endpointUrl ,
headers : {
'Content-Type' : 'application/json' ,
'Authorization' : 'Bearer ' + api _key _openai ,
... headers ,
} ,
data : {
"messages" : isTextCompletion === false ? request . body . messages : undefined ,
"prompt" : isTextCompletion === true ? textPrompt : undefined ,
"model" : request . body . model ,
"temperature" : request . body . temperature ,
"max_tokens" : request . body . max _tokens ,
"stream" : request . body . stream ,
"presence_penalty" : request . body . presence _penalty ,
"frequency_penalty" : request . body . frequency _penalty ,
"top_p" : request . body . top _p ,
"top_k" : request . body . top _k ,
"stop" : request . body . stop ,
"logit_bias" : request . body . logit _bias ,
... bodyParams ,
} ,
signal : controller . signal ,
} ;
console . log ( config . data ) ;
if ( request . body . stream ) {
config . responseType = 'stream' ;
}
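// makeRequest (below) retries the upstream call when the API answers 429 (rate limit / quota):
// up to `retries` attempts, waiting `timeout` milliseconds between attempts, before the error
// is finally forwarded to the client via handleError.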
async function makeRequest ( config , response _generate _openai , request , retries = 5 , timeout = 5000 ) {
try {
const response = await axios ( config ) ;
if ( response . status <= 299 ) {
if ( request . body . stream ) {
console . log ( 'Streaming request in progress' ) ;
response . data . pipe ( response _generate _openai ) ;
response . data . on ( 'end' , ( ) => {
console . log ( 'Streaming request finished' ) ;
response _generate _openai . end ( ) ;
} ) ;
} else {
response _generate _openai . send ( response . data ) ;
console . log ( response . data ) ;
console . log ( response . data ? . choices [ 0 ] ? . message ) ;
}
} else {
handleErrorResponse ( response , response _generate _openai , request ) ;
}
} catch ( error ) {
if ( error . response && error . response . status === 429 && retries > 0 ) {
console . log ( ` Out of quota, retrying in ${ Math . round ( timeout / 1000 ) } s ` ) ;
setTimeout ( ( ) => {
makeRequest ( config , response _generate _openai , request , retries - 1 ) ;
} , timeout ) ;
} else {
let errorData = error ? . response ? . data ;
if ( request . body . stream ) {
try {
const chunks = await readAllChunks ( errorData ) ;
const blob = new Blob ( chunks , { type : 'application/json' } ) ;
const text = await blob . text ( ) ;
errorData = JSON . parse ( text ) ;
} catch {
console . warn ( 'Error parsing streaming response' ) ;
}
} else {
errorData = typeof errorData === 'string' ? tryParse ( errorData ) : errorData ;
}
handleError ( error , response _generate _openai , errorData ) ;
}
}
}
function handleErrorResponse ( response , response _generate _openai , request ) {
if ( response . status >= 400 && response . status <= 504 ) {
console . log ( 'Error occurred:' , response . status , response . data ) ;
response _generate _openai . send ( { error : true } ) ;
}
}
function handleError ( error , response _generate _openai , errorData ) {
console . error ( 'Error:' , error ? . message ) ;
let message = error ? . response ? . statusText ;
const statusMessages = {
400 : 'Bad request' ,
401 : 'Unauthorized' ,
402 : 'Credit limit reached' ,
403 : 'Forbidden' ,
404 : 'Not found' ,
429 : 'Too many requests' ,
451 : 'Unavailable for legal reasons' ,
} ;
const status = error ? . response ? . status ;
if ( statusMessages . hasOwnProperty ( status ) ) {
message = errorData ? . error ? . message || statusMessages [ status ] ;
console . log ( message ) ;
}
const quota _error = error ? . response ? . status === 429 && errorData ? . error ? . type === 'insufficient_quota' ;
const response = { error : { message } , quota _error : quota _error }
if ( ! response _generate _openai . headersSent ) {
response _generate _openai . send ( response ) ;
} else if ( ! response _generate _openai . writableEnded ) {
response _generate _openai . write ( response ) ;
}
}
makeRequest ( config , response _generate _openai , request ) ;
} ) ;
app . post ( "/tokenize_openai" , jsonParser , function ( request , response _tokenize _openai = response ) {
if ( ! request . body ) return response _tokenize _openai . sendStatus ( 400 ) ;
let num _tokens = 0 ;
const model = getTokenizerModel ( String ( request . query . model || '' ) ) ;
if ( model == 'claude' ) {
num _tokens = countClaudeTokens ( claude _tokenizer , request . body ) ;
return response _tokenize _openai . send ( { "token_count" : num _tokens } ) ;
}
const tokensPerName = model . includes ( 'gpt-4' ) ? 1 : - 1 ;
const tokensPerMessage = model . includes ( 'gpt-4' ) ? 3 : 4 ;
const tokensPadding = 3 ;
const tokenizer = getTiktokenTokenizer ( model ) ;
for ( const msg of request . body ) {
try {
num _tokens += tokensPerMessage ;
for ( const [ key , value ] of Object . entries ( msg ) ) {
num _tokens += tokenizer . encode ( value ) . length ;
if ( key == "name" ) {
num _tokens += tokensPerName ;
}
}
} catch {
console . warn ( "Error tokenizing message:" , msg ) ;
}
}
num _tokens += tokensPadding ;
// not needed for cached tokenizers
//tokenizer.free();
response _tokenize _openai . send ( { "token_count" : num _tokens } ) ;
} ) ;
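// Rough sketch of the counting above (hypothetical numbers): for a non-gpt-4 model each message
// costs tokensPerMessage (4) plus the encoded length of every field value, minus 1 token when a
// "name" field is present, and the whole request gets tokensPadding (3) extra. So two plain messages
// whose fields encode to 10 and 12 tokens count as (4 + 10) + (4 + 12) + 3 = 33.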
async function sendAI21Request ( request , response ) {
if ( ! request . body ) return response . sendStatus ( 400 ) ;
const controller = new AbortController ( ) ;
console . log ( request . body . messages )
request . socket . removeAllListeners ( 'close' ) ;
request . socket . on ( 'close' , function ( ) {
controller . abort ( ) ;
} ) ;
const options = {
method : 'POST' ,
headers : {
accept : 'application/json' ,
'content-type' : 'application/json' ,
Authorization : ` Bearer ${ readSecret ( SECRET _KEYS . AI21 ) } `
} ,
body : JSON . stringify ( {
numResults : 1 ,
maxTokens : request . body . max _tokens ,
minTokens : 0 ,
temperature : request . body . temperature ,
topP : request . body . top _p ,
stopSequences : request . body . stop _tokens ,
topKReturn : request . body . top _k ,
frequencyPenalty : {
scale : request . body . frequency _penalty * 100 ,
applyToWhitespaces : false ,
applyToPunctuations : false ,
applyToNumbers : false ,
applyToStopwords : false ,
applyToEmojis : false
} ,
presencePenalty : {
scale : request . body . presence _penalty ,
applyToWhitespaces : false ,
applyToPunctuations : false ,
applyToNumbers : false ,
applyToStopwords : false ,
applyToEmojis : false
} ,
countPenalty : {
scale : request . body . count _pen ,
applyToWhitespaces : false ,
applyToPunctuations : false ,
applyToNumbers : false ,
applyToStopwords : false ,
applyToEmojis : false
} ,
prompt : request . body . messages
} ) ,
signal : controller . signal ,
} ;
fetch ( ` https://api.ai21.com/studio/v1/ ${ request . body . model } /complete ` , options )
. then ( r => r . json ( ) )
. then ( r => {
if ( r . completions === undefined ) {
console . log ( r ) ;
return response . send ( { error : true } ) ;
}
console . log ( r . completions [ 0 ] . data . text ) ;
const reply = { choices : [ { "message" : { "content" : r . completions [ 0 ] . data . text , } } ] } ;
return response . send ( reply ) ;
} )
. catch ( err => {
console . error ( err )
return response . send ( { error : true } )
} ) ;
}
app . post ( "/tokenize_ai21" , jsonParser , function ( request , response _tokenize _ai21 = response ) {
if ( ! request . body ) return response _tokenize _ai21 . sendStatus ( 400 ) ;
const options = {
method : 'POST' ,
headers : {
accept : 'application/json' ,
'content-type' : 'application/json' ,
Authorization : ` Bearer ${ readSecret ( SECRET _KEYS . AI21 ) } `
} ,
body : JSON . stringify ( { text : request . body [ 0 ] . content } )
} ;
fetch ( 'https://api.ai21.com/studio/v1/tokenize' , options )
. then ( response => response . json ( ) )
. then ( response => response _tokenize _ai21 . send ( { "token_count" : response . tokens . length } ) )
. catch ( err => console . error ( err ) ) ;
} ) ;
app . post ( "/save_preset" , jsonParser , function ( request , response ) {
const name = sanitize ( request . body . name ) ;
if ( ! request . body . preset || ! name ) {
return response . sendStatus ( 400 ) ;
}
const settings = getPresetSettingsByAPI ( request . body . apiId ) ;
const filename = name + settings . extension ;
if ( ! settings . folder ) {
return response . sendStatus ( 400 ) ;
}
const fullpath = path . join ( settings . folder , filename ) ;
writeFileAtomicSync ( fullpath , JSON . stringify ( request . body . preset , null , 4 ) , 'utf-8' ) ;
return response . send ( { name } ) ;
} ) ;
app . post ( "/delete_preset" , jsonParser , function ( request , response ) {
const name = sanitize ( request . body . name ) ;
if ( ! name ) {
return response . sendStatus ( 400 ) ;
}
const settings = getPresetSettingsByAPI ( request . body . apiId ) ;
const filename = name + settings . extension ;
if ( ! settings . folder ) {
return response . sendStatus ( 400 ) ;
}
const fullpath = path . join ( settings . folder , filename ) ;
if ( fs . existsSync ( fullpath ) ) {
fs . unlinkSync ( fullpath ) ;
return response . sendStatus ( 200 ) ;
} else {
return response . sendStatus ( 404 ) ;
}
} ) ;
app . post ( "/savepreset_openai" , jsonParser , function ( request , response ) {
const name = sanitize ( request . query . name ) ;
if ( ! request . body || ! name ) {
return response . sendStatus ( 400 ) ;
}
const filename = ` ${ name } .settings ` ;
const fullpath = path . join ( directories . openAI _Settings , filename ) ;
writeFileAtomicSync ( fullpath , JSON . stringify ( request . body , null , 4 ) , 'utf-8' ) ;
return response . send ( { name } ) ;
} ) ;
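// getPresetSettingsByAPI (below) maps an API id to the folder and file extension used for its presets,
// e.g. getPresetSettingsByAPI('novel') yields { folder: directories.novelAI_Settings, extension: '.settings' },
// while an unknown id yields { folder: null, extension: null } so the callers above can reject the request.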
function getPresetSettingsByAPI ( apiId ) {
switch ( apiId ) {
case 'kobold' :
case 'koboldhorde' :
return { folder : directories . koboldAI _Settings , extension : '.settings' } ;
case 'novel' :
return { folder : directories . novelAI _Settings , extension : '.settings' } ;
case 'textgenerationwebui' :
return { folder : directories . textGen _Settings , extension : '.settings' } ;
case 'instruct' :
return { folder : directories . instruct , extension : '.json' } ;
case 'context' :
return { folder : directories . context , extension : '.json' } ;
default :
return { folder : null , extension : null } ;
}
}
function createSentencepieceEncodingHandler ( getTokenizerFn ) {
return async function ( request , response ) {
try {
if ( ! request . body ) {
return response . sendStatus ( 400 ) ;
}
const text = request . body . text || '' ;
const tokenizer = getTokenizerFn ( ) ;
const { ids , count } = await countSentencepieceTokens ( tokenizer , text ) ;
return response . send ( { ids , count } ) ;
} catch ( error ) {
console . log ( error ) ;
return response . send ( { ids : [ ] , count : 0 } ) ;
}
} ;
}
function createSentencepieceDecodingHandler ( getTokenizerFn ) {
return async function ( request , response ) {
try {
if ( ! request . body ) {
return response . sendStatus ( 400 ) ;
}
const ids = request . body . ids || [ ] ;
const tokenizer = getTokenizerFn ( ) ;
const text = await tokenizer . decodeIds ( ids ) ;
return response . send ( { text } ) ;
} catch ( error ) {
console . log ( error ) ;
return response . send ( { text : '' } ) ;
}
} ;
}
function createTiktokenEncodingHandler ( modelId ) {
return async function ( request , response ) {
try {
if ( ! request . body ) {
return response . sendStatus ( 400 ) ;
}
const text = request . body . text || '' ;
const tokenizer = getTiktokenTokenizer ( modelId ) ;
const tokens = Object . values ( tokenizer . encode ( text ) ) ;
return response . send ( { ids : tokens , count : tokens . length } ) ;
} catch ( error ) {
console . log ( error ) ;
return response . send ( { ids : [ ] , count : 0 } ) ;
}
}
}
function createTiktokenDecodingHandler ( modelId ) {
return async function ( request , response ) {
try {
if ( ! request . body ) {
return response . sendStatus ( 400 ) ;
}
const ids = request . body . ids || [ ] ;
const tokenizer = getTiktokenTokenizer ( modelId ) ;
const textBytes = tokenizer . decode ( new Uint32Array ( ids ) ) ;
const text = new TextDecoder ( ) . decode ( textBytes ) ;
return response . send ( { text } ) ;
} catch ( error ) {
console . log ( error ) ;
return response . send ( { text : '' } ) ;
}
}
}
app . post ( "/tokenize_llama" , jsonParser , createSentencepieceEncodingHandler ( ( ) => spp _llama ) ) ;
app . post ( "/tokenize_nerdstash" , jsonParser , createSentencepieceEncodingHandler ( ( ) => spp _nerd ) ) ;
app . post ( "/tokenize_nerdstash_v2" , jsonParser , createSentencepieceEncodingHandler ( ( ) => spp _nerd _v2 ) ) ;
app . post ( "/tokenize_gpt2" , jsonParser , createTiktokenEncodingHandler ( 'gpt2' ) ) ;
app . post ( "/decode_llama" , jsonParser , createSentencepieceDecodingHandler ( ( ) => spp _llama ) ) ;
app . post ( "/decode_nerdstash" , jsonParser , createSentencepieceDecodingHandler ( ( ) => spp _nerd ) ) ;
app . post ( "/decode_nerdstash_v2" , jsonParser , createSentencepieceDecodingHandler ( ( ) => spp _nerd _v2 ) ) ;
app . post ( "/decode_gpt2" , jsonParser , createTiktokenDecodingHandler ( 'gpt2' ) ) ;
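// All eight routes above share the same tiny contract. Illustrative calls (hypothetical text):
//   POST /tokenize_gpt2   { "text": "Hello" }  ->  { "ids": [ ... ], "count": <number of tokens> }
//   POST /decode_gpt2     { "ids": [ ... ] }   ->  { "text": "Hello" }
// The sentencepiece variants behave the same way but use the corresponding spp_* tokenizers.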
app . post ( "/tokenize_via_api" , jsonParser , async function ( request , response ) {
if ( ! request . body ) {
return response . sendStatus ( 400 ) ;
}
const text = request . body . text || '' ;
try {
const args = {
body : JSON . stringify ( { "prompt" : text } ) ,
headers : { "Content-Type" : "application/json" }
} ;
if ( main _api == 'textgenerationwebui' && request . body . use _mancer ) {
args . headers = Object . assign ( args . headers , get _mancer _headers ( ) ) ;
const data = await postAsync ( api _server + "/v1/token-count" , args ) ;
return response . send ( { count : data [ 'results' ] [ 0 ] [ 'tokens' ] } ) ;
}
else if ( main _api == 'kobold' ) {
const data = await postAsync ( api _server + "/extra/tokencount" , args ) ;
const count = data [ 'value' ] ;
return response . send ( { count : count } ) ;
}
else {
return response . send ( { error : true } ) ;
}
} catch ( error ) {
console . log ( error ) ;
return response . send ( { error : true } ) ;
}
} ) ;
// ** REST CLIENT ASYNC WRAPPERS **
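/**
 * postAsync (below) POSTs the given fetch args to a URL and resolves with the parsed JSON body.
 * It rejects by throwing the raw Response on any non-2xx status, so callers can inspect it in their catch.
 * Sketch of a typical call (hypothetical URL, assuming the target accepts JSON):
 *   const data = await postAsync('http://127.0.0.1:5000/api/v1/model', { body: JSON.stringify({}), headers: { 'Content-Type': 'application/json' } });
 */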
async function postAsync ( url , args ) {
const fetch = require ( 'node-fetch' ) . default ;
const response = await fetch ( url , { method : 'POST' , timeout : 0 , ... args } ) ;
if ( response . ok ) {
const data = await response . json ( ) ;
return data ;
}
throw response ;
}
function getAsync ( url , args ) {
return new Promise ( ( resolve , reject ) => {
client . get ( url , args , ( data , response ) => {
if ( response . statusCode >= 400 ) {
reject ( data ) ;
}
resolve ( data ) ;
} ) . on ( 'error' , e => reject ( e ) ) ;
} )
}
// ** END **
const tavernUrl = new URL (
( cliArguments . ssl ? 'https://' : 'http://' ) +
( listen ? '0.0.0.0' : '127.0.0.1' ) +
( ':' + server _port )
) ;
const autorunUrl = new URL (
( cliArguments . ssl ? 'https://' : 'http://' ) +
( '127.0.0.1' ) +
( ':' + server _port )
) ;
const setupTasks = async function ( ) {
const version = getVersion ( ) ;
console . log ( ` SillyTavern ${ version . pkgVersion } ` + ( version . gitBranch ? ` ' ${ version . gitBranch } ' ( ${ version . gitRevision } ) ` : '' ) ) ;
backupSettings ( ) ;
migrateSecrets ( ) ;
ensurePublicDirectoriesExist ( ) ;
await ensureThumbnailCache ( ) ;
contentManager . checkForNewContent ( ) ;
cleanUploads ( ) ;
// Colab users could run the embedded tool
if ( ! is _colab ) await convertWebp ( ) ;
[ spp _llama , spp _nerd , spp _nerd _v2 , claude _tokenizer ] = await Promise . all ( [
loadSentencepieceTokenizer ( 'src/sentencepiece/tokenizer.model' ) ,
loadSentencepieceTokenizer ( 'src/sentencepiece/nerdstash.model' ) ,
loadSentencepieceTokenizer ( 'src/sentencepiece/nerdstash_v2.model' ) ,
loadClaudeTokenizer ( 'src/claude.json' ) ,
] ) ;
await statsHelpers . loadStatsFile ( directories . chats , directories . characters ) ;
// Set up event listeners for a graceful shutdown
process . on ( 'SIGINT' , statsHelpers . writeStatsToFileAndExit ) ;
process . on ( 'SIGTERM' , statsHelpers . writeStatsToFileAndExit ) ;
process . on ( 'uncaughtException' , ( err ) => {
console . error ( 'Uncaught exception:' , err ) ;
statsHelpers . writeStatsToFileAndExit ( ) ;
} ) ;
setInterval ( statsHelpers . saveStatsToFile , 5 * 60 * 1000 ) ;
console . log ( 'Launching...' ) ;
if ( autorun ) open ( autorunUrl . toString ( ) ) ;
console . log ( color . green ( 'SillyTavern is listening on: ' + tavernUrl ) ) ;
if ( listen ) {
console . log ( '\n0.0.0.0 means SillyTavern is listening on all network interfaces (Wi-Fi, LAN, localhost). If you want to limit it only to internal localhost (127.0.0.1), change the setting in config.conf to "listen=false". Check "access.log" file in the SillyTavern directory if you want to inspect incoming connections.\n' ) ;
}
}
if ( listen && ! config . whitelistMode && ! config . basicAuthMode ) {
if ( config . securityOverride ) {
console . warn ( color . red ( "Security has been overridden. If it's not a trusted network, change the settings." ) ) ;
}
else {
console . error ( color . red ( 'Your SillyTavern is currently insecurely open to the public. Enable whitelisting or basic authentication.' ) ) ;
process . exit ( 1 ) ;
}
}
if ( true === cliArguments . ssl )
https . createServer (
{
cert : fs . readFileSync ( cliArguments . certPath ) ,
key : fs . readFileSync ( cliArguments . keyPath )
} , app )
. listen (
tavernUrl . port || 443 ,
tavernUrl . hostname ,
setupTasks
) ;
else
http . createServer ( app ) . listen (
tavernUrl . port || 80 ,
tavernUrl . hostname ,
setupTasks
) ;
async function convertWebp ( ) {
const files = fs . readdirSync ( directories . characters ) . filter ( e => e . endsWith ( ".webp" ) ) ;
if ( ! files . length ) {
return ;
}
console . log ( ` ${ files . length } WEBP files will be automatically converted. ` ) ;
for ( const file of files ) {
try {
const source = path . join ( directories . characters , file ) ;
const dest = path . join ( directories . characters , path . basename ( file , ".webp" ) + ".png" ) ;
if ( fs . existsSync ( dest ) ) {
console . log ( ` ${ dest } already exists. Delete ${ source } manually ` ) ;
continue ;
}
console . log ( ` Read... ${ source } ` ) ;
const data = await charaRead ( source ) ;
console . log ( ` Convert... ${ source } -> ${ dest } ` ) ;
await webp . dwebp ( source , dest , "-o" ) ;
console . log ( ` Write... ${ dest } ` ) ;
const success = await charaWrite ( dest , data , path . parse ( dest ) . name ) ;
if ( ! success ) {
console . log ( ` Failure on ${ source } -> ${ dest } ` ) ;
continue ;
}
console . log ( ` Remove... ${ source } ` ) ;
fs . rmSync ( source ) ;
} catch ( err ) {
console . log ( err ) ;
}
}
}
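// backupSettings (below) copies public/settings.json into the backups folder under a
// settings_<YYYYMMDD-HHMMSS>.json name and, once more than MAX_BACKUPS (25) backups exist,
// removes the oldest one by file modification time.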
function backupSettings ( ) {
const MAX _BACKUPS = 25 ;
function generateTimestamp ( ) {
const now = new Date ( ) ;
const year = now . getFullYear ( ) ;
const month = String ( now . getMonth ( ) + 1 ) . padStart ( 2 , '0' ) ;
const day = String ( now . getDate ( ) ) . padStart ( 2 , '0' ) ;
const hours = String ( now . getHours ( ) ) . padStart ( 2 , '0' ) ;
const minutes = String ( now . getMinutes ( ) ) . padStart ( 2 , '0' ) ;
const seconds = String ( now . getSeconds ( ) ) . padStart ( 2 , '0' ) ;
return ` ${ year } ${ month } ${ day } - ${ hours } ${ minutes } ${ seconds } ` ;
}
try {
if ( ! fs . existsSync ( directories . backups ) ) {
fs . mkdirSync ( directories . backups ) ;
}
const backupFile = path . join ( directories . backups , ` settings_ ${ generateTimestamp ( ) } .json ` ) ;
fs . copyFileSync ( SETTINGS _FILE , backupFile ) ;
let files = fs . readdirSync ( directories . backups ) . filter ( f => f . startsWith ( 'settings_' ) ) ;
if ( files . length > MAX _BACKUPS ) {
files = files . map ( f => path . join ( directories . backups , f ) ) ;
files . sort ( ( a , b ) => fs . statSync ( a ) . mtimeMs - fs . statSync ( b ) . mtimeMs ) ;
fs . rmSync ( files [ 0 ] ) ;
}
} catch ( err ) {
console . log ( 'Could not backup settings file' , err ) ;
}
}
function ensurePublicDirectoriesExist ( ) {
for ( const dir of Object . values ( directories ) ) {
if ( ! fs . existsSync ( dir ) ) {
fs . mkdirSync ( dir , { recursive : true } ) ;
}
}
}
const SECRETS _FILE = './secrets.json' ;
const SETTINGS _FILE = './public/settings.json' ;
const SECRET _KEYS = {
HORDE : 'api_key_horde' ,
MANCER : 'api_key_mancer' ,
OPENAI : 'api_key_openai' ,
NOVEL : 'api_key_novel' ,
CLAUDE : 'api_key_claude' ,
DEEPL : 'deepl' ,
LIBRE : 'libre' ,
LIBRE _URL : 'libre_url' ,
OPENROUTER : 'api_key_openrouter' ,
SCALE : 'api_key_scale' ,
AI21 : 'api_key_ai21' ,
SCALE _COOKIE : 'scale_cookie' ,
}
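// migrateSecrets (below) moves any API keys that older builds stored in plain text inside
// public/settings.json (OpenAI, Horde, NovelAI) into secrets.json via writeSecret, then removes
// them from settings.json so the keys are no longer kept alongside client-visible settings.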
function migrateSecrets ( ) {
if ( ! fs . existsSync ( SETTINGS _FILE ) ) {
console . log ( 'Settings file does not exist' ) ;
return ;
}
try {
let modified = false ;
const fileContents = fs . readFileSync ( SETTINGS _FILE ) ;
const settings = JSON . parse ( fileContents ) ;
const oaiKey = settings ? . api _key _openai ;
const hordeKey = settings ? . horde _settings ? . api _key ;
const novelKey = settings ? . api _key _novel ;
if ( typeof oaiKey === 'string' ) {
console . log ( 'Migrating OpenAI key...' ) ;
writeSecret ( SECRET _KEYS . OPENAI , oaiKey ) ;
delete settings . api _key _openai ;
modified = true ;
}
if ( typeof hordeKey === 'string' ) {
console . log ( 'Migrating Horde key...' ) ;
writeSecret ( SECRET _KEYS . HORDE , hordeKey ) ;
delete settings . horde _settings . api _key ;
modified = true ;
}
if ( typeof novelKey === 'string' ) {
console . log ( 'Migrating Novel key...' ) ;
writeSecret ( SECRET _KEYS . NOVEL , novelKey ) ;
delete settings . api _key _novel ;
modified = true ;
}
if ( modified ) {
console . log ( 'Writing updated settings.json...' ) ;
const settingsContent = JSON . stringify ( settings ) ;
writeFileAtomicSync ( SETTINGS _FILE , settingsContent , "utf-8" ) ;
}
}
catch ( error ) {
console . error ( 'Could not migrate secrets file. Proceed with caution.' ) ;
}
}
app . post ( '/writesecret' , jsonParser , ( request , response ) => {
const key = request . body . key ;
const value = request . body . value ;
writeSecret ( key , value ) ;
return response . send ( 'ok' ) ;
} ) ;
app . post ( '/readsecretstate' , jsonParser , ( _ , response ) => {
if ( ! fs . existsSync ( SECRETS _FILE ) ) {
return response . send ( { } ) ;
}
try {
const fileContents = fs . readFileSync ( SECRETS _FILE ) ;
const secrets = JSON . parse ( fileContents ) ;
const state = { } ;
for ( const key of Object . values ( SECRET _KEYS ) ) {
state [ key ] = ! ! secrets [ key ] ; // convert to boolean
}
return response . send ( state ) ;
} catch ( error ) {
console . error ( error ) ;
return response . send ( { } ) ;
}
} ) ;
const ANONYMOUS _KEY = "0000000000" ;
app . post ( '/generate_horde' , jsonParser , async ( request , response ) => {
const api _key _horde = readSecret ( SECRET _KEYS . HORDE ) || ANONYMOUS _KEY ;
const url = 'https://horde.koboldai.net/api/v2/generate/text/async' ;
const args = {
"body" : JSON . stringify ( request . body ) ,
"headers" : {
"Content-Type" : "application/json" ,
"Client-Agent" : request . header ( 'Client-Agent' ) ,
"apikey" : api _key _horde ,
}
} ;
console . log ( args . body ) ;
try {
const data = await postAsync ( url , args ) ;
return response . send ( data ) ;
} catch ( error ) {
console . log ( 'Horde returned an error:' , error . statusText ) ;
if ( typeof error . text === 'function' ) {
const message = await error . text ( ) ;
console . log ( message ) ;
return response . send ( { error : { message } } ) ;
} else {
return response . send ( { error : true } ) ;
}
}
} ) ;
app . post ( '/viewsecrets' , jsonParser , async ( _ , response ) => {
if ( ! allowKeysExposure ) {
console . error ( 'secrets.json cannot be viewed unless allowKeysExposure in config.conf is set to true' ) ;
return response . sendStatus ( 403 ) ;
}
if ( ! fs . existsSync ( SECRETS _FILE ) ) {
console . error ( 'secrets.json does not exist' ) ;
return response . sendStatus ( 404 ) ;
}
try {
const fileContents = fs . readFileSync ( SECRETS _FILE ) ;
const secrets = JSON . parse ( fileContents ) ;
return response . send ( secrets ) ;
} catch ( error ) {
console . error ( error ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/horde_samplers' , jsonParser , async ( _ , response ) => {
try {
const ai _horde = getHordeClient ( ) ;
const samplers = Object . values ( ai _horde . ModelGenerationInputStableSamplers ) ;
response . send ( samplers ) ;
} catch ( error ) {
console . error ( error ) ;
response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/horde_models' , jsonParser , async ( _ , response ) => {
try {
const ai _horde = getHordeClient ( ) ;
const models = await ai _horde . getModels ( ) ;
response . send ( models ) ;
} catch ( error ) {
console . error ( error ) ;
response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/horde_userinfo' , jsonParser , async ( _ , response ) => {
const api _key _horde = readSecret ( SECRET _KEYS . HORDE ) ;
if ( ! api _key _horde ) {
return response . send ( { anonymous : true } ) ;
}
try {
const ai _horde = getHordeClient ( ) ;
const user = await ai _horde . findUser ( { token : api _key _horde } ) ;
return response . send ( user ) ;
} catch ( error ) {
console . error ( error ) ;
return response . sendStatus ( 500 ) ;
}
} )
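// The image generation route below is a long-poll: it submits the job to the Horde, then checks
// its status every CHECK_INTERVAL (3 s) for up to MAX_ATTEMPTS (200) iterations (~10 minutes),
// returning the first finished image, 500 on a faulted job, or 504 when the job never completes in time.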
app . post ( '/horde_generateimage' , jsonParser , async ( request , response ) => {
const MAX _ATTEMPTS = 200 ;
const CHECK _INTERVAL = 3000 ;
const api _key _horde = readSecret ( SECRET _KEYS . HORDE ) || ANONYMOUS _KEY ;
console . log ( 'Stable Horde request:' , request . body ) ;
try {
const ai _horde = getHordeClient ( ) ;
const generation = await ai _horde . postAsyncImageGenerate (
{
prompt : ` ${ request . body . prompt _prefix } ${ request . body . prompt } ### ${ request . body . negative _prompt } ` ,
params :
{
sampler _name : request . body . sampler ,
hires _fix : request . body . enable _hr ,
use _gfpgan : request . body . restore _faces ,
cfg _scale : request . body . scale ,
steps : request . body . steps ,
width : request . body . width ,
height : request . body . height ,
karras : Boolean ( request . body . karras ) ,
n : 1 ,
} ,
r2 : false ,
nsfw : request . body . nfsw ,
models : [ request . body . model ] ,
} ,
{ token : api _key _horde } ) ;
if ( ! generation . id ) {
console . error ( 'Image generation request is not satisfiable:' , generation . message || 'unknown error' ) ;
return response . sendStatus ( 400 ) ;
}
for ( let attempt = 0 ; attempt < MAX _ATTEMPTS ; attempt ++ ) {
await delay ( CHECK _INTERVAL ) ;
const check = await ai _horde . getImageGenerationCheck ( generation . id ) ;
console . log ( check ) ;
if ( check . done ) {
const result = await ai _horde . getImageGenerationStatus ( generation . id ) ;
return response . send ( result . generations [ 0 ] . img ) ;
}
/*
if ( ! check . is_possible ) {
return response . sendStatus ( 503 ) ;
}
*/
if ( check . faulted ) {
return response . sendStatus ( 500 ) ;
}
}
return response . sendStatus ( 504 ) ;
} catch ( error ) {
console . error ( error ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/libre_translate' , jsonParser , async ( request , response ) => {
const key = readSecret ( SECRET _KEYS . LIBRE ) ;
const url = readSecret ( SECRET _KEYS . LIBRE _URL ) ;
const text = request . body . text ;
const lang = request . body . lang ;
if ( ! text || ! lang ) {
return response . sendStatus ( 400 ) ;
}
console . log ( 'Input text: ' + text ) ;
try {
const result = await fetch ( url , {
method : "POST" ,
body : JSON . stringify ( {
q : text ,
source : "auto" ,
target : lang ,
format : "text" ,
api _key : key
} ) ,
headers : { "Content-Type" : "application/json" }
} ) ;
if ( ! result . ok ) {
return response . sendStatus ( result . status ) ;
}
const json = await result . json ( ) ;
console . log ( 'Translated text: ' + json . translatedText ) ;
return response . send ( json . translatedText ) ;
} catch ( error ) {
console . log ( "Translation error: " + error . message ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/google_translate' , jsonParser , async ( request , response ) => {
const { generateRequestUrl , normaliseResponse } = require ( 'google-translate-api-browser' ) ;
const text = request . body . text ;
const lang = request . body . lang ;
if ( ! text || ! lang ) {
return response . sendStatus ( 400 ) ;
}
console . log ( 'Input text: ' + text ) ;
const url = generateRequestUrl ( text , { to : lang } ) ;
https . get ( url , ( resp ) => {
let data = '' ;
resp . on ( 'data' , ( chunk ) => {
data += chunk ;
} ) ;
resp . on ( 'end' , ( ) => {
const result = normaliseResponse ( JSON . parse ( data ) ) ;
console . log ( 'Translated text: ' + result . text ) ;
return response . send ( result . text ) ;
} ) ;
} ) . on ( "error" , ( err ) => {
console . log ( "Translation error: " + err . message ) ;
return response . sendStatus ( 500 ) ;
} ) ;
} ) ;
app . post ( '/deepl_translate' , jsonParser , async ( request , response ) => {
const key = readSecret ( SECRET _KEYS . DEEPL ) ;
if ( ! key ) {
return response . sendStatus ( 401 ) ;
}
const text = request . body . text ;
const lang = request . body . lang ;
if ( ! text || ! lang ) {
return response . sendStatus ( 400 ) ;
}
console . log ( 'Input text: ' + text ) ;
const fetch = require ( 'node-fetch' ) . default ;
const params = new URLSearchParams ( ) ;
params . append ( 'text' , text ) ;
params . append ( 'target_lang' , lang ) ;
try {
const result = await fetch ( 'https://api-free.deepl.com/v2/translate' , {
method : 'POST' ,
body : params ,
headers : {
'Accept' : 'application/json' ,
'Authorization' : ` DeepL-Auth-Key ${ key } ` ,
'Content-Type' : 'application/x-www-form-urlencoded' ,
} ,
timeout : 0 ,
} ) ;
if ( ! result . ok ) {
return response . sendStatus ( result . status ) ;
}
const json = await result . json ( ) ;
console . log ( 'Translated text: ' + json . translations [ 0 ] . text ) ;
return response . send ( json . translations [ 0 ] . text ) ;
} catch ( error ) {
console . log ( "Translation error: " + error . message ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/novel_tts' , jsonParser , async ( request , response ) => {
const token = readSecret ( SECRET _KEYS . NOVEL ) ;
if ( ! token ) {
return response . sendStatus ( 401 ) ;
}
const text = request . body . text ;
const voice = request . body . voice ;
if ( ! text || ! voice ) {
return response . sendStatus ( 400 ) ;
}
try {
const fetch = require ( 'node-fetch' ) . default ;
const url = ` ${ api _novelai } /ai/generate-voice?text= ${ encodeURIComponent ( text ) } &voice=-1&seed= ${ encodeURIComponent ( voice ) } &opus=false&version=v2 ` ;
const result = await fetch ( url , {
method : 'GET' ,
headers : {
'Authorization' : ` Bearer ${ token } ` ,
'Accept' : 'audio/mpeg' ,
} ,
timeout : 0 ,
} ) ;
if ( ! result . ok ) {
return response . sendStatus ( result . status ) ;
}
const chunks = await readAllChunks ( result . body ) ;
const buffer = Buffer . concat ( chunks ) ;
response . setHeader ( 'Content-Type' , 'audio/mpeg' ) ;
return response . send ( buffer ) ;
}
catch ( error ) {
console . error ( error ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/delete_sprite' , jsonParser , async ( request , response ) => {
const label = request . body . label ;
const name = request . body . name ;
if ( ! label || ! name ) {
return response . sendStatus ( 400 ) ;
}
try {
const spritesPath = path . join ( directories . characters , name ) ;
// No sprites folder exists, or not a directory
if ( ! fs . existsSync ( spritesPath ) || ! fs . statSync ( spritesPath ) . isDirectory ( ) ) {
return response . sendStatus ( 404 ) ;
}
const files = fs . readdirSync ( spritesPath ) ;
// Remove existing sprite with the same label
for ( const file of files ) {
if ( path . parse ( file ) . name === label ) {
fs . rmSync ( path . join ( spritesPath , file ) ) ;
}
}
return response . sendStatus ( 200 ) ;
} catch ( error ) {
console . error ( error ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/upload_sprite_pack' , urlencodedParser , async ( request , response ) => {
const file = request . file ;
const name = request . body . name ;
if ( ! file || ! name ) {
return response . sendStatus ( 400 ) ;
}
try {
const spritesPath = path . join ( directories . characters , name ) ;
// Create sprites folder if it doesn't exist
if ( ! fs . existsSync ( spritesPath ) ) {
fs . mkdirSync ( spritesPath ) ;
}
// Path to sprites is not a directory. This should never happen.
if ( ! fs . statSync ( spritesPath ) . isDirectory ( ) ) {
return response . sendStatus ( 404 ) ;
}
const spritePackPath = path . join ( UPLOADS _PATH , file . filename ) ;
const sprites = await getImageBuffers ( spritePackPath ) ;
const files = fs . readdirSync ( spritesPath ) ;
for ( const [ filename , buffer ] of sprites ) {
// Remove existing sprite with the same label
const existingFile = files . find ( file => path . parse ( file ) . name === path . parse ( filename ) . name ) ;
if ( existingFile ) {
fs . rmSync ( path . join ( spritesPath , existingFile ) ) ;
}
// Write sprite buffer to disk
const pathToSprite = path . join ( spritesPath , filename ) ;
writeFileAtomicSync ( pathToSprite , buffer ) ;
}
// Remove uploaded ZIP file
fs . rmSync ( spritePackPath ) ;
return response . send ( { count : sprites . length } ) ;
} catch ( error ) {
console . error ( error ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/upload_sprite' , urlencodedParser , async ( request , response ) => {
const file = request . file ;
const label = request . body . label ;
const name = request . body . name ;
if ( ! file || ! label || ! name ) {
return response . sendStatus ( 400 ) ;
}
try {
const spritesPath = path . join ( directories . characters , name ) ;
// Create sprites folder if it doesn't exist
if ( ! fs . existsSync ( spritesPath ) ) {
fs . mkdirSync ( spritesPath ) ;
}
// Path to sprites is not a directory. This should never happen.
if ( ! fs . statSync ( spritesPath ) . isDirectory ( ) ) {
return response . sendStatus ( 404 ) ;
}
const files = fs . readdirSync ( spritesPath ) ;
// Remove existing sprite with the same label
for ( const file of files ) {
if ( path . parse ( file ) . name === label ) {
fs . rmSync ( path . join ( spritesPath , file ) ) ;
}
}
const filename = label + path . parse ( file . originalname ) . ext ;
const spritePath = path . join ( UPLOADS _PATH , file . filename ) ;
const pathToFile = path . join ( spritesPath , filename ) ;
// Copy uploaded file to sprites folder
fs . cpSync ( spritePath , pathToFile ) ;
// Remove uploaded file
fs . rmSync ( spritePath ) ;
return response . sendStatus ( 200 ) ;
} catch ( error ) {
console . error ( error ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/import_custom' , jsonParser , async ( request , response ) => {
if ( ! request . body . url ) {
return response . sendStatus ( 400 ) ;
}
try {
const url = request . body . url ;
let result ;
const chubParsed = parseChubUrl ( url ) ;
if ( chubParsed ? . type === 'character' ) {
console . log ( 'Downloading chub character:' , chubParsed . id ) ;
result = await downloadChubCharacter ( chubParsed . id ) ;
}
else if ( chubParsed ? . type === 'lorebook' ) {
console . log ( 'Downloading chub lorebook:' , chubParsed . id ) ;
result = await downloadChubLorebook ( chubParsed . id ) ;
}
else {
return response . sendStatus ( 404 ) ;
}
response . set ( 'Content-Type' , result . fileType ) ;
response . set ( 'Content-Disposition' , ` attachment; filename=" ${ result . fileName } " ` ) ;
response . set ( 'X-Custom-Content-Type' , chubParsed ? . type ) ;
return response . send ( result . buffer ) ;
} catch ( error ) {
console . log ( 'Importing custom content failed' , error ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
async function downloadChubLorebook ( id ) {
const fetch = require ( 'node-fetch' ) . default ;
const result = await fetch ( 'https://api.chub.ai/api/lorebooks/download' , {
method : 'POST' ,
headers : { 'Content-Type' : 'application/json' } ,
body : JSON . stringify ( {
"fullPath" : id ,
"format" : "SILLYTAVERN" ,
} ) ,
} ) ;
if ( ! result . ok ) {
console . log ( await result . text ( ) ) ;
throw new Error ( 'Failed to download lorebook' ) ;
}
const name = id . split ( '/' ) . pop ( ) ;
const buffer = await result . buffer ( ) ;
const fileName = ` ${ sanitize ( name ) } .json ` ;
const fileType = result . headers . get ( 'content-type' ) ;
return { buffer , fileName , fileType } ;
}
async function downloadChubCharacter ( id ) {
const fetch = require ( 'node-fetch' ) . default ;
const result = await fetch ( 'https://api.chub.ai/api/characters/download' , {
method : 'POST' ,
headers : { 'Content-Type' : 'application/json' } ,
body : JSON . stringify ( {
"format" : "tavern" ,
"fullPath" : id ,
} )
} ) ;
if ( ! result . ok ) {
throw new Error ( 'Failed to download character' ) ;
}
const buffer = await result . buffer ( ) ;
const fileName = result . headers . get ( 'content-disposition' ) ? . split ( 'filename=' ) [ 1 ] || ` ${ sanitize ( id ) } .png ` ;
const fileType = result . headers . get ( 'content-type' ) ;
return { buffer , fileName , fileType } ;
}
function parseChubUrl ( str ) {
const splitStr = str . split ( '/' ) ;
const length = splitStr . length ;
if ( length < 2 ) {
return null ;
}
let domainIndex = - 1 ;
splitStr . forEach ( ( part , index ) => {
if ( part === 'www.chub.ai' || part === 'chub.ai' ) {
domainIndex = index ;
}
} )
const lastTwo = domainIndex !== - 1 ? splitStr . slice ( domainIndex + 1 ) : splitStr ;
const firstPart = lastTwo [ 0 ] . toLowerCase ( ) ;
if ( firstPart === 'characters' || firstPart === 'lorebooks' ) {
const type = firstPart === 'characters' ? 'character' : 'lorebook' ;
const id = type === 'character' ? lastTwo . slice ( 1 ) . join ( '/' ) : lastTwo . join ( '/' ) ;
return {
id : id ,
type : type
} ;
} else if ( length === 2 ) {
return {
id : lastTwo . join ( '/' ) ,
type : 'character'
} ;
}
return null ;
}
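// Illustrative parses (hypothetical slugs):
//   parseChubUrl('https://chub.ai/characters/author/card') -> { id: 'author/card', type: 'character' }
//   parseChubUrl('https://chub.ai/lorebooks/author/book')  -> { id: 'lorebooks/author/book', type: 'lorebook' }
//   parseChubUrl('author/card')                             -> { id: 'author/card', type: 'character' }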
function importRisuSprites ( data ) {
try {
const name = data ? . data ? . name ;
const risuData = data ? . data ? . extensions ? . risuai ;
// Not a Risu AI character
if ( ! risuData || ! name ) {
return ;
}
let images = [ ] ;
if ( Array . isArray ( risuData . additionalAssets ) ) {
images = images . concat ( risuData . additionalAssets ) ;
}
if ( Array . isArray ( risuData . emotions ) ) {
images = images . concat ( risuData . emotions ) ;
}
// No sprites to import
if ( images . length === 0 ) {
return ;
}
// Create sprites folder if it doesn't exist
const spritesPath = path . join ( directories . characters , name ) ;
if ( ! fs . existsSync ( spritesPath ) ) {
fs . mkdirSync ( spritesPath ) ;
}
// Path to sprites is not a directory. This should never happen.
if ( ! fs . statSync ( spritesPath ) . isDirectory ( ) ) {
return ;
}
console . log ( ` RisuAI: Found ${ images . length } sprites for ${ name } . Writing to disk. ` ) ;
const files = fs . readdirSync ( spritesPath ) ;
outer : for ( const [ label , fileBase64 ] of images ) {
// Remove existing sprite with the same label
for ( const file of files ) {
if ( path . parse ( file ) . name === label ) {
console . log ( ` RisuAI: The sprite ${ label } for ${ name } already exists. Skipping. ` ) ;
continue outer ;
}
}
const filename = label + '.png' ;
const pathToFile = path . join ( spritesPath , filename ) ;
writeFileAtomicSync ( pathToFile , fileBase64 , { encoding : 'base64' } ) ;
}
// Remove additionalAssets and emotions from data (they are now in the sprites folder)
delete data . data . extensions . risuai . additionalAssets ;
delete data . data . extensions . risuai . emotions ;
} catch ( error ) {
console . error ( error ) ;
}
}
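// secrets.json is a flat JSON object keyed by the SECRET_KEYS values above, e.g. { "api_key_openai": "sk-..." }.
// writeSecret (below) creates the file on first use and overwrites a single key; readSecret returns undefined
// when the file or the key does not exist.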
function writeSecret ( key , value ) {
if ( ! fs . existsSync ( SECRETS _FILE ) ) {
const emptyFile = JSON . stringify ( { } ) ;
writeFileAtomicSync ( SECRETS _FILE , emptyFile , "utf-8" ) ;
}
const fileContents = fs . readFileSync ( SECRETS _FILE ) ;
const secrets = JSON . parse ( fileContents ) ;
secrets [ key ] = value ;
writeFileAtomicSync ( SECRETS _FILE , JSON . stringify ( secrets ) , "utf-8" ) ;
}
function readSecret ( key ) {
if ( ! fs . existsSync ( SECRETS _FILE ) ) {
return undefined ;
}
const fileContents = fs . readFileSync ( SECRETS _FILE ) ;
const secrets = JSON . parse ( fileContents ) ;
return secrets [ key ] ;
}
async function readAllChunks ( readableStream ) {
return new Promise ( ( resolve , reject ) => {
// Consume the readable stream
const chunks = [ ] ;
readableStream . on ( 'data' , ( chunk ) => {
chunks . push ( chunk ) ;
} ) ;
readableStream . on ( 'end' , ( ) => {
//console.log('Finished reading the stream.');
resolve ( chunks ) ;
} ) ;
readableStream . on ( 'error' , ( error ) => {
console . error ( 'Error while reading the stream:' , error ) ;
reject ( ) ;
} ) ;
} ) ;
}
async function getImageBuffers ( zipFilePath ) {
return new Promise ( ( resolve , reject ) => {
// Check if the zip file exists
if ( ! fs . existsSync ( zipFilePath ) ) {
reject ( new Error ( 'File not found' ) ) ;
return ;
}
const imageBuffers = [ ] ;
yauzl . open ( zipFilePath , { lazyEntries : true } , ( err , zipfile ) => {
if ( err ) {
reject ( err ) ;
} else {
zipfile . readEntry ( ) ;
zipfile . on ( 'entry' , ( entry ) => {
const mimeType = mime . lookup ( entry . fileName ) ;
if ( mimeType && mimeType . startsWith ( 'image/' ) && ! entry . fileName . startsWith ( '__MACOSX' ) ) {
console . log ( ` Extracting ${ entry . fileName } ` ) ;
zipfile . openReadStream ( entry , ( err , readStream ) => {
if ( err ) {
reject ( err ) ;
} else {
const chunks = [ ] ;
readStream . on ( 'data' , ( chunk ) => {
chunks . push ( chunk ) ;
} ) ;
readStream . on ( 'end' , ( ) => {
imageBuffers . push ( [ path . parse ( entry . fileName ) . base , Buffer . concat ( chunks ) ] ) ;
zipfile . readEntry ( ) ; // Continue to the next entry
} ) ;
}
} ) ;
} else {
zipfile . readEntry ( ) ; // Continue to the next entry
}
} ) ;
zipfile . on ( 'end' , ( ) => {
resolve ( imageBuffers ) ;
} ) ;
zipfile . on ( 'error' , ( err ) => {
reject ( err ) ;
} ) ;
}
} ) ;
} ) ;
}
/**
 * This function extracts the extension information from the manifest file.
 * @param {string} extensionPath - The path of the extension folder
 * @returns {Object} - Returns the manifest data as an object
 */
async function getManifest ( extensionPath ) {
const manifestPath = path . join ( extensionPath , 'manifest.json' ) ;
// Check if manifest.json exists
if ( ! fs . existsSync ( manifestPath ) ) {
throw new Error ( ` Manifest file not found at ${ manifestPath } ` ) ;
}
const manifest = JSON . parse ( fs . readFileSync ( manifestPath , 'utf8' ) ) ;
return manifest ;
}
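// checkIfRepoIsUpToDate (below) fetches origin for the extension repo and lists the commits between
// the local HEAD and origin/<current branch>; the repo counts as up to date when that log is empty.
// It also returns the fetch URL of the first configured remote.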
async function checkIfRepoIsUpToDate ( extensionPath ) {
const git = simpleGit ( ) ;
await git . cwd ( extensionPath ) . fetch ( 'origin' ) ;
const currentBranch = await git . cwd ( extensionPath ) . branch ( ) ;
const currentCommitHash = await git . cwd ( extensionPath ) . revparse ( [ 'HEAD' ] ) ;
const log = await git . cwd ( extensionPath ) . log ( {
from : currentCommitHash ,
to : ` origin/ ${ currentBranch . current } ` ,
} ) ;
// Fetch remote repository information
const remotes = await git . cwd ( extensionPath ) . getRemotes ( true ) ;
return {
isUpToDate : log . total === 0 ,
remoteUrl : remotes [ 0 ] . refs . fetch , // URL of the remote repository
} ;
}
/**
 * HTTP POST handler function to clone a git repository from a provided URL, read the extension manifest,
 * and return extension information and path.
 *
 * @param {Object} request - HTTP Request object, expects a JSON body with a 'url' property.
 * @param {Object} response - HTTP Response object used to respond to the HTTP request.
 *
 * @returns {void}
 */
app . post ( '/get_extension' , jsonParser , async ( request , response ) => {
const git = simpleGit ( ) ;
if ( ! request . body . url ) {
return response . status ( 400 ) . send ( 'Bad Request: URL is required in the request body.' ) ;
}
try {
// make sure the third-party directory exists
if ( ! fs . existsSync ( directories . extensions + '/third-party' ) ) {
fs . mkdirSync ( directories . extensions + '/third-party' ) ;
}
const url = request . body . url ;
const extensionPath = path . join ( directories . extensions , 'third-party' , path . basename ( url , '.git' ) ) ;
if ( fs . existsSync ( extensionPath ) ) {
return response . status ( 409 ) . send ( ` Directory already exists at ${ extensionPath } ` ) ;
}
await git . clone ( url , extensionPath ) ;
console . log ( ` Extension has been cloned at ${ extensionPath } ` ) ;
const { version , author , display _name } = await getManifest ( extensionPath ) ;
return response . send ( { version , author , display _name , extensionPath } ) ;
} catch ( error ) {
console . log ( 'Importing custom content failed' , error ) ;
return response . status ( 500 ) . send ( ` Server Error: ${ error . message } ` ) ;
}
} ) ;
/**
 * HTTP POST handler function to pull the latest updates from a git repository
 * based on the extension name provided in the request body. It returns the latest commit hash,
 * the path of the extension, the status of the repository (whether it's up-to-date or not),
 * and the remote URL of the repository.
 *
 * @param {Object} request - HTTP Request object, expects a JSON body with an 'extensionName' property.
 * @param {Object} response - HTTP Response object used to respond to the HTTP request.
 *
 * @returns {void}
 */
app . post ( '/update_extension' , jsonParser , async ( request , response ) => {
const git = simpleGit ( ) ;
if ( ! request . body . extensionName ) {
return response . status ( 400 ) . send ( 'Bad Request: extensionName is required in the request body.' ) ;
}
try {
const extensionName = request . body . extensionName ;
const extensionPath = path . join ( directories . extensions , 'third-party' , extensionName ) ;
if ( ! fs . existsSync ( extensionPath ) ) {
return response . status ( 404 ) . send ( ` Directory does not exist at ${ extensionPath } ` ) ;
}
const { isUpToDate , remoteUrl } = await checkIfRepoIsUpToDate ( extensionPath ) ;
const currentBranch = await git . cwd ( extensionPath ) . branch ( ) ;
if ( ! isUpToDate ) {
await git . cwd ( extensionPath ) . pull ( 'origin' , currentBranch . current ) ;
console . log ( ` Extension has been updated at ${ extensionPath } ` ) ;
} else {
console . log ( ` Extension is up to date at ${ extensionPath } ` ) ;
}
await git . cwd ( extensionPath ) . fetch ( 'origin' ) ;
const fullCommitHash = await git . cwd ( extensionPath ) . revparse ( [ 'HEAD' ] ) ;
const shortCommitHash = fullCommitHash . slice ( 0 , 7 ) ;
return response . send ( { shortCommitHash , extensionPath , isUpToDate , remoteUrl } ) ;
} catch ( error ) {
console . log ( 'Updating custom content failed' , error ) ;
return response . status ( 500 ) . send ( ` Server Error: ${ error . message } ` ) ;
}
} ) ;
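// Example (hypothetical client-side call, same caveats as above):
//   fetch('/update_extension', {
//       method: 'POST',
//       headers: { 'Content-Type': 'application/json' },
//       body: JSON.stringify({ extensionName: 'my-extension' }),
//   }).then(res => res.json()).then(({ shortCommitHash, isUpToDate }) => console.log(shortCommitHash, isUpToDate));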
/**
 * HTTP POST handler function to get the current git commit hash and branch name for a given extension.
 * It checks whether the repository is up-to-date with the remote, and returns the status along with
 * the remote URL of the repository.
 *
 * @param {Object} request - HTTP Request object, expects a JSON body with an 'extensionName' property.
 * @param {Object} response - HTTP Response object used to respond to the HTTP request.
 *
 * @returns {void}
 */
app.post('/get_extension_version', jsonParser, async (request, response) => {
const git = simpleGit();
if (!request.body.extensionName) {
return response.status(400).send('Bad Request: extensionName is required in the request body.');
}
try {
const extensionName = request.body.extensionName;
const extensionPath = path.join(directories.extensions, 'third-party', extensionName);
if (!fs.existsSync(extensionPath)) {
return response.status(404).send(`Directory does not exist at ${extensionPath}`);
}
const currentBranch = await git.cwd(extensionPath).branch();
// Get only the working branch
const currentBranchName = currentBranch.current;
await git.cwd(extensionPath).fetch('origin');
const currentCommitHash = await git.cwd(extensionPath).revparse(['HEAD']);
console.log(currentBranch, currentCommitHash);
const { isUpToDate, remoteUrl } = await checkIfRepoIsUpToDate(extensionPath);
return response.send({ currentBranchName, currentCommitHash, isUpToDate, remoteUrl });
} catch (error) {
console.log('Getting extension version failed', error);
return response.status(500).send(`Server Error: ${error.message}`);
}
});
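// Example (hypothetical client-side call, same caveats as above):
//   fetch('/get_extension_version', {
//       method: 'POST',
//       headers: { 'Content-Type': 'application/json' },
//       body: JSON.stringify({ extensionName: 'my-extension' }),
//   }).then(res => res.json()).then(({ currentBranchName, isUpToDate }) => console.log(currentBranchName, isUpToDate));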
/**
 * HTTP POST handler function to delete a git repository based on the extension name provided in the request body.
 *
 * @param {Object} request - HTTP Request object, expects a JSON body with an 'extensionName' property.
 * @param {Object} response - HTTP Response object used to respond to the HTTP request.
 *
 * @returns {void}
 */
app.post('/delete_extension', jsonParser, async (request, response) => {
if (!request.body.extensionName) {
return response.status(400).send('Bad Request: extensionName is required in the request body.');
}
// Sanitize the extension name to prevent directory traversal
const extensionName = sanitize(request.body.extensionName);
try {
const extensionPath = path.join(directories.extensions, 'third-party', extensionName);
if (!fs.existsSync(extensionPath)) {
return response.status(404).send(`Directory does not exist at ${extensionPath}`);
}
await fs.promises.rmdir(extensionPath, { recursive: true });
console.log(`Extension has been deleted at ${extensionPath}`);
return response.send(`Extension has been deleted at ${extensionPath}`);
} catch (error) {
console.log('Deleting custom content failed', error);
return response.status(500).send(`Server Error: ${error.message}`);
}
});
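// Example (hypothetical client-side call, same caveats as above):
//   fetch('/delete_extension', {
//       method: 'POST',
//       headers: { 'Content-Type': 'application/json' },
//       body: JSON.stringify({ extensionName: 'my-extension' }),
//   }).then(res => res.text()).then(console.log);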
/**
 * HTTP POST handler function to retrieve the names of all files in the assets folder, grouped by asset category.
 *
 * @param {Object} request - HTTP Request object.
 * @param {Object} response - HTTP Response object will contain a list of file paths per asset folder.
 *
 * @returns {void}
 */
app.post('/get_assets', jsonParser, async (request, response) => {
const folderPath = path.join(directories.assets);
let output = {};
//console.info("Checking files into", folderPath);
try {
if (fs.existsSync(folderPath) && fs.statSync(folderPath).isDirectory()) {
const folders = fs.readdirSync(folderPath)
.filter(filename => {
return fs.statSync(path.join(folderPath, filename)).isDirectory();
});

for (const folder of folders) {
if (folder == "temp")
continue;
const files = fs.readdirSync(path.join(folderPath, folder))
.filter(filename => {
return filename != ".placeholder";
});
output[folder] = [];
for (const file of files) {
output[folder].push(path.join("assets", folder, file));
}
}
}
}
catch (err) {
console.log(err);
}
finally {
return response.send(output);
}
});
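// Example (hypothetical client-side call; the response shape shown is illustrative and depends on what is on disk,
// e.g. { bgm: ['assets/bgm/theme.mp3'], ambient: [] }):
//   fetch('/get_assets', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({}) })
//       .then(res => res.json()).then(console.log);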
/**
 * Validates and sanitizes a user-supplied asset filename.
 * @param {string} inputFilename - The filename to check
 * @returns {string} - The normalized filename, or an empty string if the name is rejected
 */
function checkAssetFileName(inputFilename) {
// Sanitize filename
if (inputFilename.indexOf('\0') !== -1) {
console.debug("Bad request: poisoned null bytes in filename.");
return '';
}
if (!/^[a-zA-Z0-9_\-\.]+$/.test(inputFilename)) {
console.debug("Bad request: illegal character in filename, only alphanumeric, '_', '-' and '.' are accepted.");
return '';
}
if (contentManager.unsafeExtensions.some(ext => inputFilename.toLowerCase().endsWith(ext))) {
console.debug("Bad request: forbidden file extension.");
return '';
}
if (inputFilename.startsWith('.')) {
console.debug("Bad request: filename cannot start with '.'");
return '';
}
return path.normalize(inputFilename).replace(/^(\.\.(\/|\\|$))+/, '');
}
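// Example (hypothetical inputs, assuming '.mp3' is not listed in contentManager.unsafeExtensions):
//   checkAssetFileName('calm_rain.mp3') returns 'calm_rain.mp3', while
//   checkAssetFileName('../../secret.sh') and checkAssetFileName('.hidden') both return ''.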
/**
 * HTTP POST handler function to download the requested asset.
 *
 * @param {Object} request - HTTP Request object, expects a url, a category and a filename.
 * @param {Object} response - HTTP Response only gives status.
 *
 * @returns {void}
 */
app.post('/asset_download', jsonParser, async (request, response) => {
const { Readable } = require('stream');
const { finished } = require('stream/promises');
const url = request.body.url;
const inputCategory = request.body.category;
const inputFilename = sanitize(request.body.filename);
const validCategories = ["bgm", "ambient"];

// Check category
let category = null;
for (const i of validCategories)
if (i == inputCategory)
category = i;

if (category === null) {
console.debug("Bad request: unsupported asset category.");
return response.sendStatus(400);
}

// Sanitize filename
const safe_input = checkAssetFileName(inputFilename);
if (safe_input == '')
return response.sendStatus(400);

const temp_path = path.join(directories.assets, "temp", safe_input);
const file_path = path.join(directories.assets, category, safe_input);
console.debug("Request received to download", url, "to", file_path);

try {
// Download to temp
const downloadFile = async (url, temp_path) => {
const res = await fetch(url);
if (!res.ok) {
throw new Error(`Unexpected response ${res.statusText}`);
}
const destination = path.resolve(temp_path);
// Delete leftover file if a previous download failed
if (fs.existsSync(temp_path)) {
fs.unlinkSync(temp_path);
}
const fileStream = fs.createWriteStream(destination, { flags: 'wx' });
await finished(Readable.fromWeb(res.body).pipe(fileStream));
};

await downloadFile(url, temp_path);
// Move into asset place
console.debug("Download finished, moving file from", temp_path, "to", file_path);
fs.renameSync(temp_path, file_path);
response.sendStatus(200);
}
catch (error) {
console.log(error);
response.sendStatus(500);
}
});
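// Example (hypothetical client-side call; the URL is a placeholder the server must be able to reach,
// and any CSRF token header the server may require is omitted for brevity):
//   fetch('/asset_download', {
//       method: 'POST',
//       headers: { 'Content-Type': 'application/json' },
//       body: JSON.stringify({ url: 'https://example.com/rain.mp3', category: 'ambient', filename: 'rain.mp3' }),
//   }).then(res => console.log(res.status));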
/**
 * HTTP POST handler function to delete the requested asset.
 *
 * @param {Object} request - HTTP Request object, expects a category and a filename.
 * @param {Object} response - HTTP Response only gives status.
 *
 * @returns {void}
 */
app.post('/asset_delete', jsonParser, async (request, response) => {
const inputCategory = request.body.category;
const inputFilename = sanitize(request.body.filename);
const validCategories = ["bgm", "ambient"];

// Check category
let category = null;
for (const i of validCategories)
if (i == inputCategory)
category = i;

if (category === null) {
console.debug("Bad request: unsupported asset category.");
return response.sendStatus(400);
}

// Sanitize filename
const safe_input = checkAssetFileName(inputFilename);
if (safe_input == '')
return response.sendStatus(400);

const file_path = path.join(directories.assets, category, safe_input);
console.debug("Request received to delete", category, file_path);

try {
// Delete the asset file if it exists
if (fs.existsSync(file_path)) {
fs.unlinkSync(file_path);
console.debug("Asset deleted.");
}
else {
console.debug("Asset not found.");
return response.sendStatus(400);
}
response.sendStatus(200);
}
catch (error) {
console.log(error);
response.sendStatus(500);
}
});
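// Example (hypothetical client-side call, same caveats as above):
//   fetch('/asset_delete', {
//       method: 'POST',
//       headers: { 'Content-Type': 'application/json' },
//       body: JSON.stringify({ category: 'ambient', filename: 'rain.mp3' }),
//   }).then(res => console.log(res.status));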
///////////////////////////////
/**
 * HTTP POST handler function to retrieve a character's asset list (background music or ambient sounds).
 *
 * @param {Object} request - HTTP Request object, expects a character name and a category in the query.
 * @param {Object} response - HTTP Response object will contain a list of audio file paths.
 *
 * @returns {void}
 */
app.post('/get_character_assets_list', jsonParser, async (request, response) => {
const name = sanitize(request.query.name);
const inputCategory = request.query.category;
const validCategories = ["bgm", "ambient"];

// Check category
let category = null;
for (const i of validCategories)
if (i == inputCategory)
category = i;

if (category === null) {
console.debug("Bad request: unsupported asset category.");
return response.sendStatus(400);
}

const folderPath = path.join(directories.characters, name, category);
let output = [];

try {
if (fs.existsSync(folderPath) && fs.statSync(folderPath).isDirectory()) {
const files = fs.readdirSync(folderPath)
.filter(filename => {
return filename != ".placeholder";
});

for (const i of files)
output.push(`/characters/${name}/${category}/${i}`);
}
return response.send(output);
}
catch (err) {
console.log(err);
return response.sendStatus(500);
}
});
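// Example (hypothetical client-side call; note this endpoint reads its parameters from the query string,
// and the character name is a placeholder):
//   fetch('/get_character_assets_list?name=Seraphina&category=bgm', { method: 'POST', headers: { 'Content-Type': 'application/json' } })
//       .then(res => res.json()).then(console.log);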