#!/usr/bin/env node
createDefaultFiles();

function createDefaultFiles() {
    const fs = require('fs');
    const path = require('path');
    const files = {
        settings: 'public/settings.json',
        bg_load: 'public/css/bg_load.css',
        config: 'config.conf',
    };

    for (const file of Object.values(files)) {
        try {
            if (!fs.existsSync(file)) {
                const defaultFilePath = path.join('default', path.parse(file).base);
                fs.copyFileSync(defaultFilePath, file);
                console.log(`Created default file: ${file}`);
            }
        } catch (error) {
            console.error(`FATAL: Could not write default file: ${file}`, error);
        }
    }
}
const process = require ( 'process' )
const yargs = require ( 'yargs/yargs' ) ;
const { hideBin } = require ( 'yargs/helpers' ) ;
const net = require ( "net" ) ;
// work around a node v20 bug: https://github.com/nodejs/node/issues/47822#issuecomment-1564708870
if ( net . setDefaultAutoSelectFamily ) {
net . setDefaultAutoSelectFamily ( false ) ;
}
const cliArguments = yargs ( hideBin ( process . argv ) )
. option ( 'ssl' , {
type : 'boolean' ,
default : false ,
describe : 'Enables SSL'
} ) . option ( 'certPath' , {
type : 'string' ,
default : 'certs/cert.pem' ,
describe : 'Path to your certificate file.'
} ) . option ( 'keyPath' , {
type : 'string' ,
default : 'certs/privkey.pem' ,
describe : 'Path to your private key file.'
} ) . argv ;
// change all relative paths
const path = require('path');
const directory = process.pkg ? path.dirname(process.execPath) : __dirname;
console.log(process.pkg ? 'Running from binary' : 'Running from source');
process.chdir(directory);
const express = require ( 'express' ) ;
const compression = require ( 'compression' ) ;
const app = express ( ) ;
const responseTime = require ( 'response-time' ) ;
const simpleGit = require ( 'simple-git' ) ;
app . use ( compression ( ) ) ;
app . use ( responseTime ( ) ) ;
const fs = require ( 'fs' ) ;
const readline = require ( 'readline' ) ;
const open = require ( 'open' ) ;
const multer = require ( "multer" ) ;
const http = require ( "http" ) ;
const https = require ( 'https' ) ;
const basicAuthMiddleware = require ( './src/middleware/basicAuthMiddleware' ) ;
const contentManager = require ( './src/content-manager' ) ;
const extract = require ( 'png-chunks-extract' ) ;
const encode = require ( 'png-chunks-encode' ) ;
const PNGtext = require ( 'png-chunk-text' ) ;
const jimp = require ( 'jimp' ) ;
const sanitize = require ( 'sanitize-filename' ) ;
const mime = require ( 'mime-types' ) ;
const cookieParser = require ( 'cookie-parser' ) ;
const crypto = require ( 'crypto' ) ;
const ipaddr = require ( 'ipaddr.js' ) ;
const json5 = require ( 'json5' ) ;
const exif = require ( 'piexifjs' ) ;
const webp = require ( 'webp-converter' ) ;
const DeviceDetector = require ( "device-detector-js" ) ;
const { TextEncoder , TextDecoder } = require ( 'util' ) ;
const utf8Encode = new TextEncoder ( ) ;
const commandExistsSync = require ( 'command-exists' ) . sync ;
// import from statsHelpers.js
const statsHelpers = require('./statsHelpers.js');
const characterCardParser = require('./src/character-card-parser.js');
const config = require(path.join(process.cwd(), './config.conf'));
const server_port = process.env.SILLY_TAVERN_PORT || config.port;
const whitelistPath = path . join ( process . cwd ( ) , "./whitelist.txt" ) ;
let whitelist = config . whitelist ;
if ( fs . existsSync ( whitelistPath ) ) {
try {
let whitelistTxt = fs . readFileSync ( whitelistPath , 'utf-8' ) ;
whitelist = whitelistTxt . split ( "\n" ) . filter ( ip => ip ) . map ( ip => ip . trim ( ) ) ;
} catch ( e ) { }
}
const whitelistMode = config . whitelistMode ;
const autorun = config . autorun && ! cliArguments . ssl ;
const enableExtensions = config . enableExtensions ;
const listen = config . listen ;
const allowKeysExposure = config . allowKeysExposure ;
const axios = require ( 'axios' ) ;
const tiktoken = require ( '@dqbd/tiktoken' ) ;
const WebSocket = require ( 'ws' ) ;
const AIHorde = require ( "./src/horde" ) ;
const ai_horde = new AIHorde({
    client_agent: getVersion()?.agent || 'SillyTavern:UNKNOWN:Cohee#1207',
});
const ipMatching = require ( 'ip-matching' ) ;
const yauzl = require ( 'yauzl' ) ;
const Client = require ( 'node-rest-client' ) . Client ;
const client = new Client ( ) ;
client . on ( 'error' , ( err ) => {
console . error ( 'An error occurred:' , err ) ;
} ) ;
let api_server = "http://0.0.0.0:5000";
let api_novelai = "https://api.novelai.net";
let api_openai = "https://api.openai.com/v1";
let api_claude = "https://api.anthropic.com/v1";
let main_api = "kobold";
let response_generate_novel;
let characters = {};
let response_dw_bg;
let response_getstatus;
let first_run = true;

function get_mancer_headers() {
    const api_key_mancer = readSecret(SECRET_KEYS.MANCER);
    return api_key_mancer ? { "X-API-KEY": api_key_mancer } : {};
}
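// Note: callers merge these headers into an existing header object, e.g.
// `Object.assign(args.headers, get_mancer_headers())`, so the "X-API-KEY" header is
// only sent when a Mancer API key has been stored via readSecret().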
//RossAscends: Added function to format dates used in files and chat timestamps to a humanized format.
//Mostly I wanted this to be for file names, but couldn't figure out exactly where the filename save code was as everything seemed to be connected.
//During testing, this performs the same as the previous Date.now() structure.
//It also does not break old characters/chats, as the code just uses whatever timestamp exists in the chat.
//New chats made with characters will use this new formatting.
//Usable variable is (( humanizedISO8601Datetime ))

const delay = ms => new Promise(resolve => setTimeout(resolve, ms));

const { SentencePieceProcessor, cleanText } = require("sentencepiece-js");
const { Tokenizer } = require('@mlc-ai/web-tokenizers');
const CHARS_PER_TOKEN = 3.35;

let spp_llama;
let spp_nerd;
let spp_nerd_v2;
let claude_tokenizer;
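// Worked example of the CHARS_PER_TOKEN fallback used below: when no tokenizer model is
// loaded, a 670-character prompt is estimated as Math.ceil(670 / 3.35) = 200 tokens.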
async function loadSentencepieceTokenizer ( modelPath ) {
try {
const spp = new SentencePieceProcessor ( ) ;
await spp . load ( modelPath ) ;
return spp ;
} catch ( error ) {
console . error ( "Sentencepiece tokenizer failed to load: " + modelPath , error ) ;
return null ;
}
} ;
async function countSentencepieceTokens(spp, text) {
    // Fallback to strlen estimation
    if (!spp) {
        return {
            ids: [],
            count: Math.ceil(text.length / CHARS_PER_TOKEN)
        };
    }

    let cleaned = text; // cleanText(text); <-- cleaning text can result in an incorrect tokenization
    let ids = spp.encodeIds(cleaned);

    return {
        ids,
        count: ids.length
    };
}
async function loadClaudeTokenizer ( modelPath ) {
try {
const arrayBuffer = fs . readFileSync ( modelPath ) . buffer ;
const instance = await Tokenizer . fromJSON ( arrayBuffer ) ;
return instance ;
} catch ( error ) {
console . error ( "Claude tokenizer failed to load: " + modelPath , error ) ;
return null ;
}
}
function countClaudeTokens ( tokenizer , messages ) {
const convertedPrompt = convertClaudePrompt ( messages , false , false ) ;
// Fallback to strlen estimation
if ( ! tokenizer ) {
return Math.ceil(convertedPrompt.length / CHARS_PER_TOKEN);
}
const count = tokenizer . encode ( convertedPrompt ) . length ;
return count ;
}
const tokenizersCache = { } ;
function getTokenizerModel ( requestModel ) {
if ( requestModel . includes ( 'claude' ) ) {
return 'claude' ;
}
if ( requestModel . includes ( 'gpt-4-32k' ) ) {
return 'gpt-4-32k' ;
}
if ( requestModel . includes ( 'gpt-4' ) ) {
return 'gpt-4' ;
}
if ( requestModel . includes ( 'gpt-3.5-turbo' ) ) {
return 'gpt-3.5-turbo' ;
}
if ( requestModel . startsWith ( 'text-' ) || requestModel . startsWith ( 'code-' ) ) {
return requestModel ;
}
// default
return 'gpt-3.5-turbo' ;
}
function getTiktokenTokenizer ( model ) {
if ( tokenizersCache [ model ] ) {
return tokenizersCache [ model ] ;
}
const tokenizer = tiktoken.encoding_for_model(model);
console . log ( 'Instantiated the tokenizer for' , model ) ;
tokenizersCache [ model ] = tokenizer ;
return tokenizer ;
}
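// Illustrative flow (assumed caller, e.g. an OpenAI token-count route):
//   const model = getTokenizerModel('gpt-4-0613');   // -> 'gpt-4'
//   const tokenizer = getTiktokenTokenizer(model);    // cached in tokenizersCache
//   const tokenCount = tokenizer.encode('Hello world').length;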
function humanizedISO8601DateTime ( ) {
let baseDate = new Date ( Date . now ( ) ) ;
let humanYear = baseDate . getFullYear ( ) ;
let humanMonth = ( baseDate . getMonth ( ) + 1 ) ;
let humanDate = baseDate . getDate ( ) ;
let humanHour = ( baseDate . getHours ( ) < 10 ? '0' : '' ) + baseDate . getHours ( ) ;
let humanMinute = ( baseDate . getMinutes ( ) < 10 ? '0' : '' ) + baseDate . getMinutes ( ) ;
let humanSecond = ( baseDate . getSeconds ( ) < 10 ? '0' : '' ) + baseDate . getSeconds ( ) ;
let humanMillisecond = ( baseDate . getMilliseconds ( ) < 10 ? '0' : '' ) + baseDate . getMilliseconds ( ) ;
let HumanizedDateTime = ( humanYear + "-" + humanMonth + "-" + humanDate + " @" + humanHour + "h " + humanMinute + "m " + humanSecond + "s " + humanMillisecond + "ms" ) ;
return HumanizedDateTime ;
} ;
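// Example output of humanizedISO8601DateTime(), per the format string above: "2023-7-20 @19h 32m 15s 123ms"
// (hours, minutes and seconds are zero-padded; month and day are not, and milliseconds only gain a leading zero below 10).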
var is_colab = process.env.colaburl !== undefined;
var charactersPath = 'public/characters/';
var chatsPath = 'public/chats/';
const AVATAR_WIDTH = 400;
const AVATAR_HEIGHT = 600;
const jsonParser = express . json ( { limit : '100mb' } ) ;
const urlencodedParser = express . urlencoded ( { extended : true , limit : '100mb' } ) ;
const baseRequestArgs = { headers : { "Content-Type" : "application/json" } } ;
const directories = {
    worlds: 'public/worlds/',
    avatars: 'public/User Avatars',
    groups: 'public/groups/',
    groupChats: 'public/group chats',
    chats: 'public/chats/',
    characters: 'public/characters/',
    backgrounds: 'public/backgrounds',
    novelAI_Settings: 'public/NovelAI Settings',
    koboldAI_Settings: 'public/KoboldAI Settings',
    openAI_Settings: 'public/OpenAI Settings',
    textGen_Settings: 'public/TextGen Settings',
    thumbnails: 'thumbnails/',
    thumbnailsBg: 'thumbnails/bg/',
    thumbnailsAvatar: 'thumbnails/avatar/',
    themes: 'public/themes',
    movingUI: 'public/movingUI',
    extensions: 'public/scripts/extensions',
    instruct: 'public/instruct',
    context: 'public/context',
    backups: 'backups/',
    quickreplies: 'public/QuickReplies'
};
// CSRF Protection //
const doubleCsrf = require('csrf-csrf').doubleCsrf;
const CSRF_SECRET = crypto.randomBytes(8).toString('hex');
const COOKIES_SECRET = crypto.randomBytes(8).toString('hex');

const { generateToken, doubleCsrfProtection } = doubleCsrf({
    getSecret: () => CSRF_SECRET,
    cookieName: "X-CSRF-Token",
    cookieOptions: {
        httpOnly: true,
        sameSite: "strict",
        secure: false
    },
    size: 64,
    getTokenFromRequest: (req) => req.headers["x-csrf-token"]
});
app . get ( "/csrf-token" , ( req , res ) => {
res . json ( {
"token" : generateToken ( res )
} ) ;
} ) ;
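// Illustrative client-side usage (a sketch, not part of the server): fetch the token once,
// then echo it back in the header that getTokenFromRequest() reads, e.g.
//   const { token } = await (await fetch('/csrf-token')).json();
//   await fetch('/savesettings', {
//       method: 'POST',
//       headers: { 'Content-Type': 'application/json', 'X-CSRF-Token': token },
//       body: JSON.stringify(settings), // `settings` is a hypothetical payload
//   });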
app.use(cookieParser(COOKIES_SECRET));
app . use ( doubleCsrfProtection ) ;
// CORS Settings //
const cors = require ( 'cors' ) ;
const CORS = cors ( {
origin : 'null' ,
methods : [ 'OPTIONS' ]
} ) ;
app . use ( CORS ) ;
if ( listen && config . basicAuthMode ) app . use ( basicAuthMiddleware ) ;
app . use ( function ( req , res , next ) { //Security
let clientIp = req . connection . remoteAddress ;
let ip = ipaddr . parse ( clientIp ) ;
// Check if the IP address is IPv4-mapped IPv6 address
if ( ip . kind ( ) === 'ipv6' && ip . isIPv4MappedAddress ( ) ) {
const ipv4 = ip . toIPv4Address ( ) . toString ( ) ;
clientIp = ipv4 ;
} else {
clientIp = ip ;
clientIp = clientIp . toString ( ) ;
}
//clientIp = req.connection.remoteAddress.split(':').pop();
if ( whitelistMode === true && ! whitelist . some ( x => ipMatching . matches ( clientIp , ipMatching . getMatch ( x ) ) ) ) {
console.log('Forbidden: Connection attempt from ' + clientIp + '. If you are attempting to connect, please add your IP address to the whitelist or disable whitelist mode in config.conf in the root of the SillyTavern folder.\n');
return res.status(403).send('<b>Forbidden</b>: Connection attempt from <b>' + clientIp + '</b>. If you are attempting to connect, please add your IP address to the whitelist or disable whitelist mode in config.conf in the root of the SillyTavern folder.');
}
next ( ) ;
} ) ;
app . use ( ( req , res , next ) => {
if (req.url.startsWith('/characters/') && is_colab && process.env.googledrive == 2) {
const filePath = path . join ( charactersPath , decodeURIComponent ( req . url . substr ( '/characters' . length ) ) ) ;
console . log ( 'req.url: ' + req . url ) ;
console . log ( filePath ) ;
fs.access(filePath, fs.constants.R_OK, (err) => {
if ( ! err ) {
res . sendFile ( filePath , { root : process . cwd ( ) } ) ;
} else {
res . send ( 'Character not found: ' + filePath ) ;
//next();
}
} ) ;
} else {
next ( ) ;
}
} ) ;
app . use ( express . static ( process . cwd ( ) + "/public" , { refresh : true } ) ) ;
app . use ( '/backgrounds' , ( req , res ) => {
const filePath = decodeURIComponent ( path . join ( process . cwd ( ) , 'public/backgrounds' , req . url . replace ( /%20/g , ' ' ) ) ) ;
fs . readFile ( filePath , ( err , data ) => {
if ( err ) {
res . status ( 404 ) . send ( 'File not found' ) ;
return ;
}
//res.contentType('image/jpeg');
res . send ( data ) ;
} ) ;
} ) ;
app . use ( '/characters' , ( req , res ) => {
const filePath = decodeURIComponent ( path . join ( process . cwd ( ) , charactersPath , req . url . replace ( /%20/g , ' ' ) ) ) ;
fs . readFile ( filePath , ( err , data ) => {
if ( err ) {
res . status ( 404 ) . send ( 'File not found' ) ;
return ;
}
res . send ( data ) ;
} ) ;
} ) ;
app . use ( multer ( { dest : "uploads" , limits : { fieldSize : 10 * 1024 * 1024 } } ) . single ( "avatar" ) ) ;
app . get ( "/" , function ( request , response ) {
response . sendFile ( process . cwd ( ) + "/public/index.html" ) ;
} ) ;
app . get ( "/notes/*" , function ( request , response ) {
response . sendFile ( process . cwd ( ) + "/public" + request . url + ".html" ) ;
} ) ;
app . get ( '/deviceinfo' , function ( request , response ) {
const userAgent = request . header ( 'user-agent' ) ;
const deviceDetector = new DeviceDetector ( ) ;
const deviceInfo = deviceDetector . parse ( userAgent ) ;
return response . send ( deviceInfo ) ;
} ) ;
app . get ( '/version' , function ( _ , response ) {
const data = getVersion ( ) ;
response . send ( data ) ;
} )
//**************Kobold api
app.post("/generate", jsonParser, async function (request, response_generate = response) {
    if (!request.body) return response_generate.sendStatus(400);

    const request_prompt = request.body.prompt;
    const controller = new AbortController();
    request.socket.removeAllListeners('close');
    request.socket.on('close', async function () {
        if (request.body.can_abort && !response_generate.writableEnded) {
            try {
                console.log('Aborting Kobold generation...');
                // send abort signal to koboldcpp
                const abortResponse = await fetch(`${api_server}/extra/abort`, {
                    method: 'POST',
                });

                if (!abortResponse.ok) {
                    console.log('Error sending abort request to Kobold:', abortResponse.status);
                }
            } catch (error) {
                console.log(error);
            }
        }
        controller.abort();
    });

    let this_settings = {
        prompt: request_prompt,
        use_story: false,
        use_memory: false,
        use_authors_note: false,
        use_world_info: false,
        max_context_length: request.body.max_context_length,
        singleline: !!request.body.singleline,
    };

    if (request.body.gui_settings == false) {
        const sampler_order = [request.body.s1, request.body.s2, request.body.s3, request.body.s4, request.body.s5, request.body.s6, request.body.s7];
        this_settings = {
            prompt: request_prompt,
            use_story: false,
            use_memory: false,
            use_authors_note: false,
            use_world_info: false,
            max_context_length: request.body.max_context_length,
            max_length: request.body.max_length,
            rep_pen: request.body.rep_pen,
            rep_pen_range: request.body.rep_pen_range,
            rep_pen_slope: request.body.rep_pen_slope,
            temperature: request.body.temperature,
            tfs: request.body.tfs,
            top_a: request.body.top_a,
            top_k: request.body.top_k,
            top_p: request.body.top_p,
            typical: request.body.typical,
            sampler_order: sampler_order,
            singleline: !!request.body.singleline,
        };
        if (!!request.body.stop_sequence) {
            this_settings['stop_sequence'] = request.body.stop_sequence;
        }
    }

    console.log(this_settings);
    const args = {
        body: JSON.stringify(this_settings),
        headers: { "Content-Type": "application/json" },
        signal: controller.signal,
    };

    const MAX_RETRIES = 50;
    const delayAmount = 2500;
    let fetch, url, response;
    for (let i = 0; i < MAX_RETRIES; i++) {
        try {
            fetch = require('node-fetch').default;
            url = request.body.streaming ? `${api_server}/extra/generate/stream` : `${api_server}/v1/generate`;
            response = await fetch(url, { method: 'POST', timeout: 0, ...args });

            if (request.body.streaming) {
                request.socket.on('close', function () {
                    response.body.destroy(); // Close the remote stream
                    response_generate.end(); // End the Express response
                });

                response.body.on('end', function () {
                    console.log("Streaming request finished");
                    response_generate.end();
                });

                // Pipe remote SSE stream to Express response
                return response.body.pipe(response_generate);
            } else {
                if (!response.ok) {
                    const errorText = await response.text();
                    console.log(`Kobold returned error: ${response.status} ${response.statusText} ${errorText}`);

                    try {
                        const errorJson = JSON.parse(errorText);
                        const message = errorJson?.detail?.msg || errorText;
                        return response_generate.status(400).send({ error: { message } });
                    } catch {
                        return response_generate.status(400).send({ error: { message: errorText } });
                    }
                }

                const data = await response.json();
                return response_generate.send(data);
            }
        } catch (error) {
            // response
            switch (error?.status) {
                case 403:
                case 503: // retry in case of temporary service issue, possibly caused by a queue failure?
                    console.debug(`KoboldAI is busy. Retry attempt ${i + 1} of ${MAX_RETRIES}...`);
                    await delay(delayAmount);
                    break;
                default:
                    if ('status' in error) {
                        console.log('Status Code from Kobold:', error.status);
                    }
                    return response_generate.send({ error: true });
            }
        }
    }

    console.log('Max retries exceeded. Giving up.');
    return response_generate.send({ error: true });
});
//************** Text generation web UI
app.post("/generate_textgenerationwebui", jsonParser, async function (request, response_generate = response) {
if (!request.body) return response_generate.sendStatus(400);
console . log ( request . body ) ;
const controller = new AbortController ( ) ;
let isGenerationStopped = false ;
request . socket . removeAllListeners ( 'close' ) ;
request . socket . on ( 'close' , function ( ) {
isGenerationStopped = true ;
controller . abort ( ) ;
} ) ;
if ( request . header ( 'X-Response-Streaming' ) ) {
response_generate.writeHead(200, {
'Content-Type' : 'text/plain;charset=utf-8' ,
'Transfer-Encoding' : 'chunked' ,
'Cache-Control' : 'no-transform' ,
} ) ;
async function * readWebsocket ( ) {
const streamingUrl = request . header ( 'X-Streaming-URL' ) ;
const websocket = new WebSocket ( streamingUrl ) ;
websocket . on ( 'open' , async function ( ) {
console . log ( 'websocket open' ) ;
websocket . send ( JSON . stringify ( request . body ) ) ;
} ) ;
websocket . on ( 'error' , ( err ) => {
console . error ( err ) ;
websocket . close ( ) ;
} ) ;
websocket . on ( 'close' , ( code , buffer ) => {
const reason = new TextDecoder ( ) . decode ( buffer )
console . log ( reason ) ;
} ) ;
while ( true ) {
if ( isGenerationStopped ) {
console . error ( 'Streaming stopped by user. Closing websocket...' ) ;
websocket . close ( ) ;
return ;
}
const rawMessage = await new Promise ( resolve => websocket . once ( 'message' , resolve ) ) ;
const message = json5 . parse ( rawMessage ) ;
switch ( message . event ) {
case 'text_stream' :
yield message . text ;
break ;
case 'stream_end' :
websocket . close ( ) ;
return ;
}
}
}
let reply = '' ;
try {
for await ( const text of readWebsocket ( ) ) {
if ( typeof text !== 'string' ) {
break ;
}
let newText = text ;
if ( ! newText ) {
continue ;
}
reply += text ;
response_generate.write(newText);
}
console . log ( reply ) ;
}
finally {
response_generate.end();
}
}
else {
const args = {
body: JSON.stringify(request.body),
headers: { "Content-Type": "application/json" },
signal: controller.signal,
};

if (request.body.use_mancer) {
args.headers = Object.assign(args.headers, get_mancer_headers());
}

try {
const data = await postAsync(api_server + "/v1/generate", args);
console.log(data);
return response_generate.send(data);
} catch (error) {
let retval = { error: true, status: error.status, response: error.statusText };
console.log(error);
try {
retval.response = await error.json();
retval.response = retval.response.result;
} catch { }
return response_generate.send(retval);
}
}
} ) ;
app.post("/savechat", jsonParser, function (request, response) {
    try {
        var dir_name = String(request.body.avatar_url).replace('.png', '');
        let chat_data = request.body.chat;
        let jsonlData = chat_data.map(JSON.stringify).join('\n');
        fs.writeFileSync(`${chatsPath + sanitize(dir_name)}/${sanitize(String(request.body.file_name))}.jsonl`, jsonlData, 'utf8');
        return response.send({ result: "ok" });
    } catch (error) {
        response.send(error);
        return console.log(error);
    }
});
app . post ( "/getchat" , jsonParser , function ( request , response ) {
try {
const dirName = String(request.body.avatar_url).replace('.png', '');
const chatDirExists = fs . existsSync ( chatsPath + dirName ) ;
//if no chat dir for the character is found, make one with the character name
if ( ! chatDirExists ) {
fs . mkdirSync ( chatsPath + dirName ) ;
return response . send ( { } ) ;
}
if (!request.body.file_name) {
return response.send({});
}
const fileName = `${chatsPath + dirName}/${sanitize(String(request.body.file_name))}.jsonl`;
const chatFileExists = fs . existsSync ( fileName ) ;
if ( ! chatFileExists ) {
return response . send ( { } ) ;
}
const data = fs . readFileSync ( fileName , 'utf8' ) ;
const lines = data . split ( '\n' ) ;
// Iterate through the array of strings and parse each line as JSON
const jsonData = lines . map ( tryParse ) . filter ( x => x ) ;
return response . send ( jsonData ) ;
} catch ( error ) {
console . error ( error ) ;
return response . send ( { } ) ;
}
} ) ;
app.post("/getstatus", jsonParser, async function (request, response_getstatus = response) {
if (!request.body) return response_getstatus.sendStatus(400);
api_server = request.body.api_server;
main_api = request.body.main_api;
if (api_server.indexOf('localhost') != -1) {
api_server = api_server.replace('localhost', '127.0.0.1');
}
var args = {
headers : { "Content-Type" : "application/json" }
} ;
if (main_api == 'textgenerationwebui' && request.body.use_mancer) {
args.headers = Object.assign(args.headers, get_mancer_headers());
}

var url = api_server + "/v1/model";
let version = '' ;
let koboldVersion = { } ;
if (main_api == "kobold") {
try {
version = (await getAsync(api_server + "/v1/info/version")).result;
}
catch {
version = '0.0.0' ;
}
try {
koboldVersion = (await getAsync(api_server + "/extra/version"));
}
catch {
koboldVersion = {
result : 'Kobold' ,
version : '0.0' ,
} ;
}
}
client . get ( url , args , async function ( data , response ) {
if ( typeof data !== 'object' ) {
data = { } ;
}
if ( response . statusCode == 200 ) {
data . version = version ;
data . koboldVersion = koboldVersion ;
if ( data . result == "ReadOnly" ) {
data . result = "no_connection" ;
}
} else {
data . response = data . result ;
data . result = "no_connection" ;
}
response_getstatus.send(data);
} ) . on ( 'error' , function ( ) {
response_getstatus.send({ result: "no_connection" });
} ) ;
} ) ;
const formatApiUrl = ( url ) => ( url . indexOf ( 'localhost' ) !== - 1 )
? url . replace ( 'localhost' , '127.0.0.1' )
: url ;
function getVersion ( ) {
let pkgVersion = 'UNKNOWN' ;
let gitRevision = null ;
let gitBranch = null ;
try {
const pkgJson = require ( './package.json' ) ;
pkgVersion = pkgJson . version ;
if ( ! process . pkg && commandExistsSync ( 'git' ) ) {
gitRevision = require ( 'child_process' )
. execSync ( 'git rev-parse --short HEAD' , { cwd : process . cwd ( ) , stdio : [ 'ignore' , 'pipe' , 'ignore' ] } )
. toString ( ) . trim ( ) ;
gitBranch = require ( 'child_process' )
. execSync ( 'git rev-parse --abbrev-ref HEAD' , { cwd : process . cwd ( ) , stdio : [ 'ignore' , 'pipe' , 'ignore' ] } )
. toString ( ) . trim ( ) ;
}
}
catch {
// suppress exception
}
const agent = `SillyTavern:${pkgVersion}:Cohee#1207`;
return { agent , pkgVersion , gitRevision , gitBranch } ;
}
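// Illustrative result shape (the version, revision and branch values here are made-up examples):
//   { agent: 'SillyTavern:1.9.0:Cohee#1207', pkgVersion: '1.9.0', gitRevision: 'abc1234', gitBranch: 'release' }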
function tryParse ( str ) {
try {
return json5 . parse ( str ) ;
} catch {
return undefined ;
}
}
function convertToV2(char) {
    // Simulate incoming data from frontend form
    const result = charaFormatData({
        json_data: JSON.stringify(char),
        ch_name: char.name,
        description: char.description,
        personality: char.personality,
        scenario: char.scenario,
        first_mes: char.first_mes,
        mes_example: char.mes_example,
        creator_notes: char.creatorcomment,
        talkativeness: char.talkativeness,
        fav: char.fav,
        creator: char.creator,
        tags: char.tags,
    });

    result.chat = char.chat ?? humanizedISO8601DateTime();
    result.create_date = char.create_date;
    return result;
}
function unsetFavFlag ( char ) {
const _ = require ( 'lodash' ) ;
_ . set ( char , 'fav' , false ) ;
_ . set ( char , 'data.extensions.fav' , false ) ;
}
function readFromV2 ( char ) {
const _ = require ( 'lodash' ) ;
if ( _ . isUndefined ( char . data ) ) {
console . warn ( 'Spec v2 data missing' ) ;
return char ;
}
const fieldMappings = {
name : 'name' ,
description : 'description' ,
personality : 'personality' ,
scenario : 'scenario' ,
first_mes: 'first_mes',
mes_example: 'mes_example',
talkativeness : 'extensions.talkativeness' ,
fav : 'extensions.fav' ,
tags : 'tags' ,
} ;
_ . forEach ( fieldMappings , ( v2Path , charField ) => {
//console.log(`Migrating field: ${charField} from ${v2Path}`);
const v2Value = _ . get ( char . data , v2Path ) ;
if ( _ . isUndefined ( v2Value ) ) {
let defaultValue = undefined ;
// Backfill default values for missing ST extension fields
if ( v2Path === 'extensions.talkativeness' ) {
defaultValue = 0.5 ;
}
if ( v2Path === 'extensions.fav' ) {
defaultValue = false ;
}
if ( ! _ . isUndefined ( defaultValue ) ) {
//console.debug(`Spec v2 extension data missing for field: ${charField}, using default value: ${defaultValue}`);
char [ charField ] = defaultValue ;
} else {
console . debug ( ` Spec v2 data missing for unknown field: ${ charField } ` ) ;
return ;
}
}
if ( ! _ . isUndefined ( char [ charField ] ) && ! _ . isUndefined ( v2Value ) && String ( char [ charField ] ) !== String ( v2Value ) ) {
console . debug ( ` Spec v2 data mismatch with Spec v1 for field: ${ charField } ` , char [ charField ] , v2Value ) ;
}
char [ charField ] = v2Value ;
} ) ;
char['chat'] = char['chat'] ?? humanizedISO8601DateTime();
return char ;
}
//***************** Main functions
function charaFormatData ( data ) {
// This is supposed to save all the foreign keys that ST doesn't care about
const _ = require ( 'lodash' ) ;
const char = tryParse(data.json_data) || {};
// This function uses _.cond() to create a series of conditional checks that return the desired output based on the input data.
// It checks if data.alternate_greetings is an array, a string, or neither, and acts accordingly.
const getAlternateGreetings = data => _.cond([
[d => Array.isArray(d.alternate_greetings), d => d.alternate_greetings],
[d => typeof d.alternate_greetings === 'string', d => [d.alternate_greetings]],
[_.stubTrue, _.constant([])]
])(data);
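// Behavior of getAlternateGreetings, for reference:
//   { alternate_greetings: ['Hi', 'Yo'] } -> ['Hi', 'Yo']
//   { alternate_greetings: 'Hi' }         -> ['Hi']
//   { }                                   -> []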
// Spec V1 fields
_.set(char, 'name', data.ch_name);
_ . set ( char , 'description' , data . description || '' ) ;
_ . set ( char , 'personality' , data . personality || '' ) ;
_ . set ( char , 'scenario' , data . scenario || '' ) ;
_.set(char, 'first_mes', data.first_mes || '');
_.set(char, 'mes_example', data.mes_example || '');
// Old ST extension fields (for backward compatibility, will be deprecated)
_.set(char, 'creatorcomment', data.creator_notes);
_.set(char, 'avatar', 'none');
_.set(char, 'chat', data.ch_name + ' - ' + humanizedISO8601DateTime());
_ . set ( char , 'talkativeness' , data . talkativeness ) ;
_ . set ( char , 'fav' , data . fav == 'true' ) ;
_ . set ( char , 'create_date' , humanizedISO8601DateTime ( ) ) ;
// Spec V2 fields
_ . set ( char , 'spec' , 'chara_card_v2' ) ;
_ . set ( char , 'spec_version' , '2.0' ) ;
_.set(char, 'data.name', data.ch_name);
_ . set ( char , 'data.description' , data . description || '' ) ;
_ . set ( char , 'data.personality' , data . personality || '' ) ;
_ . set ( char , 'data.scenario' , data . scenario || '' ) ;
_.set(char, 'data.first_mes', data.first_mes || '');
_.set(char, 'data.mes_example', data.mes_example || '');
// New V2 fields
_.set(char, 'data.creator_notes', data.creator_notes || '');
_.set(char, 'data.system_prompt', data.system_prompt || '');
_.set(char, 'data.post_history_instructions', data.post_history_instructions || '');
_.set(char, 'data.tags', typeof data.tags == 'string' ? (data.tags.split(',').map(x => x.trim()).filter(x => x)) : data.tags || []);
_.set(char, 'data.creator', data.creator || '');
_.set(char, 'data.character_version', data.character_version || '');
_ . set ( char , 'data.alternate_greetings' , getAlternateGreetings ( data ) ) ;
// ST extension fields to V2 object
_.set(char, 'data.extensions.talkativeness', data.talkativeness);
_.set(char, 'data.extensions.fav', data.fav == 'true');
_ . set ( char , 'data.extensions.world' , data . world || '' ) ;
//_.set(char, 'data.extensions.create_date', humanizedISO8601DateTime());
//_.set(char, 'data.extensions.avatar', 'none');
//_.set(char, 'data.extensions.chat', data.ch_name + ' - ' + humanizedISO8601DateTime());
if ( data . world ) {
try {
const file = readWorldInfoFile ( data . world ) ;
// File was imported - save it to the character book
if ( file && file . originalData ) {
_ . set ( char , 'data.character_book' , file . originalData ) ;
}
// File was not imported - convert the world info to the character book
if ( file && file . entries ) {
_ . set ( char , 'data.character_book' , convertWorldInfoToCharacterBook ( data . world , file . entries ) ) ;
}
} catch {
console . debug ( ` Failed to read world info file: ${ data . world } . Character book will not be available. ` ) ;
}
}
return char ;
}
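// For reference, charaFormatData() always stamps the V2 envelope on the result
// (spec: 'chara_card_v2', spec_version: '2.0') and mirrors the same fields under both the
// legacy top-level keys and the nested `data` object (plus ST extras under `data.extensions`),
// so downstream readers can handle either card format.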
app . post ( "/createcharacter" , urlencodedParser , function ( request , response ) {
if ( ! request . body ) return response . sendStatus ( 400 ) ;
request.body.ch_name = sanitize(request.body.ch_name);
const char = JSON.stringify(charaFormatData(request.body));
const internalName = getPngName(request.body.ch_name);
const avatarName = `${internalName}.png`;
const defaultAvatar = './public/img/ai4.png' ;
const chatsPath = directories . chats + internalName ; //path.join(chatsPath, internalName);
if ( ! fs . existsSync ( chatsPath ) ) fs . mkdirSync ( chatsPath ) ;
if ( ! request . file ) {
charaWrite ( defaultAvatar , char , internalName , response , avatarName ) ;
} else {
const crop = tryParse ( request . query . crop ) ;
const uploadPath = path . join ( "./uploads/" , request . file . filename ) ;
charaWrite ( uploadPath , char , internalName , response , avatarName , crop ) ;
}
} ) ;
app . post ( '/renamechat' , jsonParser , async function ( request , response ) {
if (!request.body || !request.body.original_file || !request.body.renamed_file) {
return response.sendStatus(400);
}

const pathToFolder = request.body.is_group
? directories.groupChats
: path.join(directories.chats, String(request.body.avatar_url).replace('.png', ''));
const pathToOriginalFile = path.join(pathToFolder, request.body.original_file);
const pathToRenamedFile = path.join(pathToFolder, request.body.renamed_file);
console . log ( 'Old chat name' , pathToOriginalFile ) ;
console . log ( 'New chat name' , pathToRenamedFile ) ;
if ( ! fs . existsSync ( pathToOriginalFile ) || fs . existsSync ( pathToRenamedFile ) ) {
console.log('Either the source or the destination file is not available');
return response . status ( 400 ) . send ( { error : true } ) ;
}
console . log ( 'Successfully renamed.' ) ;
fs . renameSync ( pathToOriginalFile , pathToRenamedFile ) ;
return response . send ( { ok : true } ) ;
} ) ;
app . post ( "/renamecharacter" , jsonParser , async function ( request , response ) {
if (!request.body.avatar_url || !request.body.new_name) {
return response . sendStatus ( 400 ) ;
}
const oldAvatarName = request.body.avatar_url;
const newName = sanitize(request.body.new_name);
const oldInternalName = path.parse(request.body.avatar_url).name;
const newInternalName = getPngName ( newName ) ;
const newAvatarName = `${newInternalName}.png`;
const oldAvatarPath = path . join ( charactersPath , oldAvatarName ) ;
const oldChatsPath = path . join ( chatsPath , oldInternalName ) ;
const newChatsPath = path . join ( chatsPath , newInternalName ) ;
try {
const _ = require ( 'lodash' ) ;
// Read the old file, replace the name in it
const rawOldData = await charaRead ( oldAvatarPath ) ;
const oldData = getCharaCardV2 ( json5 . parse ( rawOldData ) ) ;
_ . set ( oldData , 'data.name' , newName ) ;
_ . set ( oldData , 'name' , newName ) ;
const newData = JSON . stringify ( oldData ) ;
// Write data to new location
await charaWrite ( oldAvatarPath , newData , newInternalName ) ;
// Rename chats folder
if ( fs . existsSync ( oldChatsPath ) && ! fs . existsSync ( newChatsPath ) ) {
fs . renameSync ( oldChatsPath , newChatsPath ) ;
}
// Remove the old character file
fs . rmSync ( oldAvatarPath ) ;
// Return new avatar name to ST
return response . send ( { 'avatar' : newAvatarName } ) ;
}
catch ( err ) {
console . error ( err ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( "/editcharacter" , urlencodedParser , async function ( request , response ) {
if ( ! request . body ) {
console.error('Error: no request body detected');
response.status(400).send('Error: no request body detected');
return ;
}
if (request.body.ch_name === '' || request.body.ch_name === undefined || request.body.ch_name === '.') {
console . error ( 'Error: invalid name.' ) ;
response . status ( 400 ) . send ( 'Error: invalid name.' ) ;
return ;
}
let char = charaFormatData(request.body);
char.chat = request.body.chat;
char.create_date = request.body.create_date;
char = JSON.stringify(char);
let target_img = (request.body.avatar_url).replace('.png', '');
try {
if (!request.file) {
const avatarPath = path.join(charactersPath, request.body.avatar_url);
await charaWrite(avatarPath, char, target_img, response, 'Character saved');
} else {
const crop = tryParse(request.query.crop);
const newAvatarPath = path.join("./uploads/", request.file.filename);
invalidateThumbnail('avatar', request.body.avatar_url);
await charaWrite(newAvatarPath, char, target_img, response, 'Character saved', crop);
}
}
catch {
console.error('An error occurred, character edit invalidated.');
}
} ) ;
/**
 * Handle a POST request to edit a character attribute.
 *
 * This function reads the character data from a file, updates the specified attribute,
 * and writes the updated data back to the file.
 *
 * @param {Object} request - The HTTP request object.
 * @param {Object} response - The HTTP response object.
 * @returns {void}
 */
app . post ( "/editcharacterattribute" , jsonParser , async function ( request , response ) {
console . log ( request . body ) ;
if ( ! request . body ) {
console.error('Error: no request body detected');
response.status(400).send('Error: no request body detected');
return ;
}
if (request.body.ch_name === '' || request.body.ch_name === undefined || request.body.ch_name === '.') {
console . error ( 'Error: invalid name.' ) ;
response . status ( 400 ) . send ( 'Error: invalid name.' ) ;
return ;
}
try {
const avatarPath = path.join(charactersPath, request.body.avatar_url);
charaRead ( avatarPath ) . then ( ( char ) => {
char = JSON . parse ( char ) ;
//check if the field exists
if ( char [ request . body . field ] === undefined && char . data [ request . body . field ] === undefined ) {
console . error ( 'Error: invalid field.' ) ;
response . status ( 400 ) . send ( 'Error: invalid field.' ) ;
return ;
}
char [ request . body . field ] = request . body . value ;
char . data [ request . body . field ] = request . body . value ;
char = JSON . stringify ( char ) ;
return { char } ;
} ) . then ( ( { char } ) => {
charaWrite(avatarPath, char, (request.body.avatar_url).replace('.png', ''), response, 'Character saved');
}).catch((err) => {
console.error('An error occurred, character edit invalidated.', err);
});
}
catch {
console.error('An error occurred, character edit invalidated.');
}
} ) ;
app . post ( "/deletecharacter" , jsonParser , async function ( request , response ) {
if (!request.body || !request.body.avatar_url) {
return response . sendStatus ( 400 ) ;
}
if (request.body.avatar_url !== sanitize(request.body.avatar_url)) {
console . error ( 'Malicious filename prevented' ) ;
return response . sendStatus ( 403 ) ;
}
const avatarPath = charactersPath + request.body.avatar_url;
if ( ! fs . existsSync ( avatarPath ) ) {
return response . sendStatus ( 400 ) ;
}
fs . rmSync ( avatarPath ) ;
invalidateThumbnail('avatar', request.body.avatar_url);
let dir_name = (request.body.avatar_url.replace('.png', ''));
if (!dir_name.length) {
console . error ( 'Malicious dirname prevented' ) ;
return response . sendStatus ( 403 ) ;
}
if (request.body.delete_chats == true) {
try {
await fs.promises.rm(path.join(chatsPath, sanitize(dir_name)), { recursive: true, force: true })
} catch ( err ) {
console . error ( err ) ;
return response . sendStatus ( 500 ) ;
}
}
return response . sendStatus ( 200 ) ;
} ) ;
async function charaWrite(img_url, data, target_img, response = undefined, mes = 'ok', crop = undefined) {
    try {
        // Read the image, resize, and save it as a PNG into the buffer
        const image = await tryReadImage(img_url, crop);

        // Get the chunks
        const chunks = extract(image);
        const tEXtChunks = chunks.filter(chunk => chunk.name === 'tEXt');

        // Remove all existing tEXt chunks
        for (let tEXtChunk of tEXtChunks) {
            chunks.splice(chunks.indexOf(tEXtChunk), 1);
        }
        // Add new chunks before the IEND chunk
        const base64EncodedData = Buffer.from(data, 'utf8').toString('base64');
        chunks.splice(-1, 0, PNGtext.encode('chara', base64EncodedData));
        //chunks.splice(-1, 0, text.encode('lorem', 'ipsum'));
        fs.writeFileSync(charactersPath + target_img + '.png', Buffer.from(encode(chunks)));
        if (response !== undefined) response.send(mes);
        return true;
    } catch (err) {
        console.log(err);
        if (response !== undefined) response.status(500).send(err);
        return false;
    }
}
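// Note on the storage format: the character JSON is base64-encoded and embedded as a
// 'chara' tEXt chunk inside the avatar PNG; charaRead() below delegates to
// character-card-parser, which reads the card data back out of the image.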
async function tryReadImage(img_url, crop) {
try {
let rawImg = await jimp.read(img_url);
let final_width = rawImg.bitmap.width, final_height = rawImg.bitmap.height;
// Apply crop if defined
if (typeof crop == 'object' && [crop.x, crop.y, crop.width, crop.height].every(x => typeof x === 'number')) {
rawImg = rawImg.crop(crop.x, crop.y, crop.width, crop.height);
// Apply standard resize if requested
if (crop.want_resize) {
final_width = AVATAR_WIDTH;
final_height = AVATAR_HEIGHT;
}
}

const image = await rawImg.cover(final_width, final_height).getBufferAsync(jimp.MIME_PNG);
return image ;
}
// If it's an unsupported type of image (APNG) - just read the file as buffer
catch {
return fs . readFileSync ( img _url ) ;
}
}
async function charaRead(img_url, input_format) {
    return characterCardParser.parse(img_url, input_format);
}
/**
 * calculateChatSize - Calculates the total chat size for a given character.
 *
 * @param {string} charDir The directory where the chats are stored.
 * @return {{ chatSize: number, dateLastChat: number }} The total chat size and the timestamp of the most recent chat.
 */
const calculateChatSize = ( charDir ) => {
let chatSize = 0 ;
let dateLastChat = 0 ;
if ( fs . existsSync ( charDir ) ) {
const chats = fs . readdirSync ( charDir ) ;
if ( Array . isArray ( chats ) && chats . length ) {
for ( const chat of chats ) {
const chatStat = fs . statSync ( path . join ( charDir , chat ) ) ;
chatSize += chatStat . size ;
dateLastChat = Math . max ( dateLastChat , chatStat . mtimeMs ) ;
}
}
}
return { chatSize , dateLastChat } ;
}
// Calculate the total string length of the data object
const calculateDataSize = (data) => {
    return typeof data === 'object' ? Object.values(data).reduce((acc, val) => acc + new String(val).length, 0) : 0;
}
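// Worked example: calculateDataSize({ name: 'Bob', description: 'Hi' }) -> 3 + 2 = 5;
// non-object inputs return 0.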
/**
 * processCharacter - Process a given character, read its data and calculate its statistics.
 *
 * @param {string} item The name of the character.
 * @param {number} i The index of the character in the characters list.
 * @return {Promise} A Promise that resolves when the character processing is done.
 */
const processCharacter = async ( item , i ) => {
try {
const img_data = await charaRead(charactersPath + item);
let jsonObject = getCharaCardV2(json5.parse(img_data));
jsonObject . avatar = item ;
characters [ i ] = jsonObject ;
characters[i]['json_data'] = img_data;
const charStat = fs . statSync ( path . join ( charactersPath , item ) ) ;
characters [ i ] [ 'date_added' ] = charStat . birthtimeMs ;
const char_dir = path.join(chatsPath, item.replace('.png', ''));
const { chatSize, dateLastChat } = calculateChatSize(char_dir);
characters [ i ] [ 'chat_size' ] = chatSize ;
characters [ i ] [ 'date_last_chat' ] = dateLastChat ;
characters[i]['data_size'] = calculateDataSize(jsonObject?.data);
}
catch ( err ) {
characters[i] = {
date_added: 0,
date_last_chat: 0,
chat_size: 0
};
console . log ( ` Could not process character: ${ item } ` ) ;
if ( err instanceof SyntaxError ) {
console . log ( "String [" + i + "] is not valid JSON!" ) ;
} else {
console . log ( "An unexpected error occurred: " , err ) ;
}
}
}
/**
 * HTTP POST endpoint for the "/getcharacters" route.
 *
 * This endpoint is responsible for reading character files from the `charactersPath` directory,
 * parsing character data, calculating stats for each character and responding with the data.
 * Stats are calculated only on the first run, on subsequent runs the stats are fetched from
 * the `charStats` variable.
 * The stats are calculated by the `calculateStats` function.
 * The characters are processed by the `processCharacter` function.
 *
 * @param {object} request The HTTP request object.
 * @param {object} response The HTTP response object.
 * @return {undefined} Does not return a value.
 */
app . post ( "/getcharacters" , jsonParser , function ( request , response ) {
fs . readdir ( charactersPath , async ( err , files ) => {
if ( err ) {
console . error ( err ) ;
return ;
}
const pngFiles = files . filter ( file => file . endsWith ( '.png' ) ) ;
characters = { } ;
let processingPromises = pngFiles . map ( ( file , index ) => processCharacter ( file , index ) ) ;
await Promise . all ( processingPromises ) ; performance . mark ( 'B' ) ;
response . send ( JSON . stringify ( characters ) ) ;
} ) ;
} ) ;
/**
 * Handle a POST request to get the stats object
 *
 * This function returns the stats object that was calculated by the `calculateStats` function.
 *
 * @param {Object} request - The HTTP request object.
 * @param {Object} response - The HTTP response object.
 * @returns {void}
 */
app . post ( "/getstats" , jsonParser , function ( request , response ) {
response . send ( JSON . stringify ( statsHelpers . getCharStats ( ) ) ) ;
} ) ;
/**
 * Handle a POST request to update the stats object
 *
 * This function updates the stats object with the data from the request body.
 *
 * @param {Object} request - The HTTP request object.
 * @param {Object} response - The HTTP response object.
 * @returns {void}
 */
app . post ( "/updatestats" , jsonParser , function ( request , response ) {
if ( ! request . body ) return response . sendStatus ( 400 ) ;
statsHelpers . setCharStats ( request . body ) ;
return response . sendStatus ( 200 ) ;
} ) ;
app . post ( "/getbackgrounds" , jsonParser , function ( request , response ) {
var images = getImages ( "public/backgrounds" ) ;
response . send ( JSON . stringify ( images ) ) ;
} ) ;
app . post ( "/iscolab" , jsonParser , function ( request , response ) {
let send_data = false;
if (is_colab) {
send_data = String(process.env.colaburl).trim();
}
response . send ( { colaburl : send _data } ) ;
} ) ;
app . post ( "/getuseravatars" , jsonParser , function ( request , response ) {
var images = getImages ( "public/User Avatars" ) ;
response . send ( JSON . stringify ( images ) ) ;
} ) ;
app . post ( '/deleteuseravatar' , jsonParser , function ( request , response ) {
if ( ! request . body ) return response . sendStatus ( 400 ) ;
if ( request . body . avatar !== sanitize ( request . body . avatar ) ) {
console . error ( 'Malicious avatar name prevented' ) ;
return response . sendStatus ( 403 ) ;
}
const fileName = path . join ( directories . avatars , sanitize ( request . body . avatar ) ) ;
if ( fs . existsSync ( fileName ) ) {
fs . rmSync ( fileName ) ;
return response . send ( { result : 'ok' } ) ;
}
return response . sendStatus ( 404 ) ;
} ) ;
app . post ( "/setbackground" , jsonParser , function ( request , response ) {
var bg = "#bg1 {background-image: url('../backgrounds/" + request . body . bg + "');}" ;
fs . writeFile ( 'public/css/bg_load.css' , bg , 'utf8' , function ( err ) {
if ( err ) {
response . send ( err ) ;
return console . log ( err ) ;
} else {
//response.redirect("/");
response . send ( { result : 'ok' } ) ;
}
} ) ;
} ) ;
app . post ( "/delbackground" , jsonParser , function ( request , response ) {
if ( ! request . body ) return response . sendStatus ( 400 ) ;
if ( request . body . bg !== sanitize ( request . body . bg ) ) {
console . error ( 'Malicious bg name prevented' ) ;
return response . sendStatus ( 403 ) ;
}
const fileName = path . join ( 'public/backgrounds/' , sanitize ( request . body . bg ) ) ;
if ( ! fs . existsSync ( fileName ) ) {
console . log ( 'BG file not found' ) ;
return response . sendStatus ( 400 ) ;
}
fs . rmSync ( fileName ) ;
invalidateThumbnail ( 'bg' , request . body . bg ) ;
return response . send ( 'ok' ) ;
} ) ;
app . post ( "/delchat" , jsonParser , function ( request , response ) {
console . log ( '/delchat entered' ) ;
if ( ! request . body ) {
console . log ( 'no request body seen' ) ;
return response . sendStatus ( 400 ) ;
}
if ( request . body . chatfile !== sanitize ( request . body . chatfile ) ) {
console . error ( 'Malicious chat name prevented' ) ;
return response . sendStatus ( 403 ) ;
}
const dirName = String(request.body.avatar_url).replace('.png', '');
const fileName = `${chatsPath + dirName}/${sanitize(String(request.body.chatfile))}`;
const chatFileExists = fs . existsSync ( fileName ) ;
if ( ! chatFileExists ) {
console . log ( ` Chat file not found ' ${ fileName } ' ` ) ;
return response . sendStatus ( 400 ) ;
} else {
console . log ( 'found the chat file: ' + fileName ) ;
/* fs.unlinkSync(fileName); */
fs . rmSync ( fileName ) ;
console . log ( 'deleted chat file: ' + fileName ) ;
}
return response . send ( 'ok' ) ;
} ) ;
app . post ( '/renamebackground' , jsonParser , function ( request , response ) {
if ( ! request . body ) return response . sendStatus ( 400 ) ;
const oldFileName = path.join('public/backgrounds/', sanitize(request.body.old_bg));
const newFileName = path.join('public/backgrounds/', sanitize(request.body.new_bg));
if ( ! fs . existsSync ( oldFileName ) ) {
console . log ( 'BG file not found' ) ;
return response . sendStatus ( 400 ) ;
}
if ( fs . existsSync ( newFileName ) ) {
console . log ( 'New BG file already exists' ) ;
return response . sendStatus ( 400 ) ;
}
fs . renameSync ( oldFileName , newFileName ) ;
invalidateThumbnail('bg', request.body.old_bg);
return response . send ( 'ok' ) ;
} ) ;
app.post("/downloadbackground", urlencodedParser, function (request, response) {
    response_dw_bg = response;
    if (!request.body || !request.file) return response.sendStatus(400);

    const img_path = path.join("uploads/", request.file.filename);
    const filename = request.file.originalname;

    try {
        fs.copyFileSync(img_path, path.join('public/backgrounds/', filename));
        invalidateThumbnail('bg', filename);
        response_dw_bg.send(filename);
    } catch (err) {
        console.error(err);
        response_dw_bg.sendStatus(500);
    }
});
app . post ( "/savesettings" , jsonParser , function ( request , response ) {
fs . writeFile ( 'public/settings.json' , JSON . stringify ( request . body , null , 4 ) , 'utf8' , function ( err ) {
if ( err ) {
response . send ( err ) ;
console . log ( err ) ;
} else {
response . send ( { result : "ok" } ) ;
}
} ) ;
/* fs.writeFile('public/settings.json', JSON.stringify(request.body), 'utf8', function (err) {
    if (err) {
        response.send(err);
        return console.log(err);
        //response.send(err);
    } else {
        //response.redirect("/");
        response.send({ result: "ok" });
    }
}); */
} ) ;
function getCharaCardV2 ( jsonObject ) {
if ( jsonObject . spec === undefined ) {
jsonObject = convertToV2 ( jsonObject ) ;
} else {
jsonObject = readFromV2 ( jsonObject ) ;
}
return jsonObject ;
}
function readAndParseFromDirectory ( directoryPath , fileExtension = '.json' ) {
const files = fs
. readdirSync ( directoryPath )
. filter ( x => path . parse ( x ) . ext == fileExtension )
. sort ( ) ;
const parsedFiles = [ ] ;
files . forEach ( item => {
try {
const file = fs . readFileSync ( path . join ( directoryPath , item ) , 'utf-8' ) ;
parsedFiles . push ( fileExtension == '.json' ? json5 . parse ( file ) : file ) ;
}
catch {
// skip
}
} ) ;
return parsedFiles ;
}
function sortByModifiedDate ( directory ) {
return ( a , b ) => new Date ( fs . statSync ( ` ${ directory } / ${ b } ` ) . mtime ) - new Date ( fs . statSync ( ` ${ directory } / ${ a } ` ) . mtime ) ;
}
function sortByName ( _ ) {
return ( a , b ) => a . localeCompare ( b ) ;
}
function readPresetsFromDirectory ( directoryPath , options = { } ) {
const {
sortFunction ,
removeFileExtension = false
} = options ;
const files = fs . readdirSync ( directoryPath ) . sort ( sortFunction ) ;
const fileContents = [ ] ;
const fileNames = [ ] ;
files . forEach ( item => {
try {
const file = fs . readFileSync ( path . join ( directoryPath , item ) , 'utf8' ) ;
json5 . parse ( file ) ;
fileContents . push ( file ) ;
fileNames . push ( removeFileExtension ? item . replace ( /\.[^/.]+$/ , '' ) : item ) ;
} catch {
// skip
console . log ( ` ${ item } is not a valid JSON ` ) ;
}
} ) ;
return { fileContents , fileNames } ;
}
// Wintermute's code
app . post ( '/getsettings' , jsonParser , ( request , response ) => {
const settings = fs . readFileSync ( 'public/settings.json' , 'utf8' , ( err , data ) => {
if ( err ) return response . sendStatus ( 500 ) ;
return data ;
} ) ;
// NovelAI Settings
const { fileContents: novelai_settings, fileNames: novelai_setting_names }
= readPresetsFromDirectory(directories.novelAI_Settings, {
sortFunction: sortByName(directories.novelAI_Settings),
removeFileExtension: true
});
// OpenAI Settings
const { fileContents: openai_settings, fileNames: openai_setting_names }
= readPresetsFromDirectory(directories.openAI_Settings, {
sortFunction: sortByModifiedDate(directories.openAI_Settings), removeFileExtension: true
} ) ;
// TextGenerationWebUI Settings
const { fileContents: textgenerationwebui_presets, fileNames: textgenerationwebui_preset_names }
= readPresetsFromDirectory(directories.textGen_Settings, {
sortFunction: sortByName(directories.textGen_Settings), removeFileExtension: true
} ) ;
//Kobold
const { fileContents: koboldai_settings, fileNames: koboldai_setting_names }
= readPresetsFromDirectory(directories.koboldAI_Settings, {
sortFunction: sortByName(directories.koboldAI_Settings), removeFileExtension: true
})
const worldFiles = fs
. readdirSync ( directories . worlds )
. filter ( file => path . extname ( file ) . toLowerCase ( ) === '.json' )
. sort ( ( a , b ) => a < b ) ;
const world_names = worldFiles.map(item => path.parse(item).name);
const themes = readAndParseFromDirectory ( directories . themes ) ;
const movingUIPresets = readAndParseFromDirectory ( directories . movingUI ) ;
const quickReplyPresets = readAndParseFromDirectory ( directories . quickreplies ) ;
const instruct = readAndParseFromDirectory ( directories . instruct ) ;
const context = readAndParseFromDirectory ( directories . context ) ;
response . send ( {
settings ,
koboldai_settings,
koboldai_setting_names,
world_names,
novelai_settings,
novelai_setting_names,
openai_settings,
openai_setting_names,
textgenerationwebui_presets,
textgenerationwebui_preset_names,
themes,
movingUIPresets,
quickReplyPresets,
instruct,
context,
enable_extensions: enableExtensions,
} ) ;
} ) ;
app . post ( '/getworldinfo' , jsonParser , ( request , response ) => {
if ( ! request . body ? . name ) {
return response . sendStatus ( 400 ) ;
}
const file = readWorldInfoFile ( request . body . name ) ;
return response . send ( file ) ;
} ) ;
app . post ( '/deleteworldinfo' , jsonParser , ( request , response ) => {
if ( ! request . body ? . name ) {
return response . sendStatus ( 400 ) ;
}
const worldInfoName = request . body . name ;
const filename = sanitize ( ` ${ worldInfoName } .json ` ) ;
const pathToWorldInfo = path . join ( directories . worlds , filename ) ;
if ( ! fs . existsSync ( pathToWorldInfo ) ) {
throw new Error ( ` World info file ${ filename } doesn't exist. ` ) ;
}
fs . rmSync ( pathToWorldInfo ) ;
return response . sendStatus ( 200 ) ;
} ) ;
app . post ( '/savetheme' , jsonParser , ( request , response ) => {
if ( ! request . body || ! request . body . name ) {
return response . sendStatus ( 400 ) ;
}
const filename = path . join ( directories . themes , sanitize ( request . body . name ) + '.json' ) ;
fs . writeFileSync ( filename , JSON . stringify ( request . body , null , 4 ) , 'utf8' ) ;
return response . sendStatus ( 200 ) ;
} ) ;
app . post ( '/savemovingui' , jsonParser , ( request , response ) => {
if ( ! request . body || ! request . body . name ) {
return response . sendStatus ( 400 ) ;
}
const filename = path . join ( directories . movingUI , sanitize ( request . body . name ) + '.json' ) ;
fs . writeFileSync ( filename , JSON . stringify ( request . body , null , 4 ) , 'utf8' ) ;
return response . sendStatus ( 200 ) ;
} ) ;
app . post ( '/savequickreply' , jsonParser , ( request , response ) => {
if ( ! request . body || ! request . body . name ) {
return response . sendStatus ( 400 ) ;
}
const filename = path . join ( directories . quickreplies , sanitize ( request . body . name ) + '.json' ) ;
fs . writeFileSync ( filename , JSON . stringify ( request . body , null , 4 ) , 'utf8' ) ;
return response . sendStatus ( 200 ) ;
} ) ;
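// Converts SillyTavern world info entries into the V2 character card "character_book" format,
// mapping each entry's keys, content, order and position; fields with no direct V2 equivalent
// (displayIndex, probability, etc.) are stashed under "extensions".
// e.g. convertWorldInfoToCharacterBook ( worldName , worldInfo . entries ) when embedding a book into a card.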
function convertWorldInfoToCharacterBook ( name , entries ) {
const result = { entries : [ ] , name } ;
for ( const index in entries ) {
const entry = entries [ index ] ;
const originalEntry = {
id : entry . uid ,
keys : entry . key ,
secondary _keys : entry . keysecondary ,
comment : entry . comment ,
content : entry . content ,
constant : entry . constant ,
selective : entry . selective ,
insertion _order : entry . order ,
enabled : ! entry . disable ,
position : entry . position == 0 ? 'before_char' : 'after_char' ,
extensions : {
position : entry . position ,
exclude _recursion : entry . excludeRecursion ,
display _index : entry . displayIndex ,
probability : entry . probability ? ? null ,
useProbability : entry . useProbability ? ? false ,
}
} ;
result . entries . push ( originalEntry ) ;
}
return result ;
}
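// Reads and parses a world info JSON file from the worlds directory.
// Returns an empty entries object when no name is given; throws if the file does not exist.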
function readWorldInfoFile ( worldInfoName ) {
if ( ! worldInfoName ) {
return { entries : { } } ;
}
const filename = ` ${ worldInfoName } .json ` ;
const pathToWorldInfo = path . join ( directories . worlds , filename ) ;
if ( ! fs . existsSync ( pathToWorldInfo ) ) {
throw new Error ( ` World info file ${ filename } doesn't exist. ` ) ;
}
const worldInfoText = fs . readFileSync ( pathToWorldInfo , 'utf8' ) ;
const worldInfo = json5 . parse ( worldInfoText ) ;
return worldInfo ;
}
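// Lists all image files (by MIME type) in the given directory, sorted with a locale-aware collator.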
function getImages ( path ) {
return fs
. readdirSync ( path )
. filter ( file => {
const type = mime . lookup ( file ) ;
return type && type . startsWith ( 'image/' ) ;
} )
. sort ( Intl . Collator ( ) . compare ) ;
}
//***********Novel.ai API
app . post ( "/getstatus_novelai" , jsonParser , function ( request , response _getstatus _novel = response ) {
if ( ! request . body ) return response _getstatus _novel . sendStatus ( 400 ) ;
const api _key _novel = readSecret ( SECRET _KEYS . NOVEL ) ;
if ( ! api _key _novel ) {
return response _getstatus _novel . sendStatus ( 401 ) ;
}
var data = { } ;
var args = {
data : data ,
headers : { "Content-Type" : "application/json" , "Authorization" : "Bearer " + api _key _novel }
} ;
client . get ( api _novelai + "/user/subscription" , args , function ( data , response ) {
if ( response . statusCode == 200 ) {
//console.log(data);
response _getstatus _novel . send ( data ) ;
}
else {
if ( response . statusCode == 401 ) {
console . log ( 'Access Token is incorrect.' ) ;
}
console . log ( data ) ;
response _getstatus _novel . send ( { error : true } ) ;
}
} ) . on ( 'error' , function ( ) {
response _getstatus _novel . send ( { error : true } ) ;
} ) ;
} ) ;
app . post ( "/generate_novelai" , jsonParser , async function ( request , response _generate _novel = response ) {
if ( ! request . body ) return response _generate _novel . sendStatus ( 400 ) ;
const api _key _novel = readSecret ( SECRET _KEYS . NOVEL ) ;
if ( ! api _key _novel ) {
return response _generate _novel . sendStatus ( 401 ) ;
}
const controller = new AbortController ( ) ;
request . socket . removeAllListeners ( 'close' ) ;
request . socket . on ( 'close' , function ( ) {
controller . abort ( ) ;
} ) ;
const novelai = require ( './src/novelai' ) ;
const isNewModel = ( request . body . model . includes ( 'clio' ) || request . body . model . includes ( 'kayra' ) ) ;
const isKrake = request . body . model . includes ( 'krake' ) ;
const data = {
"input" : request . body . input ,
"model" : request . body . model ,
"parameters" : {
"use_string" : request . body . use _string ,
"temperature" : request . body . temperature ,
"max_length" : request . body . max _length ,
"min_length" : request . body . min _length ,
"tail_free_sampling" : request . body . tail _free _sampling ,
"repetition_penalty" : request . body . repetition _penalty ,
"repetition_penalty_range" : request . body . repetition _penalty _range ,
"repetition_penalty_slope" : request . body . repetition _penalty _slope ,
"repetition_penalty_frequency" : request . body . repetition _penalty _frequency ,
"repetition_penalty_presence" : request . body . repetition _penalty _presence ,
"repetition_penalty_whitelist" : isNewModel ? novelai . repPenaltyAllowList : null ,
"top_a" : request . body . top _a ,
"top_p" : request . body . top _p ,
"top_k" : request . body . top _k ,
"typical_p" : request . body . typical _p ,
"cfg_scale" : request . body . cfg _scale ,
"cfg_uc" : request . body . cfg _uc ,
"phrase_rep_pen" : request . body . phrase _rep _pen ,
"stop_sequences" : request . body . stop _sequences ,
//"stop_sequences": {{187}},
"bad_words_ids" : isNewModel ? novelai . badWordsList : ( isKrake ? novelai . krakeBadWordsList : novelai . euterpeBadWordsList ) ,
"logit_bias_exp" : isNewModel ? novelai . logitBiasExp : null ,
//generate_until_sentence = true;
"use_cache" : request . body . use _cache ,
"use_string" : true ,
"return_full_text" : request . body . return _full _text ,
"prefix" : request . body . prefix ,
"order" : request . body . order
}
} ;
const util = require ( 'util' ) ;
console . log ( util . inspect ( data , { depth : 4 } ) )
const args = {
body : JSON . stringify ( data ) ,
headers : { "Content-Type" : "application/json" , "Authorization" : "Bearer " + api _key _novel } ,
signal : controller . signal ,
} ;
try {
const fetch = require ( 'node-fetch' ) . default ;
const url = request . body . streaming ? ` ${ api _novelai } /ai/generate-stream ` : ` ${ api _novelai } /ai/generate ` ;
const response = await fetch ( url , { method : 'POST' , timeout : 0 , ... args } ) ;
if ( request . body . streaming ) {
// Pipe remote SSE stream to Express response
response . body . pipe ( response _generate _novel ) ;
request . socket . on ( 'close' , function ( ) {
response . body . destroy ( ) ; // Close the remote stream
response _generate _novel . end ( ) ; // End the Express response
} ) ;
response . body . on ( 'end' , function ( ) {
console . log ( "Streaming request finished" ) ;
response _generate _novel . end ( ) ;
} ) ;
} else {
if ( ! response . ok ) {
const text = await response . text ( ) ;
let message = text ;
console . log ( ` Novel API returned error: ${ response . status } ${ response . statusText } ${ text } ` ) ;
try {
const data = JSON . parse ( text ) ;
message = data . message ;
}
catch {
// ignore
}
return response _generate _novel . status ( response . status ) . send ( { error : { message } } ) ;
}
const data = await response . json ( ) ;
return response _generate _novel . send ( data ) ;
}
} catch ( error ) {
return response _generate _novel . send ( { error : true } ) ;
}
} ) ;
app . post ( "/getallchatsofcharacter" , jsonParser , function ( request , response ) {
if ( ! request . body ) return response . sendStatus ( 400 ) ;
var char _dir = ( request . body . avatar _url ) . replace ( '.png' , '' )
fs . readdir ( chatsPath + char _dir , ( err , files ) => {
if ( err ) {
console . log ( 'found error in history loading' ) ;
console . error ( err ) ;
response . send ( { error : true } ) ;
return ;
}
// filter for JSONL files
const jsonFiles = files . filter ( file => path . extname ( file ) === '.jsonl' ) ;
// sort the files by name
//jsonFiles.sort().reverse();
// print the sorted file names
var chatData = { } ;
let ii = jsonFiles . length ; //this is the number of files belonging to the character
if ( ii !== 0 ) {
//console.log('found '+ii+' chat logs to load');
for ( let i = jsonFiles . length - 1 ; i >= 0 ; i -- ) {
const file = jsonFiles [ i ] ;
const fileStream = fs . createReadStream ( chatsPath + char _dir + '/' + file ) ;
const fullPathAndFile = chatsPath + char _dir + '/' + file
const stats = fs . statSync ( fullPathAndFile ) ;
const fileSizeInKB = ( stats . size / 1024 ) . toFixed ( 2 ) + "kb" ;
//console.log(fileSizeInKB);
const rl = readline . createInterface ( {
input : fileStream ,
crlfDelay : Infinity
} ) ;
let lastLine ;
let itemCounter = 0 ;
rl . on ( 'line' , ( line ) => {
itemCounter ++ ;
lastLine = line ;
} ) ;
rl . on ( 'close' , ( ) => {
ii -- ;
if ( lastLine ) {
let jsonData = json5 . parse ( lastLine ) ;
if ( jsonData . name !== undefined || jsonData . character _name !== undefined ) {
chatData [ i ] = { } ;
chatData [ i ] [ 'file_name' ] = file ;
chatData [ i ] [ 'file_size' ] = fileSizeInKB ;
chatData [ i ] [ 'chat_items' ] = itemCounter - 1 ;
chatData [ i ] [ 'mes' ] = jsonData [ 'mes' ] || '[The chat is empty]' ;
chatData [ i ] [ 'last_mes' ] = jsonData [ 'send_date' ] || Date . now ( ) ;
}
}
if ( ii === 0 ) {
//console.log('ii count went to zero, responding with chatData');
response . send ( chatData ) ;
}
//console.log('successfully closing getallchatsofcharacter');
rl . close ( ) ;
} ) ;
} ;
} else {
//console.log('Found No Chats. Exiting Load Routine.');
response . send ( { error : true } ) ;
} ;
} )
} ) ;
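// Picks a non-colliding PNG base name for a new character by appending an incrementing
// numeric suffix until no file with that name exists in the characters folder.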
function getPngName ( file ) {
let i = 1 ;
let base _name = file ;
while ( fs . existsSync ( charactersPath + file + '.png' ) ) {
file = base _name + i ;
i ++ ;
}
return file ;
}
app . post ( "/importcharacter" , urlencodedParser , async function ( request , response ) {
if ( ! request . body ) return response . sendStatus ( 400 ) ;
let png _name = '' ;
let filedata = request . file ;
let uploadPath = path . join ( './uploads' , filedata . filename ) ;
var format = request . body . file _type ;
const defaultAvatarPath = './public/img/ai4.png' ;
//console.log(format);
if ( filedata ) {
if ( format == 'json' ) {
fs . readFile ( uploadPath , 'utf8' , async ( err , data ) => {
if ( err ) {
console . log ( err ) ;
response . send ( { error : true } ) ;
}
let jsonData = json5 . parse ( data ) ;
if ( jsonData . spec !== undefined ) {
console . log ( 'importing from v2 json' ) ;
importRisuSprites ( jsonData ) ;
unsetFavFlag ( jsonData ) ;
jsonData = readFromV2 ( jsonData ) ;
png _name = getPngName ( jsonData . data ? . name || jsonData . name ) ;
let char = JSON . stringify ( jsonData ) ;
charaWrite ( defaultAvatarPath , char , png _name , response , { file _name : png _name } ) ;
} else if ( jsonData . name !== undefined ) {
console . log ( 'importing from v1 json' ) ;
jsonData . name = sanitize ( jsonData . name ) ;
if ( jsonData . creator _notes ) {
jsonData . creator _notes = jsonData . creator _notes . replace ( "Creator's notes go here." , "" ) ;
}
png _name = getPngName ( jsonData . name ) ;
let char = {
"name" : jsonData . name ,
"description" : jsonData . description ? ? '' ,
"creatorcomment" : jsonData . creatorcomment ? ? jsonData . creator _notes ? ? '' ,
"personality" : jsonData . personality ? ? '' ,
"first_mes" : jsonData . first _mes ? ? '' ,
"avatar" : 'none' ,
"chat" : jsonData . name + " - " + humanizedISO8601DateTime ( ) ,
"mes_example" : jsonData . mes _example ? ? '' ,
"scenario" : jsonData . scenario ? ? '' ,
"create_date" : humanizedISO8601DateTime ( ) ,
"talkativeness" : jsonData . talkativeness ? ? 0.5 ,
"creator" : jsonData . creator ? ? '' ,
"tags" : jsonData . tags ? ? '' ,
} ;
char = convertToV2 ( char ) ;
char = JSON . stringify ( char ) ;
charaWrite ( defaultAvatarPath , char , png _name , response , { file _name : png _name } ) ;
} else if ( jsonData . char _name !== undefined ) { //json Pygmalion notepad
console . log ( 'importing from gradio json' ) ;
jsonData . char _name = sanitize ( jsonData . char _name ) ;
if ( jsonData . creator _notes ) {
jsonData . creator _notes = jsonData . creator _notes . replace ( "Creator's notes go here." , "" ) ;
}
png _name = getPngName ( jsonData . char _name ) ;
let char = {
"name" : jsonData . char _name ,
"description" : jsonData . char _persona ? ? '' ,
"creatorcomment" : jsonData . creatorcomment ? ? jsonData . creator _notes ? ? '' ,
"personality" : '' ,
"first_mes" : jsonData . char _greeting ? ? '' ,
"avatar" : 'none' ,
"chat" : jsonData . name + " - " + humanizedISO8601DateTime ( ) ,
"mes_example" : jsonData . example _dialogue ? ? '' ,
"scenario" : jsonData . world _scenario ? ? '' ,
"create_date" : humanizedISO8601DateTime ( ) ,
"talkativeness" : jsonData . talkativeness ? ? 0.5 ,
"creator" : jsonData . creator ? ? '' ,
"tags" : jsonData . tags ? ? '' ,
} ;
char = convertToV2 ( char ) ;
char = JSON . stringify ( char ) ;
charaWrite ( defaultAvatarPath , char , png _name , response , { file _name : png _name } ) ;
} else {
console . log ( 'Incorrect .json character format' ) ;
response . send ( { error : true } ) ;
}
} ) ;
} else {
try {
var img _data = await charaRead ( uploadPath , format ) ;
let jsonData = json5 . parse ( img _data ) ;
jsonData . name = sanitize ( jsonData . data ? . name || jsonData . name ) ;
png _name = getPngName ( jsonData . name ) ;
if ( format == 'webp' ) {
try {
let convertedPath = path . join ( './uploads' , path . basename ( uploadPath , ".webp" ) + ".png" )
await webp . dwebp ( uploadPath , convertedPath , "-o" ) ;
uploadPath = convertedPath ;
}
catch {
console . error ( 'WEBP image conversion failed. Using the default character image.' ) ;
uploadPath = defaultAvatarPath ;
}
}
if ( jsonData . spec !== undefined ) {
console . log ( 'Found a v2 character file.' ) ;
importRisuSprites ( jsonData ) ;
unsetFavFlag ( jsonData ) ;
jsonData = readFromV2 ( jsonData ) ;
let char = JSON . stringify ( jsonData ) ;
charaWrite ( uploadPath , char , png _name , response , { file _name : png _name } ) ;
} else if ( jsonData . name !== undefined ) {
console . log ( 'Found a v1 character file.' ) ;
if ( jsonData . creator _notes ) {
jsonData . creator _notes = jsonData . creator _notes . replace ( "Creator's notes go here." , "" ) ;
}
let char = {
"name" : jsonData . name ,
"description" : jsonData . description ? ? '' ,
"creatorcomment" : jsonData . creatorcomment ? ? jsonData . creator _notes ? ? '' ,
"personality" : jsonData . personality ? ? '' ,
"first_mes" : jsonData . first _mes ? ? '' ,
"avatar" : 'none' ,
"chat" : jsonData . name + " - " + humanizedISO8601DateTime ( ) ,
"mes_example" : jsonData . mes _example ? ? '' ,
"scenario" : jsonData . scenario ? ? '' ,
"create_date" : humanizedISO8601DateTime ( ) ,
"talkativeness" : jsonData . talkativeness ? ? 0.5 ,
"creator" : jsonData . creator ? ? '' ,
"tags" : jsonData . tags ? ? '' ,
} ;
char = convertToV2 ( char ) ;
char = JSON . stringify ( char ) ;
await charaWrite ( uploadPath , char , png _name , response , { file _name : png _name } ) ;
} else {
console . log ( 'Unknown character card format' ) ;
response . send ( { error : true } ) ;
}
} catch ( err ) {
console . log ( err ) ;
response . send ( { error : true } ) ;
}
}
}
} ) ;
app . post ( "/dupecharacter" , jsonParser , async function ( request , response ) {
try {
if ( ! request . body . avatar _url ) {
console . log ( "avatar URL not found in request body" ) ;
console . log ( request . body ) ;
return response . sendStatus ( 400 ) ;
}
let filename = path . join ( directories . characters , sanitize ( request . body . avatar _url ) ) ;
if ( ! fs . existsSync ( filename ) ) {
console . log ( 'file for dupe not found' ) ;
console . log ( filename ) ;
return response . sendStatus ( 404 ) ;
}
let suffix = 1 ;
let newFilename = filename ;
while ( fs . existsSync ( newFilename ) ) {
let suffixStr = "_" + suffix ;
let ext = path . extname ( filename ) ;
newFilename = filename . slice ( 0 , - ext . length ) + suffixStr + ext ;
suffix ++ ;
}
fs . copyFile ( filename , newFilename , ( err ) => {
if ( err ) throw err ;
console . log ( ` ${ filename } was copied to ${ newFilename } ` ) ;
response . sendStatus ( 200 ) ;
} ) ;
}
catch ( error ) {
console . error ( error ) ;
return response . send ( { error : true } ) ;
}
} ) ;
app . post ( "/exportchat" , jsonParser , async function ( request , response ) {
if ( ! request . body . file || ( ! request . body . avatar _url && request . body . is _group === false ) ) {
return response . sendStatus ( 400 ) ;
}
const pathToFolder = request . body . is _group
? directories . groupChats
: path . join ( directories . chats , String ( request . body . avatar _url ) . replace ( '.png' , '' ) ) ;
let filename = path . join ( pathToFolder , request . body . file ) ;
let exportfilename = request . body . exportfilename
if ( ! fs . existsSync ( filename ) ) {
const errorMessage = {
message : ` Could not find JSONL file to export. Source chat file: ${ filename } . `
}
console . log ( errorMessage . message ) ;
return response . status ( 404 ) . json ( errorMessage ) ;
}
try {
// Short path for JSONL files
if ( request . body . format == 'jsonl' ) {
try {
const rawFile = fs . readFileSync ( filename , 'utf8' ) ;
const successMessage = {
message : ` Chat saved to ${ exportfilename } ` ,
result : rawFile ,
}
console . log ( ` Chat exported as ${ exportfilename } ` ) ;
return response . status ( 200 ) . json ( successMessage ) ;
}
catch ( err ) {
console . error ( err ) ;
const errorMessage = {
message : ` Could not read JSONL file to export. Source chat file: ${ filename } . `
}
console . log ( errorMessage . message ) ;
return response . status ( 500 ) . json ( errorMessage ) ;
}
}
const readline = require ( 'readline' ) ;
const readStream = fs . createReadStream ( filename ) ;
const rl = readline . createInterface ( {
input : readStream ,
} ) ;
let buffer = '' ;
rl . on ( 'line' , ( line ) => {
const data = JSON . parse ( line ) ;
if ( data . mes ) {
const name = data . name ;
const message = ( data ? . extra ? . display _text || data ? . mes || '' ) . replace ( /\r?\n/g , '\n' ) ;
buffer += ( ` ${ name } : ${ message } \n \n ` ) ;
}
} ) ;
rl . on ( 'close' , ( ) => {
const successMessage = {
message : ` Chat saved to ${ exportfilename } ` ,
result : buffer ,
}
console . log ( ` Chat exported as ${ exportfilename } ` ) ;
return response . status ( 200 ) . json ( successMessage ) ;
} ) ;
}
catch ( err ) {
console . log ( "chat export failed." )
console . log ( err ) ;
return response . sendStatus ( 400 ) ;
}
} )
app . post ( "/exportcharacter" , jsonParser , async function ( request , response ) {
if ( ! request . body . format || ! request . body . avatar _url ) {
return response . sendStatus ( 400 ) ;
}
let filename = path . join ( directories . characters , sanitize ( request . body . avatar _url ) ) ;
if ( ! fs . existsSync ( filename ) ) {
return response . sendStatus ( 404 ) ;
}
switch ( request . body . format ) {
case 'png' :
return response . sendFile ( filename , { root : process . cwd ( ) } ) ;
case 'json' : {
try {
let json = await charaRead ( filename ) ;
let jsonObject = getCharaCardV2 ( json5 . parse ( json ) ) ;
return response . type ( 'json' ) . send ( jsonObject )
}
catch {
return response . sendStatus ( 400 ) ;
}
}
case 'webp' : {
try {
let json = await charaRead ( filename ) ;
let stringByteArray = utf8Encode . encode ( json ) . toString ( ) ;
let inputWebpPath = ` ./uploads/ ${ Date . now ( ) } _input.webp ` ;
let outputWebpPath = ` ./uploads/ ${ Date . now ( ) } _output.webp ` ;
let metadataPath = ` ./uploads/ ${ Date . now ( ) } _metadata.exif ` ;
let metadata =
{
"Exif" : {
[ exif . ExifIFD . UserComment ] : stringByteArray ,
} ,
} ;
const exifString = exif . dump ( metadata ) ;
fs . writeFileSync ( metadataPath , exifString , 'binary' ) ;
await webp . cwebp ( filename , inputWebpPath , '-q 95' ) ;
await webp . webpmux _add ( inputWebpPath , outputWebpPath , metadataPath , 'exif' ) ;
response . sendFile ( outputWebpPath , { root : process . cwd ( ) } , ( ) => {
fs . rmSync ( inputWebpPath ) ;
fs . rmSync ( metadataPath ) ;
fs . rmSync ( outputWebpPath ) ;
} ) ;
return ;
}
catch ( err ) {
console . log ( err ) ;
return response . sendStatus ( 400 ) ;
}
}
}
return response . sendStatus ( 400 ) ;
} ) ;
app . post ( "/importgroupchat" , urlencodedParser , function ( request , response ) {
try {
const filedata = request . file ;
const chatname = humanizedISO8601DateTime ( ) ;
fs . copyFileSync ( ` ./uploads/ ${ filedata . filename } ` , ( ` ${ directories . groupChats } / ${ chatname } .jsonl ` ) ) ;
return response . send ( { res : chatname } ) ;
} catch ( error ) {
console . error ( error ) ;
return response . send ( { error : true } ) ;
}
} ) ;
app . post ( "/importchat" , urlencodedParser , function ( request , response ) {
if ( ! request . body ) return response . sendStatus ( 400 ) ;
var format = request . body . file _type ;
let filedata = request . file ;
let avatar _url = ( request . body . avatar _url ) . replace ( '.png' , '' ) ;
let ch _name = request . body . character _name ;
let user _name = request . body . user _name || 'You' ;
if ( filedata ) {
if ( format === 'json' ) {
fs . readFile ( ` ./uploads/ ${ filedata . filename } ` , 'utf8' , ( err , data ) => {
if ( err ) {
console . log ( err ) ;
response . send ( { error : true } ) ;
}
const jsonData = json5 . parse ( data ) ;
if ( jsonData . histories !== undefined ) {
//console.log('/importchat confirms JSON histories are defined');
const chat = {
from ( history ) {
return [
{
user _name : user _name ,
character _name : ch _name ,
create _date : humanizedISO8601DateTime ( ) ,
} ,
... history . msgs . map (
( message ) => ( {
name : message . src . is _human ? user _name : ch _name ,
is _user : message . src . is _human ,
is _name : true ,
send _date : humanizedISO8601DateTime ( ) ,
mes : message . text ,
} )
) ] ;
}
}
const newChats = [ ] ;
( jsonData . histories . histories ? ? [ ] ) . forEach ( ( history ) => {
newChats . push ( chat . from ( history ) ) ;
} ) ;
const errors = [ ] ;
newChats . forEach ( chat => fs . writeFile (
` ${ chatsPath + avatar _url } / ${ ch _name } - ${ humanizedISO8601DateTime ( ) } imported.jsonl ` ,
chat . map ( JSON . stringify ) . join ( '\n' ) ,
'utf8' ,
( err ) => err && errors . push ( err )
)
) ;
if ( 0 < errors . length ) {
response . send ( 'Errors occurred while writing character files. Errors: ' + JSON . stringify ( errors ) ) ;
}
response . send ( { res : true } ) ;
} else if ( Array . isArray ( jsonData . data _visible ) ) {
// oobabooga's format
const chat = [ {
user _name : user _name ,
character _name : ch _name ,
create _date : humanizedISO8601DateTime ( ) ,
} ] ;
for ( const arr of jsonData . data _visible ) {
if ( arr [ 0 ] ) {
const userMessage = {
name : user _name ,
is _user : true ,
is _name : true ,
send _date : humanizedISO8601DateTime ( ) ,
mes : arr [ 0 ] ,
} ;
chat . push ( userMessage ) ;
}
if ( arr [ 1 ] ) {
const charMessage = {
name : ch _name ,
is _user : false ,
is _name : true ,
send _date : humanizedISO8601DateTime ( ) ,
mes : arr [ 1 ] ,
} ;
chat . push ( charMessage ) ;
}
}
fs . writeFileSync ( ` ${ chatsPath + avatar _url } / ${ ch _name } - ${ humanizedISO8601DateTime ( ) } imported.jsonl ` , chat . map ( JSON . stringify ) . join ( '\n' ) , 'utf8' ) ;
response . send ( { res : true } ) ;
} else {
response . send ( { error : true } ) ;
}
} ) ;
}
if ( format === 'jsonl' ) {
//console.log(humanizedISO8601DateTime()+':imported chat format is JSONL');
const fileStream = fs . createReadStream ( './uploads/' + filedata . filename ) ;
const rl = readline . createInterface ( {
input : fileStream ,
crlfDelay : Infinity
} ) ;
rl . once ( 'line' , ( line ) => {
let jsonData = json5 . parse ( line ) ;
if ( jsonData . user _name !== undefined || jsonData . name !== undefined ) {
fs . copyFile ( ` ./uploads/ ${ filedata . filename } ` , ( ` ${ chatsPath + avatar _url } / ${ ch _name } - ${ humanizedISO8601DateTime ( ) } .jsonl ` ) , ( err ) => {
if ( err ) {
response . send ( { error : true } ) ;
return console . log ( err ) ;
} else {
response . send ( { res : true } ) ;
return ;
}
} ) ;
} else {
response . send ( { error : true } ) ;
return ;
}
rl . close ( ) ;
} ) ;
}
}
} ) ;
app . post ( '/importworldinfo' , urlencodedParser , ( request , response ) => {
if ( ! request . file ) return response . sendStatus ( 400 ) ;
const filename = ` ${ path . parse ( sanitize ( request . file . originalname ) ) . name } .json ` ;
let fileContents = null ;
if ( request . body . convertedData ) {
fileContents = request . body . convertedData ;
} else {
const pathToUpload = path . join ( './uploads/' , request . file . filename ) ;
fileContents = fs . readFileSync ( pathToUpload , 'utf8' ) ;
}
try {
const worldContent = json5 . parse ( fileContents ) ;
if ( ! ( 'entries' in worldContent ) ) {
throw new Error ( 'File must contain a world info entries list' ) ;
}
} catch ( err ) {
return response . status ( 400 ) . send ( 'Not a valid world info file' ) ;
}
const pathToNewFile = path . join ( directories . worlds , filename ) ;
const worldName = path . parse ( pathToNewFile ) . name ;
if ( ! worldName ) {
return response . status ( 400 ) . send ( 'World file must have a name' ) ;
}
fs . writeFileSync ( pathToNewFile , fileContents ) ;
return response . send ( { name : worldName } ) ;
} ) ;
app . post ( '/editworldinfo' , jsonParser , ( request , response ) => {
if ( ! request . body ) {
return response . sendStatus ( 400 ) ;
}
if ( ! request . body . name ) {
return response . status ( 400 ) . send ( 'World file must have a name' ) ;
}
try {
if ( ! ( 'entries' in request . body . data ) ) {
throw new Error ( 'World info must contain an entries list' ) ;
}
} catch ( err ) {
return response . status ( 400 ) . send ( 'Not a valid world info file' ) ;
}
const filename = ` ${ sanitize ( request . body . name ) } .json ` ;
const pathToFile = path . join ( directories . worlds , filename ) ;
fs . writeFileSync ( pathToFile , JSON . stringify ( request . body . data , null , 4 ) ) ;
return response . send ( { ok : true } ) ;
} ) ;
app . post ( '/uploaduseravatar' , urlencodedParser , async ( request , response ) => {
if ( ! request . file ) return response . sendStatus ( 400 ) ;
try {
const pathToUpload = path . join ( './uploads/' + request . file . filename ) ;
const crop = tryParse ( request . query . crop ) ;
let rawImg = await jimp . read ( pathToUpload ) ;
if ( typeof crop == 'object' && [ crop . x , crop . y , crop . width , crop . height ] . every ( x => typeof x === 'number' ) ) {
rawImg = rawImg . crop ( crop . x , crop . y , crop . width , crop . height ) ;
}
const image = await rawImg . cover ( AVATAR _WIDTH , AVATAR _HEIGHT ) . getBufferAsync ( jimp . MIME _PNG ) ;
const filename = request . body . overwrite _name || ` ${ Date . now ( ) } .png ` ;
const pathToNewFile = path . join ( directories . avatars , filename ) ;
fs . writeFileSync ( pathToNewFile , image ) ;
fs . rmSync ( pathToUpload ) ;
return response . send ( { path : filename } ) ;
} catch ( err ) {
return response . status ( 400 ) . send ( 'Not a valid image' ) ;
}
} ) ;
app . post ( '/getgroups' , jsonParser , ( _ , response ) => {
const groups = [ ] ;
if ( ! fs . existsSync ( directories . groups ) ) {
fs . mkdirSync ( directories . groups ) ;
}
const files = fs . readdirSync ( directories . groups ) . filter ( x => path . extname ( x ) === '.json' ) ;
const chats = fs . readdirSync ( directories . groupChats ) . filter ( x => path . extname ( x ) === '.jsonl' ) ;
files . forEach ( function ( file ) {
try {
const filePath = path . join ( directories . groups , file ) ;
const fileContents = fs . readFileSync ( filePath , 'utf8' ) ;
const group = json5 . parse ( fileContents ) ;
const groupStat = fs . statSync ( filePath ) ;
group [ 'date_added' ] = groupStat . birthtimeMs ;
let chat _size = 0 ;
let date _last _chat = 0 ;
if ( Array . isArray ( group . chats ) && Array . isArray ( chats ) ) {
for ( const chat of chats ) {
if ( group . chats . includes ( path . parse ( chat ) . name ) ) {
const chatStat = fs . statSync ( path . join ( directories . groupChats , chat ) ) ;
chat _size += chatStat . size ;
date _last _chat = Math . max ( date _last _chat , chatStat . mtimeMs ) ;
}
}
}
group [ 'date_last_chat' ] = date _last _chat ;
group [ 'chat_size' ] = chat _size ;
groups . push ( group ) ;
}
catch ( error ) {
console . error ( error ) ;
}
} ) ;
return response . send ( groups ) ;
} ) ;
app . post ( '/creategroup' , jsonParser , ( request , response ) => {
if ( ! request . body ) {
return response . sendStatus ( 400 ) ;
}
const id = Date . now ( ) ;
const groupMetadata = {
id : id ,
name : request . body . name ? ? 'New Group' ,
members : request . body . members ? ? [ ] ,
avatar _url : request . body . avatar _url ,
allow _self _responses : ! ! request . body . allow _self _responses ,
activation _strategy : request . body . activation _strategy ? ? 0 ,
disabled _members : request . body . disabled _members ? ? [ ] ,
chat _metadata : request . body . chat _metadata ? ? { } ,
fav : request . body . fav ,
chat _id : request . body . chat _id ? ? id ,
chats : request . body . chats ? ? [ id ] ,
} ;
const pathToFile = path . join ( directories . groups , ` ${ id } .json ` ) ;
const fileData = JSON . stringify ( groupMetadata ) ;
if ( ! fs . existsSync ( directories . groups ) ) {
fs . mkdirSync ( directories . groups ) ;
}
fs . writeFileSync ( pathToFile , fileData ) ;
return response . send ( groupMetadata ) ;
} ) ;
app . post ( '/editgroup' , jsonParser , ( request , response ) => {
if ( ! request . body || ! request . body . id ) {
return response . sendStatus ( 400 ) ;
}
const id = request . body . id ;
const pathToFile = path . join ( directories . groups , ` ${ id } .json ` ) ;
const fileData = JSON . stringify ( request . body ) ;
fs . writeFileSync ( pathToFile , fileData ) ;
return response . send ( { ok : true } ) ;
} ) ;
app . post ( '/getgroupchat' , jsonParser , ( request , response ) => {
if ( ! request . body || ! request . body . id ) {
return response . sendStatus ( 400 ) ;
}
const id = request . body . id ;
const pathToFile = path . join ( directories . groupChats , ` ${ id } .jsonl ` ) ;
if ( fs . existsSync ( pathToFile ) ) {
const data = fs . readFileSync ( pathToFile , 'utf8' ) ;
const lines = data . split ( '\n' ) ;
// Iterate through the array of strings and parse each line as JSON
const jsonData = lines . map ( json5 . parse ) ;
return response . send ( jsonData ) ;
} else {
return response . send ( [ ] ) ;
}
} ) ;
app . post ( '/deletegroupchat' , jsonParser , ( request , response ) => {
if ( ! request . body || ! request . body . id ) {
return response . sendStatus ( 400 ) ;
}
const id = request . body . id ;
const pathToFile = path . join ( directories . groupChats , ` ${ id } .jsonl ` ) ;
if ( fs . existsSync ( pathToFile ) ) {
fs . rmSync ( pathToFile ) ;
return response . send ( { ok : true } ) ;
}
return response . send ( { error : true } ) ;
} ) ;
app . post ( '/savegroupchat' , jsonParser , ( request , response ) => {
if ( ! request . body || ! request . body . id ) {
return response . sendStatus ( 400 ) ;
}
const id = request . body . id ;
const pathToFile = path . join ( directories . groupChats , ` ${ id } .jsonl ` ) ;
if ( ! fs . existsSync ( directories . groupChats ) ) {
fs . mkdirSync ( directories . groupChats ) ;
}
let chat _data = request . body . chat ;
let jsonlData = chat _data . map ( JSON . stringify ) . join ( '\n' ) ;
fs . writeFileSync ( pathToFile , jsonlData , 'utf8' ) ;
return response . send ( { ok : true } ) ;
} ) ;
app . post ( '/deletegroup' , jsonParser , async ( request , response ) => {
if ( ! request . body || ! request . body . id ) {
return response . sendStatus ( 400 ) ;
}
const id = request . body . id ;
const pathToGroup = path . join ( directories . groups , sanitize ( ` ${ id } .json ` ) ) ;
try {
// Delete group chats
const group = json5 . parse ( fs . readFileSync ( pathToGroup ) ) ;
if ( group && Array . isArray ( group . chats ) ) {
for ( const chat of group . chats ) {
console . log ( 'Deleting group chat' , chat ) ;
const pathToFile = path . join ( directories . groupChats , ` ${ id } .jsonl ` ) ;
if ( fs . existsSync ( pathToFile ) ) {
fs . rmSync ( pathToFile ) ;
}
}
}
} catch ( error ) {
console . error ( 'Could not delete group chats. Clean them up manually.' , error ) ;
}
if ( fs . existsSync ( pathToGroup ) ) {
fs . rmSync ( pathToGroup ) ;
}
return response . send ( { ok : true } ) ;
} ) ;
/**
 * Discover the extension folders.
 * If the folder is called third-party, search for subfolders instead.
 */
app . get ( '/discover_extensions' , jsonParser , function ( _ , response ) {
// get all folders in the extensions folder, except third-party
const extensions = fs
. readdirSync ( directories . extensions )
. filter ( f => fs . statSync ( path . join ( directories . extensions , f ) ) . isDirectory ( ) )
. filter ( f => f !== 'third-party' ) ;
// get all folders in the third-party folder, if it exists
if ( ! fs . existsSync ( path . join ( directories . extensions , 'third-party' ) ) ) {
return response . send ( extensions ) ;
}
const thirdPartyExtensions = fs
. readdirSync ( path . join ( directories . extensions , 'third-party' ) )
. filter ( f => fs . statSync ( path . join ( directories . extensions , 'third-party' , f ) ) . isDirectory ( ) ) ;
// add the third-party extensions to the extensions array
extensions . push ( ... thirdPartyExtensions . map ( f => ` third-party/ ${ f } ` ) ) ;
console . log ( extensions ) ;
return response . send ( extensions ) ;
} ) ;
app . get ( '/get_sprites' , jsonParser , function ( request , response ) {
const name = request . query . name ;
const spritesPath = path . join ( directories . characters , name ) ;
let sprites = [ ] ;
try {
if ( fs . existsSync ( spritesPath ) && fs . statSync ( spritesPath ) . isDirectory ( ) ) {
sprites = fs . readdirSync ( spritesPath )
. filter ( file => {
const mimeType = mime . lookup ( file ) ;
return mimeType && mimeType . startsWith ( 'image/' ) ;
} )
. map ( ( file ) => {
const pathToSprite = path . join ( spritesPath , file ) ;
return {
label : path . parse ( pathToSprite ) . name . toLowerCase ( ) ,
path : ` /characters/ ${ name } / ${ file } ` ,
} ;
} ) ;
}
}
catch ( err ) {
console . log ( err ) ;
}
finally {
return response . send ( sprites ) ;
}
} ) ;
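// Maps a thumbnail type ('bg' or 'avatar') to its cache folder; returns undefined for unknown types.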
function getThumbnailFolder ( type ) {
let thumbnailFolder ;
switch ( type ) {
case 'bg' :
thumbnailFolder = directories . thumbnailsBg ;
break ;
case 'avatar' :
thumbnailFolder = directories . thumbnailsAvatar ;
break ;
}
return thumbnailFolder ;
}
function getOriginalFolder ( type ) {
let originalFolder ;
switch ( type ) {
case 'bg' :
originalFolder = directories . backgrounds ;
break ;
case 'avatar' :
originalFolder = directories . characters ;
break ;
}
return originalFolder ;
}
function invalidateThumbnail ( type , file ) {
const folder = getThumbnailFolder ( type ) ;
const pathToThumbnail = path . join ( folder , file ) ;
if ( fs . existsSync ( pathToThumbnail ) ) {
fs . rmSync ( pathToThumbnail ) ;
}
}
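// Pre-generates background thumbnails; skipped when the cache folder already contains files.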
async function ensureThumbnailCache ( ) {
const cacheFiles = fs . readdirSync ( directories . thumbnailsBg ) ;
// files exist, all ok
if ( cacheFiles . length ) {
return ;
}
console . log ( 'Generating thumbnails cache. Please wait...' ) ;
const bgFiles = fs . readdirSync ( directories . backgrounds ) ;
const tasks = [ ] ;
for ( const file of bgFiles ) {
tasks . push ( generateThumbnail ( 'bg' , file ) ) ;
}
await Promise . all ( tasks ) ;
console . log ( ` Done! Generated: ${ bgFiles . length } preview images ` ) ;
}
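// Returns the path to a cached thumbnail for the given type/file, (re)generating it with jimp
// when the cache entry is missing or older than the original. Falls back to the original bytes
// if the image cannot be processed, and returns null if the original file does not exist.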
async function generateThumbnail ( type , file ) {
const pathToCachedFile = path . join ( getThumbnailFolder ( type ) , file ) ;
const pathToOriginalFile = path . join ( getOriginalFolder ( type ) , file ) ;
const cachedFileExists = fs . existsSync ( pathToCachedFile ) ;
const originalFileExists = fs . existsSync ( pathToOriginalFile ) ;
// to handle cases when original image was updated after thumb creation
let shouldRegenerate = false ;
if ( cachedFileExists && originalFileExists ) {
const originalStat = fs . statSync ( pathToOriginalFile ) ;
const cachedStat = fs . statSync ( pathToCachedFile ) ;
if ( originalStat . mtimeMs > cachedStat . ctimeMs ) {
//console.log('Original file changed. Regenerating thumbnail...');
shouldRegenerate = true ;
}
}
if ( cachedFileExists && ! shouldRegenerate ) {
return pathToCachedFile ;
}
if ( ! originalFileExists ) {
return null ;
}
const imageSizes = { 'bg' : [ 160 , 90 ] , 'avatar' : [ 96 , 144 ] } ;
const mySize = imageSizes [ type ] ;
try {
let buffer ;
try {
const image = await jimp . read ( pathToOriginalFile ) ;
buffer = await image . cover ( mySize [ 0 ] , mySize [ 1 ] ) . quality ( 95 ) . getBufferAsync ( mime . lookup ( 'jpg' ) ) ;
}
catch ( inner ) {
console . warn ( ` Thumbnailer can not process the image: ${ pathToOriginalFile } . Using original size ` ) ;
buffer = fs . readFileSync ( pathToOriginalFile ) ;
}
fs . writeFileSync ( pathToCachedFile , buffer ) ;
}
catch ( outer ) {
return null ;
}
return pathToCachedFile ;
}
app . get ( '/thumbnail' , jsonParser , async function ( request , response ) {
const type = request . query . type ;
const file = sanitize ( request . query . file ) ;
if ( ! type || ! file ) {
return response . sendStatus ( 400 ) ;
}
if ( ! ( type == 'bg' || type == 'avatar' ) ) {
return response . sendStatus ( 400 ) ;
}
if ( sanitize ( file ) !== file ) {
console . error ( 'Malicious filename prevented' ) ;
return response . sendStatus ( 403 ) ;
}
if ( config . disableThumbnails == true ) {
const pathToOriginalFile = path . join ( getOriginalFolder ( type ) , file ) ;
return response . sendFile ( pathToOriginalFile , { root : process . cwd ( ) } ) ;
}
const pathToCachedFile = await generateThumbnail ( type , file ) ;
if ( ! pathToCachedFile ) {
return response . sendStatus ( 404 ) ;
}
return response . sendFile ( pathToCachedFile , { root : process . cwd ( ) } ) ;
} ) ;
/* OpenAI */
app . post ( "/getstatus_openai" , jsonParser , function ( request , response _getstatus _openai = response ) {
if ( ! request . body ) return response _getstatus _openai . sendStatus ( 400 ) ;
let api _url ;
let api _key _openai ;
let headers ;
if ( request . body . use _openrouter == false ) {
api _url = new URL ( request . body . reverse _proxy || api _openai ) . toString ( ) ;
api _key _openai = request . body . reverse _proxy ? request . body . proxy _password : readSecret ( SECRET _KEYS . OPENAI ) ;
headers = { } ;
} else {
api _url = 'https://openrouter.ai/api/v1' ;
api _key _openai = readSecret ( SECRET _KEYS . OPENROUTER ) ;
// OpenRouter needs to pass the referer: https://openrouter.ai/docs
headers = { 'HTTP-Referer' : request . headers . referer } ;
}
if ( ! api _key _openai && ! request . body . reverse _proxy ) {
return response _getstatus _openai . status ( 401 ) . send ( { error : true } ) ;
}
const args = {
headers : {
"Authorization" : "Bearer " + api _key _openai ,
... headers ,
} ,
} ;
client . get ( api _url + "/models" , args , function ( data , response ) {
if ( response . statusCode == 200 ) {
response _getstatus _openai . send ( data ) ;
const modelIds = data ? . data ? . map ( x => x . id ) ? . sort ( ) ;
console . log ( 'Available OpenAI models:' , modelIds ) ;
}
if ( response . statusCode == 401 ) {
console . log ( 'Access Token is incorrect.' ) ;
response _getstatus _openai . send ( { error : true } ) ;
}
if ( response . statusCode == 404 ) {
console . log ( 'Endpoint not found.' ) ;
response _getstatus _openai . send ( { error : true } ) ;
}
if ( response . statusCode == 500 || response . statusCode == 501 || response . statusCode == 502 || response . statusCode == 503 || response . statusCode == 507 ) {
console . log ( data ) ;
response _getstatus _openai . send ( { error : true } ) ;
}
} ) . on ( 'error' , function ( ) {
response _getstatus _openai . send ( { error : true } ) ;
} ) ;
} ) ;
app . post ( "/openai_bias" , jsonParser , async function ( request , response ) {
if ( ! request . body || ! Array . isArray ( request . body ) )
return response . sendStatus ( 400 ) ;
let result = { } ;
const model = getTokenizerModel ( String ( request . query . model || '' ) ) ;
// no bias for claude
if ( model == 'claude' ) {
return response . send ( result ) ;
}
const tokenizer = getTiktokenTokenizer ( model ) ;
for ( const entry of request . body ) {
if ( ! entry || ! entry . text ) {
continue ;
}
try {
const tokens = tokenizer . encode ( entry . text ) ;
for ( const token of tokens ) {
result [ token ] = entry . value ;
}
} catch {
console . warn ( 'Tokenizer failed to encode:' , entry . text ) ;
}
}
// not needed for cached tokenizers
//tokenizer.free();
return response . send ( result ) ;
} ) ;
app . post ( "/deletepreset_openai" , jsonParser , function ( request , response ) {
if ( ! request . body || ! request . body . name ) {
return response . sendStatus ( 400 ) ;
}
const name = request . body . name ;
const pathToFile = path . join ( directories . openAI _Settings , ` ${ name } .settings ` ) ;
if ( fs . existsSync ( pathToFile ) ) {
fs . rmSync ( pathToFile ) ;
return response . send ( { ok : true } ) ;
}
return response . send ( { error : true } ) ;
} ) ;
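// Flattens an array of ChatML-style messages into a single plain-text prompt, e.g.
// [ { role : 'system' , content : 'Hi' } , { role : 'user' , content : 'Hello' } ] becomes "System: Hi\nuser: Hello".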
function convertChatMLPrompt ( messages ) {
const messageStrings = [ ] ;
messages . forEach ( m => {
if ( m . role === 'system' && m . name === undefined ) {
messageStrings . push ( "System: " + m . content ) ;
}
else if ( m . role === 'system' && m . name !== undefined ) {
messageStrings . push ( m . name + ": " + m . content ) ;
}
else {
messageStrings . push ( m . role + ": " + m . content ) ;
}
} ) ;
return messageStrings . join ( "\n" ) ;
}
// Prompt Conversion script taken from RisuAI by @kwaroran (GPLv3).
function convertClaudePrompt ( messages , addHumanPrefix , addAssistantPostfix ) {
// Claude doesn't support message names, so we'll just add them to the message content.
for ( const message of messages ) {
if ( message . name && message . role !== "system" ) {
message . content = message . name + ": " + message . content ;
delete message . name ;
}
}
let requestPrompt = messages . map ( ( v ) => {
let prefix = '' ;
switch ( v . role ) {
case "assistant" :
prefix = "\n\nAssistant: " ;
break
case "user" :
prefix = "\n\nHuman: " ;
break
case "system" :
// According to the Claude docs, H: and A: should be used for example conversations.
if ( v . name === "example_assistant" ) {
prefix = "\n\nA: " ;
} else if ( v . name === "example_user" ) {
prefix = "\n\nH: " ;
} else {
prefix = "\n\n" ;
}
break
}
return prefix + v . content ;
} ) . join ( '' ) ;
if ( addHumanPrefix ) {
requestPrompt = "\n\nHuman: " + requestPrompt ;
}
if ( addAssistantPostfix ) {
requestPrompt = requestPrompt + '\n\nAssistant: ' ;
}
return requestPrompt ;
}
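// Proxies a chat completion request to the Scale API URL provided by the client: the ChatML
// messages are flattened to plain text, sent with Basic auth, and the reply is wrapped back
// into an OpenAI-style "choices" response.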
async function sendScaleRequest ( request , response ) {
const fetch = require ( 'node-fetch' ) . default ;
const api _url = new URL ( request . body . api _url _scale ) . toString ( ) ;
const api _key _scale = readSecret ( SECRET _KEYS . SCALE ) ;
if ( ! api _key _scale ) {
return response . status ( 401 ) . send ( { error : true } ) ;
}
const requestPrompt = convertChatMLPrompt ( request . body . messages ) ;
console . log ( 'Scale request:' , requestPrompt ) ;
try {
const controller = new AbortController ( ) ;
request . socket . removeAllListeners ( 'close' ) ;
request . socket . on ( 'close' , function ( ) {
controller . abort ( ) ;
} ) ;
const generateResponse = await fetch ( api _url , {
method : "POST" ,
body : JSON . stringify ( { input : { input : requestPrompt } } ) ,
headers : {
'Content-Type' : 'application/json' ,
'Authorization' : ` Basic ${ api _key _scale } ` ,
} ,
timeout : 0 ,
} ) ;
if ( ! generateResponse . ok ) {
console . log ( ` Scale API returned error: ${ generateResponse . status } ${ generateResponse . statusText } ${ await generateResponse . text ( ) } ` ) ;
return response . status ( generateResponse . status ) . send ( { error : true } ) ;
}
const generateResponseJson = await generateResponse . json ( ) ;
console . log ( 'Scale response:' , generateResponseJson ) ;
const reply = { choices : [ { "message" : { "content" : generateResponseJson . output , } } ] } ;
return response . send ( reply ) ;
} catch ( error ) {
console . log ( error ) ;
if ( ! response . headersSent ) {
return response . status ( 500 ) . send ( { error : true } ) ;
}
}
}
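// Proxies a request to the Claude completion API (or a reverse proxy): converts the messages
// into a Human:/Assistant: prompt, optionally appends an assistant prefill, supports SSE
// streaming, and wraps non-streamed replies in an OpenAI-style "choices" response.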
async function sendClaudeRequest ( request , response ) {
const fetch = require ( 'node-fetch' ) . default ;
const api _url = new URL ( request . body . reverse _proxy || api _claude ) . toString ( ) ;
const api _key _claude = request . body . reverse _proxy ? request . body . proxy _password : readSecret ( SECRET _KEYS . CLAUDE ) ;
if ( ! api _key _claude ) {
return response . status ( 401 ) . send ( { error : true } ) ;
}
try {
const controller = new AbortController ( ) ;
request . socket . removeAllListeners ( 'close' ) ;
request . socket . on ( 'close' , function ( ) {
controller . abort ( ) ;
} ) ;
let requestPrompt = convertClaudePrompt ( request . body . messages , true , true ) ;
if ( request . body . assistant _prefill ) {
requestPrompt += request . body . assistant _prefill ;
}
console . log ( 'Claude request:' , requestPrompt ) ;
const generateResponse = await fetch ( api _url + '/complete' , {
method : "POST" ,
signal : controller . signal ,
body : JSON . stringify ( {
prompt : requestPrompt ,
model : request . body . model ,
max _tokens _to _sample : request . body . max _tokens ,
stop _sequences : [ "\n\nHuman:" , "\n\nSystem:" , "\n\nAssistant:" ] ,
temperature : request . body . temperature ,
top _p : request . body . top _p ,
top _k : request . body . top _k ,
stream : request . body . stream ,
} ) ,
headers : {
"Content-Type" : "application/json" ,
"anthropic-version" : '2023-06-01' ,
"x-api-key" : api _key _claude ,
} ,
timeout : 0 ,
} ) ;
if ( request . body . stream ) {
// Pipe remote SSE stream to Express response
generateResponse . body . pipe ( response ) ;
request . socket . on ( 'close' , function ( ) {
generateResponse . body . destroy ( ) ; // Close the remote stream
response . end ( ) ; // End the Express response
} ) ;
generateResponse . body . on ( 'end' , function ( ) {
console . log ( "Streaming request finished" ) ;
response . end ( ) ;
} ) ;
} else {
if ( ! generateResponse . ok ) {
console . log ( ` Claude API returned error: ${ generateResponse . status } ${ generateResponse . statusText } ${ await generateResponse . text ( ) } ` ) ;
return response . status ( generateResponse . status ) . send ( { error : true } ) ;
}
const generateResponseJson = await generateResponse . json ( ) ;
const responseText = generateResponseJson . completion ;
console . log ( 'Claude response:' , responseText ) ;
// Wrap it back to OAI format
const reply = { choices : [ { "message" : { "content" : responseText , } } ] } ;
return response . send ( reply ) ;
}
} catch ( error ) {
console . log ( 'Error communicating with Claude: ' , error ) ;
if ( ! response . headersSent ) {
return response . status ( 500 ) . send ( { error : true } ) ;
}
}
}
app . post ( "/generate_openai" , jsonParser , function ( request , response _generate _openai ) {
if ( ! request . body ) return response _generate _openai . status ( 400 ) . send ( { error : true } ) ;
if ( request . body . use _claude ) {
return sendClaudeRequest ( request , response _generate _openai ) ;
}
if ( request . body . use _scale ) {
return sendScaleRequest ( request , response _generate _openai ) ;
}
let api _url ;
let api _key _openai ;
let headers ;
let bodyParams ;
if ( ! request . body . use _openrouter ) {
api _url = new URL ( request . body . reverse _proxy || api _openai ) . toString ( ) ;
api _key _openai = request . body . reverse _proxy ? request . body . proxy _password : readSecret ( SECRET _KEYS . OPENAI ) ;
headers = { } ;
bodyParams = { } ;
} else {
api _url = 'https://openrouter.ai/api/v1' ;
api _key _openai = readSecret ( SECRET _KEYS . OPENROUTER ) ;
// OpenRouter needs to pass the referer: https://openrouter.ai/docs
headers = { 'HTTP-Referer' : request . headers . referer } ;
bodyParams = { 'transforms' : [ "middle-out" ] } ;
}
if ( ! api _key _openai && ! request . body . reverse _proxy ) {
return response _generate _openai . status ( 401 ) . send ( { error : true } ) ;
}
const isTextCompletion = Boolean ( request . body . model && ( request . body . model . startsWith ( 'text-' ) || request . body . model . startsWith ( 'code-' ) ) ) ;
const textPrompt = isTextCompletion ? convertChatMLPrompt ( request . body . messages ) : '' ;
const endpointUrl = isTextCompletion ? ` ${ api _url } /completions ` : ` ${ api _url } /chat/completions ` ;
const controller = new AbortController ( ) ;
request . socket . removeAllListeners ( 'close' ) ;
request . socket . on ( 'close' , function ( ) {
controller . abort ( ) ;
} ) ;
const config = {
method : 'post' ,
url : endpointUrl ,
headers : {
'Content-Type' : 'application/json' ,
'Authorization' : 'Bearer ' + api _key _openai ,
... headers ,
} ,
data : {
"messages" : isTextCompletion === false ? request . body . messages : undefined ,
"prompt" : isTextCompletion === true ? textPrompt : undefined ,
"model" : request . body . model ,
"temperature" : request . body . temperature ,
"max_tokens" : request . body . max _tokens ,
"stream" : request . body . stream ,
"presence_penalty" : request . body . presence _penalty ,
"frequency_penalty" : request . body . frequency _penalty ,
"top_p" : request . body . top _p ,
"top_k" : request . body . top _k ,
"stop" : request . body . stop ,
"logit_bias" : request . body . logit _bias ,
... bodyParams ,
} ,
signal : controller . signal ,
} ;
console . log ( config . data ) ;
if ( request . body . stream ) {
config . responseType = 'stream' ;
}
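// Sends the request with axios; on HTTP 429 it retries up to `retries` times after a delay,
// and when streaming is enabled it pipes the upstream response straight through to the client.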
async function makeRequest ( config , response _generate _openai , request , retries = 5 , timeout = 1000 ) {
try {
const response = await axios ( config ) ;
if ( response . status <= 299 ) {
if ( request . body . stream ) {
console . log ( 'Streaming request in progress' ) ;
response . data . pipe ( response _generate _openai ) ;
response . data . on ( 'end' , ( ) => {
console . log ( 'Streaming request finished' ) ;
response _generate _openai . end ( ) ;
} ) ;
} else {
response _generate _openai . send ( response . data ) ;
console . log ( response . data ) ;
console . log ( response . data ? . choices [ 0 ] ? . message ) ;
}
} else {
handleErrorResponse ( response , response _generate _openai , request ) ;
}
} catch ( error ) {
if ( error . response && error . response . status === 429 && retries > 0 ) {
console . log ( 'Out of quota, retrying...' ) ;
setTimeout ( ( ) => {
makeRequest ( config , response _generate _openai , request , retries - 1 ) ;
} , timeout ) ;
} else {
handleError ( error , response _generate _openai , request ) ;
}
}
}
function handleErrorResponse ( response , response _generate _openai , request ) {
if ( response . status >= 400 && response . status <= 504 ) {
console . log ( 'Error occurred:' , response . status , response . data ) ;
response _generate _openai . send ( { error : true } ) ;
}
}
function handleError ( error , response _generate _openai , request ) {
console . error ( 'Error:' , error . message ) ;
let message = error ? . response ? . statusText ;
switch ( error ? . response ? . status ) {
case 402 :
message = error ? . response ? . data ? . error ? . message || 'Credit limit reached' ;
console . log ( message ) ;
break ;
case 403 :
message = error ? . response ? . data ? . error ? . message || 'API key disabled or exhausted' ;
console . log ( message ) ;
break ;
case 451 :
message = error ? . response ? . data ? . error ? . message || 'Unavailable for legal reasons' ;
console . log ( message ) ;
break ;
}
const quota _error = error ? . response ? . status === 429 && error ? . response ? . data ? . error ? . type === 'insufficient_quota' ;
const response = { error : { message } , quota _error : quota _error }
if ( ! response _generate _openai . headersSent ) {
response _generate _openai . send ( response ) ;
} else if ( ! response _generate _openai . writableEnded ) {
response _generate _openai . write ( response ) ;
}
}
makeRequest ( config , response _generate _openai , request ) ;
} ) ;
app . post ( "/tokenize_openai" , jsonParser , function ( request , response _tokenize _openai = response ) {
if ( ! request . body ) return response _tokenize _openai . sendStatus ( 400 ) ;
let num _tokens = 0 ;
const model = getTokenizerModel ( String ( request . query . model || '' ) ) ;
if ( model == 'claude' ) {
num _tokens = countClaudeTokens ( claude _tokenizer , request . body ) ;
return response _tokenize _openai . send ( { "token_count" : num _tokens } ) ;
}
const tokensPerName = model . includes ( 'gpt-4' ) ? 1 : - 1 ;
const tokensPerMessage = model . includes ( 'gpt-4' ) ? 3 : 4 ;
const tokensPadding = 3 ;
const tokenizer = getTiktokenTokenizer ( model ) ;
for ( const msg of request . body ) {
try {
num _tokens += tokensPerMessage ;
for ( const [ key , value ] of Object . entries ( msg ) ) {
num _tokens += tokenizer . encode ( value ) . length ;
if ( key == "name" ) {
num _tokens += tokensPerName ;
}
}
} catch {
console . warn ( "Error tokenizing message:" , msg ) ;
}
}
num _tokens += tokensPadding ;
// not needed for cached tokenizers
//tokenizer.free();
response _tokenize _openai . send ( { "token_count" : num _tokens } ) ;
} ) ;
app . post ( "/save_preset" , jsonParser , function ( request , response ) {
const name = sanitize ( request . body . name ) ;
if ( ! request . body . preset || ! name ) {
return response . sendStatus ( 400 ) ;
}
const filename = ` ${ name } .settings ` ;
const directory = getPresetFolderByApiId ( request . body . apiId ) ;
if ( ! directory ) {
return response . sendStatus ( 400 ) ;
}
const fullpath = path . join ( directory , filename ) ;
fs . writeFileSync ( fullpath , JSON . stringify ( request . body . preset , null , 4 ) , 'utf-8' ) ;
return response . send ( { name } ) ;
} ) ;
app . post ( "/delete_preset" , jsonParser , function ( request , response ) {
const name = sanitize ( request . body . name ) ;
if ( ! name ) {
return response . sendStatus ( 400 ) ;
}
const filename = ` ${ name } .settings ` ;
const directory = getPresetFolderByApiId ( request . body . apiId ) ;
if ( ! directory ) {
return response . sendStatus ( 400 ) ;
}
const fullpath = path . join ( directory , filename ) ;
if ( fs . existsSync ( fullpath ) ) {
fs . unlinkSync ( fullpath ) ;
return response . sendStatus ( 200 ) ;
} else {
return response . sendStatus ( 404 ) ;
}
} ) ;
app . post ( "/savepreset_openai" , jsonParser , function ( request , response ) {
const name = sanitize ( request . query . name ) ;
if ( ! request . body || ! name ) {
return response . sendStatus ( 400 ) ;
}
const filename = ` ${ name } .settings ` ;
const fullpath = path . join ( directories . openAI _Settings , filename ) ;
fs . writeFileSync ( fullpath , JSON . stringify ( request . body , null , 4 ) , 'utf-8' ) ;
return response . send ( { name } ) ;
} ) ;
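// Resolves which presets folder the /save_preset and /delete_preset endpoints should use for a given API id.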
function getPresetFolderByApiId ( apiId ) {
switch ( apiId ) {
case 'kobold' :
case 'koboldhorde' :
return directories . koboldAI _Settings ;
case 'novel' :
return directories . novelAI _Settings ;
case 'textgenerationwebui' :
return directories . textGen _Settings ;
default :
return null ;
}
}
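// Example (illustrative): getPresetFolderByApiId('novel') resolves to directories.novelAI_Settings,
// while an unrecognized apiId (e.g. 'openai', which is handled by /savepreset_openai instead)
// returns null and makes /save_preset and /delete_preset respond with 400 Bad Request.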
function createTokenizationHandler ( getTokenizerFn ) {
return async function ( request , response ) {
if ( ! request . body ) {
return response . sendStatus ( 400 ) ;
}
const text = request . body . text || '' ;
const tokenizer = getTokenizerFn ( ) ;
const { ids , count } = await countSentencepieceTokens ( tokenizer , text ) ;
return response . send ( { ids , count } ) ;
} ;
}
app . post ( "/tokenize_llama" , jsonParser , createTokenizationHandler ( ( ) => spp _llama ) ) ;
app . post ( "/tokenize_nerdstash" , jsonParser , createTokenizationHandler ( ( ) => spp _nerd ) ) ;
app . post ( "/tokenize_nerdstash_v2" , jsonParser , createTokenizationHandler ( ( ) => spp _nerd _v2 ) ) ;
app . post ( "/tokenize_via_api" , jsonParser , async function ( request , response ) {
if ( ! request . body ) {
return response . sendStatus ( 400 ) ;
}
const text = request . body . text || '' ;
try {
const args = {
body : JSON . stringify ( { "prompt" : text } ) ,
headers : { "Content-Type" : "application/json" }
} ;
if ( main _api == 'textgenerationwebui' && request . body . use _mancer ) {
args . headers = Object . assign ( args . headers , get _mancer _headers ( ) ) ;
}
const data = await postAsync ( api _server + "/v1/token-count" , args ) ;
console . log ( data ) ;
return response . send ( { count : data [ 'results' ] [ 0 ] [ 'tokens' ] } ) ;
} catch ( error ) {
console . log ( error ) ;
return response . send ( { error : true } ) ;
}
} ) ;
// ** REST CLIENT ASYNC WRAPPERS **
async function postAsync ( url , args ) {
const fetch = require ( 'node-fetch' ) . default ;
const response = await fetch ( url , { method : 'POST' , timeout : 0 , ... args } ) ;
if ( response . ok ) {
const data = await response . json ( ) ;
return data ;
}
throw response ;
}
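// Illustrative usage sketch (not in the original source): postAsync takes fetch-style args,
// resolves with the parsed JSON body on 2xx responses and throws the raw Response otherwise.
// The URL below is a hypothetical local backend used only for illustration.
//
//   try {
//       const data = await postAsync('http://127.0.0.1:5000/api/v1/generate', {
//           body: JSON.stringify({ prompt: 'Hello' }),
//           headers: { 'Content-Type': 'application/json' },
//       });
//   } catch (errorResponse) {
//       console.log('Request failed with status', errorResponse.status);
//   }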
function getAsync ( url , args ) {
return new Promise ( ( resolve , reject ) => {
client . get ( url , args , ( data , response ) => {
if ( response . statusCode >= 400 ) {
reject ( data ) ;
}
resolve ( data ) ;
} ) . on ( 'error' , e => reject ( e ) ) ;
} )
}
// ** END **
const tavernUrl = new URL (
( cliArguments . ssl ? 'https://' : 'http://' ) +
( listen ? '0.0.0.0' : '127.0.0.1' ) +
( ':' + server _port )
) ;
const autorunUrl = new URL (
( cliArguments . ssl ? 'https://' : 'http://' ) +
( '127.0.0.1' ) +
( ':' + server _port )
) ;
const setupTasks = async function ( ) {
const version = getVersion ( ) ;
console . log ( ` SillyTavern ${ version . pkgVersion } ` + ( version . gitBranch ? ` ' ${ version . gitBranch } ' ( ${ version . gitRevision } ) ` : '' ) ) ;
backupSettings ( ) ;
migrateSecrets ( ) ;
ensurePublicDirectoriesExist ( ) ;
await ensureThumbnailCache ( ) ;
contentManager . checkForNewContent ( ) ;
// Colab users could run the embedded tool
if ( ! is _colab ) await convertWebp ( ) ;
[ spp _llama , spp _nerd , spp _nerd _v2 , claude _tokenizer ] = await Promise . all ( [
loadSentencepieceTokenizer ( 'src/sentencepiece/tokenizer.model' ) ,
loadSentencepieceTokenizer ( 'src/sentencepiece/nerdstash.model' ) ,
loadSentencepieceTokenizer ( 'src/sentencepiece/nerdstash_v2.model' ) ,
loadClaudeTokenizer ( 'src/claude.json' ) ,
] ) ;
await statsHelpers . loadStatsFile ( directories . chats , directories . characters ) ;
// Set up event listeners for a graceful shutdown
process . on ( 'SIGINT' , statsHelpers . writeStatsToFileAndExit ) ;
process . on ( 'SIGTERM' , statsHelpers . writeStatsToFileAndExit ) ;
process . on ( 'uncaughtException' , ( err ) => {
console . error ( 'Uncaught exception:' , err ) ;
statsHelpers . writeStatsToFileAndExit ( ) ;
} ) ;
setInterval ( statsHelpers . saveStatsToFile , 5 * 60 * 1000 ) ;
console . log ( 'Launching...' ) ;
if ( autorun ) open ( autorunUrl . toString ( ) ) ;
console . log ( 'SillyTavern is listening on: ' + tavernUrl ) ;
if ( listen ) {
console . log ( '\n0.0.0.0 means SillyTavern is listening on all network interfaces (Wi-Fi, LAN, localhost). If you want to limit it only to internal localhost (127.0.0.1), change the setting in config.conf to “listen=false”\n' ) ;
}
}
if ( listen && ! config . whitelistMode && ! config . basicAuthMode ) {
if ( config . securityOverride )
console . warn ( "Security has been override. If it's not a trusted network, change the settings." ) ;
else {
console . error ( 'Your SillyTavern is currently insecurely open to the public. Enable whitelisting or basic authentication.' ) ;
process . exit ( 1 ) ;
}
}
if ( true === cliArguments . ssl )
https . createServer (
{
cert : fs . readFileSync ( cliArguments . certPath ) ,
key : fs . readFileSync ( cliArguments . keyPath )
} , app )
. listen (
tavernUrl . port || 443 ,
tavernUrl . hostname ,
setupTasks
) ;
else
http . createServer ( app ) . listen (
tavernUrl . port || 80 ,
tavernUrl . hostname ,
setupTasks
) ;
async function convertWebp ( ) {
const files = fs . readdirSync ( directories . characters ) . filter ( e => e . endsWith ( ".webp" ) ) ;
if ( ! files . length ) {
return ;
}
console . log ( ` ${ files . length } WEBP files will be automatically converted. ` ) ;
for ( const file of files ) {
try {
const source = path . join ( directories . characters , file ) ;
const dest = path . join ( directories . characters , path . basename ( file , ".webp" ) + ".png" ) ;
if ( fs . existsSync ( dest ) ) {
console . log ( ` ${ dest } already exists. Delete ${ source } manually ` ) ;
continue ;
}
console . log ( ` Read... ${ source } ` ) ;
const data = await charaRead ( source ) ;
console . log ( ` Convert... ${ source } -> ${ dest } ` ) ;
await webp . dwebp ( source , dest , "-o" ) ;
console . log ( ` Write... ${ dest } ` ) ;
const success = await charaWrite ( dest , data , path . parse ( dest ) . name ) ;
if ( ! success ) {
console . log ( ` Failure on ${ source } -> ${ dest } ` ) ;
continue ;
}
console . log ( ` Remove... ${ source } ` ) ;
fs . rmSync ( source ) ;
} catch ( err ) {
console . log ( err ) ;
}
}
}
function backupSettings ( ) {
const MAX _BACKUPS = 25 ;
function generateTimestamp ( ) {
const now = new Date ( ) ;
const year = now . getFullYear ( ) ;
const month = String ( now . getMonth ( ) + 1 ) . padStart ( 2 , '0' ) ;
const day = String ( now . getDate ( ) ) . padStart ( 2 , '0' ) ;
const hours = String ( now . getHours ( ) ) . padStart ( 2 , '0' ) ;
const minutes = String ( now . getMinutes ( ) ) . padStart ( 2 , '0' ) ;
const seconds = String ( now . getSeconds ( ) ) . padStart ( 2 , '0' ) ;
return ` ${ year } ${ month } ${ day } - ${ hours } ${ minutes } ${ seconds } ` ;
}
try {
if ( ! fs . existsSync ( directories . backups ) ) {
fs . mkdirSync ( directories . backups ) ;
}
const backupFile = path . join ( directories . backups , ` settings_ ${ generateTimestamp ( ) } .json ` ) ;
fs . copyFileSync ( SETTINGS _FILE , backupFile ) ;
let files = fs . readdirSync ( directories . backups ) ;
if ( files . length > MAX _BACKUPS ) {
files = files . map ( f => path . join ( directories . backups , f ) ) ;
files . sort ( ( a , b ) => fs . statSync ( a ) . mtimeMs - fs . statSync ( b ) . mtimeMs ) ;
fs . rmSync ( files [ 0 ] ) ;
}
} catch ( err ) {
console . log ( 'Could not backup settings file' , err ) ;
}
}
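// Example (illustrative): a backup taken at 2023-08-05 09:07:03 would be written to
// directories.backups as settings_20230805-090703.json; once more than MAX_BACKUPS (25)
// files exist, the oldest backup is pruned on the next run.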
function ensurePublicDirectoriesExist ( ) {
for ( const dir of Object . values ( directories ) ) {
if ( ! fs . existsSync ( dir ) ) {
fs . mkdirSync ( dir , { recursive : true } ) ;
}
}
}
const SECRETS _FILE = './secrets.json' ;
const SETTINGS _FILE = './public/settings.json' ;
const SECRET _KEYS = {
HORDE : 'api_key_horde' ,
MANCER : 'api_key_mancer' ,
OPENAI : 'api_key_openai' ,
NOVEL : 'api_key_novel' ,
CLAUDE : 'api_key_claude' ,
DEEPL : 'deepl' ,
OPENROUTER : 'api_key_openrouter' ,
SCALE : 'api_key_scale' ,
}
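// Illustrative only: secrets.json (maintained by writeSecret/readSecret below) is a flat map
// keyed by the values above, e.g. { "api_key_openai": "sk-...", "deepl": "..." }.
// The sample values are placeholders, not real credentials.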
function migrateSecrets ( ) {
if ( ! fs . existsSync ( SETTINGS _FILE ) ) {
console . log ( 'Settings file does not exist' ) ;
return ;
}
try {
let modified = false ;
const fileContents = fs . readFileSync ( SETTINGS _FILE ) ;
const settings = JSON . parse ( fileContents ) ;
const oaiKey = settings ? . api _key _openai ;
const hordeKey = settings ? . horde _settings ? . api _key ;
const novelKey = settings ? . api _key _novel ;
if ( typeof oaiKey === 'string' ) {
console . log ( 'Migrating OpenAI key...' ) ;
writeSecret ( SECRET _KEYS . OPENAI , oaiKey ) ;
delete settings . api _key _openai ;
modified = true ;
}
if ( typeof hordeKey === 'string' ) {
console . log ( 'Migrating Horde key...' ) ;
writeSecret ( SECRET _KEYS . HORDE , hordeKey ) ;
delete settings . horde _settings . api _key ;
modified = true ;
}
if ( typeof novelKey === 'string' ) {
console . log ( 'Migrating Novel key...' ) ;
writeSecret ( SECRET _KEYS . NOVEL , novelKey ) ;
delete settings . api _key _novel ;
modified = true ;
}
if ( modified ) {
console . log ( 'Writing updated settings.json...' ) ;
const settingsContent = JSON . stringify ( settings ) ;
fs . writeFileSync ( SETTINGS _FILE , settingsContent , "utf-8" ) ;
}
}
catch ( error ) {
console . error ( 'Could not migrate secrets file. Proceed with caution.' ) ;
}
}
app . post ( '/writesecret' , jsonParser , ( request , response ) => {
const key = request . body . key ;
const value = request . body . value ;
writeSecret ( key , value ) ;
return response . send ( 'ok' ) ;
} ) ;
app . post ( '/readsecretstate' , jsonParser , ( _ , response ) => {
if ( ! fs . existsSync ( SECRETS _FILE ) ) {
return response . send ( { } ) ;
}
try {
const fileContents = fs . readFileSync ( SECRETS _FILE ) ;
const secrets = JSON . parse ( fileContents ) ;
const state = { } ;
for ( const key of Object . values ( SECRET _KEYS ) ) {
state [ key ] = ! ! secrets [ key ] ; // convert to boolean
}
return response . send ( state ) ;
} catch ( error ) {
console . error ( error ) ;
return response . send ( { } ) ;
}
} ) ;
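// Illustrative response sketch: /readsecretstate never returns the stored values, only booleans
// describing which keys are set, e.g. { "api_key_horde": false, "api_key_openai": true, ... }.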
const ANONYMOUS _KEY = "0000000000" ;
app . post ( '/generate_horde' , jsonParser , async ( request , response ) => {
const api _key _horde = readSecret ( SECRET _KEYS . HORDE ) || ANONYMOUS _KEY ;
const url = 'https://horde.koboldai.net/api/v2/generate/text/async' ;
const args = {
"body" : JSON . stringify ( request . body ) ,
"headers" : {
"Content-Type" : "application/json" ,
"Client-Agent" : request . header ( 'Client-Agent' ) ,
"apikey" : api _key _horde ,
}
} ;
console . log ( args . body ) ;
try {
const data = await postAsync ( url , args ) ;
return response . send ( data ) ;
} catch ( error ) {
console . error ( error ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/viewsecrets' , jsonParser , async ( _ , response ) => {
if ( ! allowKeysExposure ) {
console . error ( 'secrets.json cannot be viewed unless allowKeysExposure is set to true in config.conf' ) ;
return response . sendStatus ( 403 ) ;
}
if ( ! fs . existsSync ( SECRETS _FILE ) ) {
console . error ( 'secrets.json does not exist' ) ;
return response . sendStatus ( 404 ) ;
}
try {
const fileContents = fs . readFileSync ( SECRETS _FILE ) ;
const secrets = JSON . parse ( fileContents ) ;
return response . send ( secrets ) ;
} catch ( error ) {
console . error ( error ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/horde_samplers' , jsonParser , async ( _ , response ) => {
try {
const samplers = Object . values ( ai _horde . ModelGenerationInputStableSamplers ) ;
response . send ( samplers ) ;
} catch ( error ) {
console . error ( error ) ;
response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/horde_models' , jsonParser , async ( _ , response ) => {
try {
const models = await ai _horde . getModels ( ) ;
response . send ( models ) ;
} catch ( error ) {
console . error ( error ) ;
response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/horde_userinfo' , jsonParser , async ( _ , response ) => {
const api _key _horde = readSecret ( SECRET _KEYS . HORDE ) ;
if ( ! api _key _horde ) {
return response . send ( { anonymous : true } ) ;
}
try {
const user = await ai _horde . findUser ( { token : api _key _horde } ) ;
return response . send ( user ) ;
} catch ( error ) {
console . error ( error ) ;
return response . sendStatus ( 500 ) ;
}
} )
app . post ( '/horde_generateimage' , jsonParser , async ( request , response ) => {
const MAX _ATTEMPTS = 200 ;
const CHECK _INTERVAL = 3000 ;
const api _key _horde = readSecret ( SECRET _KEYS . HORDE ) || ANONYMOUS _KEY ;
console . log ( 'Stable Horde request:' , request . body ) ;
try {
const generation = await ai _horde . postAsyncImageGenerate (
{
prompt : ` ${ request . body . prompt _prefix } ${ request . body . prompt } ### ${ request . body . negative _prompt } ` ,
params :
{
sampler _name : request . body . sampler ,
hires _fix : request . body . enable _hr ,
use _gfpgan : request . body . restore _faces ,
cfg _scale : request . body . scale ,
steps : request . body . steps ,
width : request . body . width ,
height : request . body . height ,
karras : Boolean ( request . body . karras ) ,
n : 1 ,
} ,
r2 : false ,
nsfw : request . body . nfsw ,
models : [ request . body . model ] ,
} ,
{ token : api _key _horde } ) ;
if ( ! generation . id ) {
console . error ( 'Image generation request is not satisfiable:' , generation . message || 'unknown error' ) ;
return response . sendStatus ( 400 ) ;
}
for ( let attempt = 0 ; attempt < MAX _ATTEMPTS ; attempt ++ ) {
await delay ( CHECK _INTERVAL ) ;
const check = await ai _horde . getImageGenerationCheck ( generation . id ) ;
console . log ( check ) ;
if ( check . done ) {
const result = await ai _horde . getImageGenerationStatus ( generation . id ) ;
return response . send ( result . generations [ 0 ] . img ) ;
}
/*
if ( ! check . is _possible ) {
return response . sendStatus ( 503 ) ;
}
*/
if ( check . faulted ) {
return response . sendStatus ( 500 ) ;
}
}
return response . sendStatus ( 504 ) ;
} catch ( error ) {
console . error ( error ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/google_translate' , jsonParser , async ( request , response ) => {
const { generateRequestUrl , normaliseResponse } = require ( 'google-translate-api-browser' ) ;
const text = request . body . text ;
const lang = request . body . lang ;
if ( ! text || ! lang ) {
return response . sendStatus ( 400 ) ;
}
console . log ( 'Input text: ' + text ) ;
const url = generateRequestUrl ( text , { to : lang } ) ;
https . get ( url , ( resp ) => {
let data = '' ;
resp . on ( 'data' , ( chunk ) => {
data += chunk ;
} ) ;
resp . on ( 'end' , ( ) => {
const result = normaliseResponse ( JSON . parse ( data ) ) ;
console . log ( 'Translated text: ' + result . text ) ;
return response . send ( result . text ) ;
} ) ;
} ) . on ( "error" , ( err ) => {
console . log ( "Translation error: " + err . message ) ;
return response . sendStatus ( 500 ) ;
} ) ;
} ) ;
app . post ( '/deepl_translate' , jsonParser , async ( request , response ) => {
const key = readSecret ( SECRET _KEYS . DEEPL ) ;
if ( ! key ) {
return response . sendStatus ( 401 ) ;
}
const text = request . body . text ;
const lang = request . body . lang ;
if ( ! text || ! lang ) {
return response . sendStatus ( 400 ) ;
}
console . log ( 'Input text: ' + text ) ;
const fetch = require ( 'node-fetch' ) . default ;
const params = new URLSearchParams ( ) ;
params . append ( 'text' , text ) ;
params . append ( 'target_lang' , lang ) ;
try {
const result = await fetch ( 'https://api-free.deepl.com/v2/translate' , {
method : 'POST' ,
body : params ,
headers : {
'Accept' : 'application/json' ,
'Authorization' : ` DeepL-Auth-Key ${ key } ` ,
'Content-Type' : 'application/x-www-form-urlencoded' ,
} ,
timeout : 0 ,
} ) ;
if ( ! result . ok ) {
return response . sendStatus ( result . status ) ;
}
const json = await result . json ( ) ;
console . log ( 'Translated text: ' + json . translations [ 0 ] . text ) ;
return response . send ( json . translations [ 0 ] . text ) ;
} catch ( error ) {
console . log ( "Translation error: " + error . message ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/novel_tts' , jsonParser , async ( request , response ) => {
const token = readSecret ( SECRET _KEYS . NOVEL ) ;
if ( ! token ) {
return response . sendStatus ( 401 ) ;
}
const text = request . body . text ;
const voice = request . body . voice ;
if ( ! text || ! voice ) {
return response . sendStatus ( 400 ) ;
}
try {
const fetch = require ( 'node-fetch' ) . default ;
const url = ` ${ api _novelai } /ai/generate-voice?text= ${ encodeURIComponent ( text ) } &voice=-1&seed= ${ encodeURIComponent ( voice ) } &opus=false&version=v2 ` ;
const result = await fetch ( url , {
method : 'GET' ,
headers : {
'Authorization' : ` Bearer ${ token } ` ,
'Accept' : 'audio/mpeg' ,
} ,
timeout : 0 ,
} ) ;
if ( ! result . ok ) {
return response . sendStatus ( result . status ) ;
}
const chunks = await readAllChunks ( result . body ) ;
const buffer = Buffer . concat ( chunks ) ;
response . setHeader ( 'Content-Type' , 'audio/mpeg' ) ;
return response . send ( buffer ) ;
}
catch ( error ) {
console . error ( error ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/delete_sprite' , jsonParser , async ( request , response ) => {
const label = request . body . label ;
const name = request . body . name ;
if ( ! label || ! name ) {
return response . sendStatus ( 400 ) ;
}
try {
const spritesPath = path . join ( directories . characters , name ) ;
// No sprites folder exists, or not a directory
if ( ! fs . existsSync ( spritesPath ) || ! fs . statSync ( spritesPath ) . isDirectory ( ) ) {
return response . sendStatus ( 404 ) ;
}
const files = fs . readdirSync ( spritesPath ) ;
// Remove existing sprite with the same label
for ( const file of files ) {
if ( path . parse ( file ) . name === label ) {
fs . rmSync ( path . join ( spritesPath , file ) ) ;
}
}
return response . sendStatus ( 200 ) ;
} catch ( error ) {
console . error ( error ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/upload_sprite_pack' , urlencodedParser , async ( request , response ) => {
const file = request . file ;
const name = request . body . name ;
if ( ! file || ! name ) {
return response . sendStatus ( 400 ) ;
}
try {
const spritesPath = path . join ( directories . characters , name ) ;
// Create sprites folder if it doesn't exist
if ( ! fs . existsSync ( spritesPath ) ) {
fs . mkdirSync ( spritesPath ) ;
}
// Path to sprites is not a directory. This should never happen.
if ( ! fs . statSync ( spritesPath ) . isDirectory ( ) ) {
return response . sendStatus ( 404 ) ;
}
const spritePackPath = path . join ( "./uploads/" , file . filename ) ;
const sprites = await getImageBuffers ( spritePackPath ) ;
const files = fs . readdirSync ( spritesPath ) ;
for ( const [ filename , buffer ] of sprites ) {
// Remove existing sprite with the same label
const existingFile = files . find ( file => path . parse ( file ) . name === path . parse ( filename ) . name ) ;
if ( existingFile ) {
fs . rmSync ( path . join ( spritesPath , existingFile ) ) ;
}
// Write sprite buffer to disk
const pathToSprite = path . join ( spritesPath , filename ) ;
fs . writeFileSync ( pathToSprite , buffer ) ;
}
// Remove uploaded ZIP file
fs . rmSync ( spritePackPath ) ;
return response . send ( { count : sprites . length } ) ;
} catch ( error ) {
console . error ( error ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/upload_sprite' , urlencodedParser , async ( request , response ) => {
const file = request . file ;
const label = request . body . label ;
const name = request . body . name ;
if ( ! file || ! label || ! name ) {
return response . sendStatus ( 400 ) ;
}
try {
const spritesPath = path . join ( directories . characters , name ) ;
// Create sprites folder if it doesn't exist
if ( ! fs . existsSync ( spritesPath ) ) {
fs . mkdirSync ( spritesPath ) ;
}
// Path to sprites is not a directory. This should never happen.
if ( ! fs . statSync ( spritesPath ) . isDirectory ( ) ) {
return response . sendStatus ( 404 ) ;
}
const files = fs . readdirSync ( spritesPath ) ;
// Remove existing sprite with the same label
for ( const file of files ) {
if ( path . parse ( file ) . name === label ) {
fs . rmSync ( path . join ( spritesPath , file ) ) ;
}
}
const filename = label + path . parse ( file . originalname ) . ext ;
const spritePath = path . join ( "./uploads/" , file . filename ) ;
const pathToFile = path . join ( spritesPath , filename ) ;
// Copy uploaded file to sprites folder
fs . cpSync ( spritePath , pathToFile ) ;
// Remove uploaded file
fs . rmSync ( spritePath ) ;
return response . sendStatus ( 200 ) ;
} catch ( error ) {
console . error ( error ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
app . post ( '/import_custom' , jsonParser , async ( request , response ) => {
if ( ! request . body . url ) {
return response . sendStatus ( 400 ) ;
}
try {
const url = request . body . url ;
let result ;
const chubParsed = parseChubUrl ( url ) ;
if ( chubParsed ? . type === 'character' ) {
console . log ( 'Downloading chub character:' , chubParsed . id ) ;
result = await downloadChubCharacter ( chubParsed . id ) ;
}
else if ( chubParsed ? . type === 'lorebook' ) {
console . log ( 'Downloading chub lorebook:' , chubParsed . id ) ;
result = await downloadChubLorebook ( chubParsed . id ) ;
}
else {
return response . sendStatus ( 404 ) ;
}
response . set ( 'Content-Type' , result . fileType ) ;
response . set ( 'Content-Disposition' , ` attachment; filename=" ${ result . fileName } " ` ) ;
response . set ( 'X-Custom-Content-Type' , chubParsed ? . type ) ;
return response . send ( result . buffer ) ;
} catch ( error ) {
console . log ( 'Importing custom content failed' , error ) ;
return response . sendStatus ( 500 ) ;
}
} ) ;
async function downloadChubLorebook ( id ) {
const fetch = require ( 'node-fetch' ) . default ;
const result = await fetch ( 'https://api.chub.ai/api/lorebooks/download' , {
method : 'POST' ,
headers : { 'Content-Type' : 'application/json' } ,
body : JSON . stringify ( {
"fullPath" : id ,
"format" : "SILLYTAVERN" ,
} ) ,
} ) ;
if ( ! result . ok ) {
console . log ( await result . text ( ) ) ;
throw new Error ( 'Failed to download lorebook' ) ;
}
const name = id . split ( '/' ) . pop ( ) ;
const buffer = await result . buffer ( ) ;
const fileName = ` ${ sanitize ( name ) } .json ` ;
const fileType = result . headers . get ( 'content-type' ) ;
return { buffer , fileName , fileType } ;
}
async function downloadChubCharacter ( id ) {
const fetch = require ( 'node-fetch' ) . default ;
const result = await fetch ( 'https://api.chub.ai/api/characters/download' , {
method : 'POST' ,
headers : { 'Content-Type' : 'application/json' } ,
body : JSON . stringify ( {
"format" : "tavern" ,
"fullPath" : id ,
} )
} ) ;
if ( ! result . ok ) {
throw new Error ( 'Failed to download character' ) ;
}
const buffer = await result . buffer ( ) ;
const fileName = result . headers . get ( 'content-disposition' ) ? . split ( 'filename=' ) [ 1 ] || ` ${ sanitize ( id ) } .png ` ;
const fileType = result . headers . get ( 'content-type' ) ;
return { buffer , fileName , fileType } ;
}
function parseChubUrl ( str ) {
const splitStr = str . split ( '/' ) ;
const length = splitStr . length ;
if ( length < 2 ) {
return null ;
}
let domainIndex = - 1 ;
splitStr . forEach ( ( part , index ) => {
if ( part === 'www.chub.ai' || part === 'chub.ai' ) {
domainIndex = index ;
}
} )
const lastTwo = domainIndex !== - 1 ? splitStr . slice ( domainIndex + 1 ) : splitStr ;
const firstPart = lastTwo [ 0 ] . toLowerCase ( ) ;
if ( firstPart === 'characters' || firstPart === 'lorebooks' ) {
const type = firstPart === 'characters' ? 'character' : 'lorebook' ;
const id = type === 'character' ? lastTwo . slice ( 1 ) . join ( '/' ) : lastTwo . join ( '/' ) ;
return {
id : id ,
type : type
} ;
} else if ( length === 2 ) {
return {
id : lastTwo . join ( '/' ) ,
type : 'character'
} ;
}
return null ;
}
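// Illustrative examples of parseChubUrl (the paths are hypothetical, only the URL shape matters):
//   parseChubUrl('https://chub.ai/characters/some-author/some-character')
//     -> { id: 'some-author/some-character', type: 'character' }
//   parseChubUrl('https://www.chub.ai/lorebooks/some-author/some-lorebook')
//     -> { id: 'lorebooks/some-author/some-lorebook', type: 'lorebook' }
//   parseChubUrl('some-author/some-character')
//     -> { id: 'some-author/some-character', type: 'character' }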
function importRisuSprites ( data ) {
try {
const name = data ? . data ? . name ;
const risuData = data ? . data ? . extensions ? . risuai ;
// Not a Risu AI character
if ( ! risuData || ! name ) {
return ;
}
let images = [ ] ;
if ( Array . isArray ( risuData . additionalAssets ) ) {
images = images . concat ( risuData . additionalAssets ) ;
}
if ( Array . isArray ( risuData . emotions ) ) {
images = images . concat ( risuData . emotions ) ;
}
// No sprites to import
if ( images . length === 0 ) {
return ;
}
// Create sprites folder if it doesn't exist
const spritesPath = path . join ( directories . characters , name ) ;
if ( ! fs . existsSync ( spritesPath ) ) {
fs . mkdirSync ( spritesPath ) ;
}
// Path to sprites is not a directory. This should never happen.
if ( ! fs . statSync ( spritesPath ) . isDirectory ( ) ) {
return ;
}
console . log ( ` RisuAI: Found ${ images . length } sprites for ${ name } . Writing to disk. ` ) ;
const files = fs . readdirSync ( spritesPath ) ;
outer : for ( const [ label , fileBase64 ] of images ) {
// Remove existing sprite with the same label
for ( const file of files ) {
if ( path . parse ( file ) . name === label ) {
console . log ( ` RisuAI: The sprite ${ label } for ${ name } already exists. Skipping. ` ) ;
continue outer ;
}
}
const filename = label + '.png' ;
const pathToFile = path . join ( spritesPath , filename ) ;
fs . writeFileSync ( pathToFile , fileBase64 , { encoding : 'base64' } ) ;
}
// Remove additionalAssets and emotions from data (they are now in the sprites folder)
delete data . data . extensions . risuai . additionalAssets ;
delete data . data . extensions . risuai . emotions ;
} catch ( error ) {
console . error ( error ) ;
}
}
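// Illustrative only: each entry in the images array is expected to be a [label, base64Data]
// pair (see the destructuring above), e.g. ['joy', 'iVBORw0KGgo...'], which gets written to
// the character's sprites folder as joy.png. The sample values are hypothetical.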
function writeSecret ( key , value ) {
if ( ! fs . existsSync ( SECRETS _FILE ) ) {
const emptyFile = JSON . stringify ( { } ) ;
fs . writeFileSync ( SECRETS _FILE , emptyFile , "utf-8" ) ;
}
const fileContents = fs . readFileSync ( SECRETS _FILE ) ;
const secrets = JSON . parse ( fileContents ) ;
secrets [ key ] = value ;
fs . writeFileSync ( SECRETS _FILE , JSON . stringify ( secrets ) , "utf-8" ) ;
}
function readSecret ( key ) {
if ( ! fs . existsSync ( SECRETS _FILE ) ) {
return undefined ;
}
const fileContents = fs . readFileSync ( SECRETS _FILE ) ;
const secrets = JSON . parse ( fileContents ) ;
return secrets [ key ] ;
}
async function readAllChunks ( readableStream ) {
return new Promise ( ( resolve , reject ) => {
// Consume the readable stream
const chunks = [ ] ;
readableStream . on ( 'data' , ( chunk ) => {
chunks . push ( chunk ) ;
} ) ;
readableStream . on ( 'end' , ( ) => {
console . log ( 'Finished reading the stream.' ) ;
resolve ( chunks ) ;
} ) ;
readableStream . on ( 'error' , ( error ) => {
console . error ( 'Error while reading the stream:' , error ) ;
reject ( error ) ;
} ) ;
} ) ;
}
async function getImageBuffers ( zipFilePath ) {
return new Promise ( ( resolve , reject ) => {
// Check if the zip file exists
if ( ! fs . existsSync ( zipFilePath ) ) {
reject ( new Error ( 'File not found' ) ) ;
return ;
}
const imageBuffers = [ ] ;
yauzl . open ( zipFilePath , { lazyEntries : true } , ( err , zipfile ) => {
if ( err ) {
reject ( err ) ;
} else {
zipfile . readEntry ( ) ;
zipfile . on ( 'entry' , ( entry ) => {
const mimeType = mime . lookup ( entry . fileName ) ;
if ( mimeType && mimeType . startsWith ( 'image/' ) && ! entry . fileName . startsWith ( '__MACOSX' ) ) {
console . log ( ` Extracting ${ entry . fileName } ` ) ;
zipfile . openReadStream ( entry , ( err , readStream ) => {
if ( err ) {
reject ( err ) ;
} else {
const chunks = [ ] ;
readStream . on ( 'data' , ( chunk ) => {
chunks . push ( chunk ) ;
} ) ;
readStream . on ( 'end' , ( ) => {
imageBuffers . push ( [ path . parse ( entry . fileName ) . base , Buffer . concat ( chunks ) ] ) ;
zipfile . readEntry ( ) ; // Continue to the next entry
} ) ;
}
} ) ;
} else {
zipfile . readEntry ( ) ; // Continue to the next entry
}
} ) ;
zipfile . on ( 'end' , ( ) => {
resolve ( imageBuffers ) ;
} ) ;
zipfile . on ( 'error' , ( err ) => {
reject ( err ) ;
} ) ;
}
} ) ;
} ) ;
}
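// Illustrative usage sketch (not in the original source): getImageBuffers resolves to an array
// of [fileName, Buffer] pairs for every image entry in the ZIP, skipping __MACOSX metadata.
// './uploads/pack.zip' is a hypothetical path used only for illustration.
//
//   const sprites = await getImageBuffers('./uploads/pack.zip');
//   for (const [filename, buffer] of sprites) {
//       console.log(`${filename}: ${buffer.length} bytes`);
//   }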
/**
 * This function extracts the extension information from the manifest file.
 * @param {string} extensionPath - The path of the extension folder
 * @returns {Object} - Returns the manifest data as an object
 */
async function getManifest ( extensionPath ) {
const manifestPath = path . join ( extensionPath , 'manifest.json' ) ;
// Check if manifest.json exists
if ( ! fs . existsSync ( manifestPath ) ) {
throw new Error ( ` Manifest file not found at ${ manifestPath } ` ) ;
}
const manifest = JSON . parse ( fs . readFileSync ( manifestPath , 'utf8' ) ) ;
return manifest ;
}
async function checkIfRepoIsUpToDate ( extensionPath ) {
const git = simpleGit ( ) ;
await git . cwd ( extensionPath ) . fetch ( 'origin' ) ;
const currentBranch = await git . cwd ( extensionPath ) . branch ( ) ;
const currentCommitHash = await git . cwd ( extensionPath ) . revparse ( [ 'HEAD' ] ) ;
const log = await git . cwd ( extensionPath ) . log ( {
from : currentCommitHash ,
to : ` origin/ ${ currentBranch . current } ` ,
} ) ;
// Fetch remote repository information
const remotes = await git . cwd ( extensionPath ) . getRemotes ( true ) ;
return {
isUpToDate : log . total === 0 ,
remoteUrl : remotes [ 0 ] . refs . fetch , // URL of the remote repository
} ;
}
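// Illustrative usage sketch (not in the original source); 'SomeExtension' is a hypothetical name:
//
//   const repoPath = path.join(directories.extensions, 'third-party', 'SomeExtension');
//   const { isUpToDate, remoteUrl } = await checkIfRepoIsUpToDate(repoPath);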
/**
 * HTTP POST handler function to clone a git repository from a provided URL, read the extension manifest,
 * and return extension information and path.
 *
 * @param {Object} request - HTTP Request object, expects a JSON body with a 'url' property.
 * @param {Object} response - HTTP Response object used to respond to the HTTP request.
 *
 * @returns {void}
 */
app . post ( '/get_extension' , jsonParser , async ( request , response ) => {
const git = simpleGit ( ) ;
if ( ! request . body . url ) {
return response . status ( 400 ) . send ( 'Bad Request: URL is required in the request body.' ) ;
}
try {
// make sure the third-party directory exists
if ( ! fs . existsSync ( directories . extensions + '/third-party' ) ) {
fs . mkdirSync ( directories . extensions + '/third-party' ) ;
}
const url = request . body . url ;
const extensionPath = path . join ( directories . extensions , 'third-party' , path . basename ( url , '.git' ) ) ;
if ( fs . existsSync ( extensionPath ) ) {
return response . status ( 409 ) . send ( ` Directory already exists at ${ extensionPath } ` ) ;
}
await git . clone ( url , extensionPath ) ;
console . log ( ` Extension has been cloned at ${ extensionPath } ` ) ;
const { version , author , display _name } = await getManifest ( extensionPath ) ;
return response . send ( { version , author , display _name , extensionPath } ) ;
} catch ( error ) {
console . log ( 'Importing custom content failed' , error ) ;
return response . status ( 500 ) . send ( ` Server Error: ${ error . message } ` ) ;
}
} ) ;
/**
 * HTTP POST handler function to pull the latest updates from a git repository
 * based on the extension name provided in the request body. It returns the latest commit hash,
 * the path of the extension, the status of the repository (whether it's up to date or not),
 * and the remote URL of the repository.
 *
 * @param {Object} request - HTTP Request object, expects a JSON body with an 'extensionName' property.
 * @param {Object} response - HTTP Response object used to respond to the HTTP request.
 *
 * @returns {void}
 */
app . post ( '/update_extension' , jsonParser , async ( request , response ) => {
const git = simpleGit ( ) ;
if ( ! request . body . extensionName ) {
return response . status ( 400 ) . send ( 'Bad Request: extensionName is required in the request body.' ) ;
}
try {
const extensionName = request . body . extensionName ;
const extensionPath = path . join ( directories . extensions , 'third-party' , extensionName ) ;
if ( ! fs . existsSync ( extensionPath ) ) {
return response . status ( 404 ) . send ( ` Directory does not exist at ${ extensionPath } ` ) ;
}
const { isUpToDate , remoteUrl } = await checkIfRepoIsUpToDate ( extensionPath ) ;
const currentBranch = await git . cwd ( extensionPath ) . branch ( ) ;
if ( ! isUpToDate ) {
await git . cwd ( extensionPath ) . pull ( 'origin' , currentBranch . current ) ;
console . log ( ` Extension has been updated at ${ extensionPath } ` ) ;
} else {
console . log ( ` Extension is up to date at ${ extensionPath } ` ) ;
}
await git . cwd ( extensionPath ) . fetch ( 'origin' ) ;
const fullCommitHash = await git . cwd ( extensionPath ) . revparse ( [ 'HEAD' ] ) ;
const shortCommitHash = fullCommitHash . slice ( 0 , 7 ) ;
return response . send ( { shortCommitHash , extensionPath , isUpToDate , remoteUrl } ) ;
} catch ( error ) {
console . log ( 'Updating custom content failed' , error ) ;
return response . status ( 500 ) . send ( ` Server Error: ${ error . message } ` ) ;
}
} ) ;
/**
 * HTTP POST handler function to get the current git commit hash and branch name for a given extension.
 * It checks whether the repository is up to date with the remote, and returns the status along with
 * the remote URL of the repository.
 *
 * @param {Object} request - HTTP Request object, expects a JSON body with an 'extensionName' property.
 * @param {Object} response - HTTP Response object used to respond to the HTTP request.
 *
 * @returns {void}
 */
app . post ( '/get_extension_version' , jsonParser , async ( request , response ) => {
const git = simpleGit ( ) ;
if ( ! request . body . extensionName ) {
return response . status ( 400 ) . send ( 'Bad Request: extensionName is required in the request body.' ) ;
}
try {
const extensionName = request . body . extensionName ;
const extensionPath = path . join ( directories . extensions , 'third-party' , extensionName ) ;
if ( ! fs . existsSync ( extensionPath ) ) {
return response . status ( 404 ) . send ( ` Directory does not exist at ${ extensionPath } ` ) ;
}
const currentBranch = await git . cwd ( extensionPath ) . branch ( ) ;
// get only the working branch
const currentBranchName = currentBranch . current ;
await git . cwd ( extensionPath ) . fetch ( 'origin' ) ;
const currentCommitHash = await git . cwd ( extensionPath ) . revparse ( [ 'HEAD' ] ) ;
console . log ( currentBranch , currentCommitHash ) ;
const { isUpToDate , remoteUrl } = await checkIfRepoIsUpToDate ( extensionPath ) ;
return response . send ( { currentBranchName , currentCommitHash , isUpToDate , remoteUrl } ) ;
} catch ( error ) {
console . log ( 'Getting extension version failed' , error ) ;
return response . status ( 500 ) . send ( ` Server Error: ${ error . message } ` ) ;
}
}
) ;
/**
 * HTTP POST handler function to delete a git repository based on the extension name provided in the request body.
 *
 * @param {Object} request - HTTP Request object, expects a JSON body with an 'extensionName' property.
 * @param {Object} response - HTTP Response object used to respond to the HTTP request.
 *
 * @returns {void}
 */
app . post ( '/delete_extension' , jsonParser , async ( request , response ) => {
if ( ! request . body . extensionName ) {
return response . status ( 400 ) . send ( 'Bad Request: extensionName is required in the request body.' ) ;
}
// Sanitize the extension name to prevent directory traversal
const extensionName = sanitize ( request . body . extensionName ) ;
try {
const extensionPath = path . join ( directories . extensions , 'third-party' , extensionName ) ;
if ( ! fs . existsSync ( extensionPath ) ) {
return response . status ( 404 ) . send ( ` Directory does not exist at ${ extensionPath } ` ) ;
}
await fs . promises . rmdir ( extensionPath , { recursive : true } ) ;
console . log ( ` Extension has been deleted at ${ extensionPath } ` ) ;
return response . send ( ` Extension has been deleted at ${ extensionPath } ` ) ;
} catch ( error ) {
console . log ( 'Deleting custom content failed' , error ) ;
return response . status ( 500 ) . send ( ` Server Error: ${ error . message } ` ) ;
}
} ) ;