Move transformers.js model cache under the data root

Cohee 2024-08-16 11:59:41 +00:00
parent 24a283c724
commit c2057da348
3 changed files with 46 additions and 14 deletions

index.d.ts

@@ -9,6 +9,11 @@ declare global {
             };
         }
     }
+
+    /**
+     * The root directory for user data.
+     */
+    var DATA_ROOT: string;
 }
 
 declare module 'express-session' {
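Note: declaring DATA_ROOT on the global scope lets any server module read the data root once initUserStorage() has assigned it, without importing it from the users module. A minimal consumer sketch, not part of this commit (the helper name is illustrative):

import path from 'path';

// Resolves a path under the shared data root set by initUserStorage().
// global.DATA_ROOT is typed by the declaration added to index.d.ts above.
export function resolveDataPath(...segments) {
    return path.join(global.DATA_ROOT, ...segments);
}

// e.g. resolveDataPath('_cache') -> '<data root>/_cache'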

src/transformers.mjs

@ -1,6 +1,7 @@
import { pipeline, env, RawImage, Pipeline } from 'sillytavern-transformers'; import { pipeline, env, RawImage, Pipeline } from 'sillytavern-transformers';
import { getConfigValue } from './util.js'; import { getConfigValue } from './util.js';
import path from 'path'; import path from 'path';
import fs from 'fs';
configureTransformers(); configureTransformers();
@@ -48,7 +49,7 @@ const tasks = {
         configField: 'extras.textToSpeechModel',
         quantized: false,
     },
-}
+};
 
 /**
  * Gets a RawImage object from a base64-encoded image.
@@ -85,6 +86,36 @@ function getModelForTask(task) {
     }
 }
 
+async function migrateCacheToDataDir() {
+    const oldCacheDir = path.join(process.cwd(), 'cache');
+    const newCacheDir = path.join(global.DATA_ROOT, '_cache');
+
+    if (!fs.existsSync(newCacheDir)) {
+        fs.mkdirSync(newCacheDir, { recursive: true });
+    }
+
+    if (fs.existsSync(oldCacheDir) && fs.statSync(oldCacheDir).isDirectory()) {
+        const files = fs.readdirSync(oldCacheDir);
+
+        if (files.length === 0) {
+            return;
+        }
+
+        console.log('Migrating model cache files to data directory. Please wait...');
+
+        for (const file of files) {
+            try {
+                const oldPath = path.join(oldCacheDir, file);
+                const newPath = path.join(newCacheDir, file);
+                fs.cpSync(oldPath, newPath, { recursive: true, force: true });
+                fs.rmSync(oldPath, { recursive: true, force: true });
+            } catch (error) {
+                console.warn('Failed to migrate cache file. The model will be re-downloaded.', error);
+            }
+        }
+    }
+}
+
 /**
  * Gets the transformers.js pipeline for a given task.
  * @param {import('sillytavern-transformers').PipelineType} task The task to get the pipeline for
@@ -92,6 +123,8 @@ function getModelForTask(task) {
  * @returns {Promise<Pipeline>} Pipeline for the task
  */
 async function getPipeline(task, forceModel = '') {
+    await migrateCacheToDataDir();
+
     if (tasks[task].pipeline) {
         if (forceModel === '' || tasks[task].currentModel === forceModel) {
             return tasks[task].pipeline;
@@ -100,11 +133,11 @@ async function getPipeline(task, forceModel = '') {
         await tasks[task].pipeline.dispose();
     }
 
-    const cache_dir = path.join(process.cwd(), 'cache');
+    const cacheDir = path.join(global.DATA_ROOT, '_cache');
     const model = forceModel || getModelForTask(task);
     const localOnly = getConfigValue('extras.disableAutoDownload', false);
     console.log('Initializing transformers.js pipeline for task', task, 'with model', model);
-    const instance = await pipeline(task, model, { cache_dir, quantized: tasks[task].quantized ?? true, local_files_only: localOnly });
+    const instance = await pipeline(task, model, { cache_dir: cacheDir, quantized: tasks[task].quantized ?? true, local_files_only: localOnly });
     tasks[task].pipeline = instance;
     tasks[task].currentModel = model;
     return instance;
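Note: the migration copies each cache entry with fs.cpSync before deleting the original with fs.rmSync, so an interrupted copy leaves the old ./cache entry in place and the affected model is simply re-downloaded on the next run; copying then removing also keeps the move working when the data root sits on a different filesystem. The same _cache directory is then handed to transformers.js through the pipeline options. A rough sketch of that options shape, with the task and model id as placeholders only:

import path from 'path';
import { pipeline } from 'sillytavern-transformers';

// Placeholder task/model, mirroring the options built in getPipeline() above.
const cacheDir = path.join(global.DATA_ROOT, '_cache');
const instance = await pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2', {
    cache_dir: cacheDir,       // model files now land under <data root>/_cache
    quantized: true,           // per-task value, tasks[task].quantized ?? true
    local_files_only: false,   // true when extras.disableAutoDownload is set
});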

src/users.js

@@ -19,12 +19,6 @@ const AVATAR_PREFIX = 'avatar:';
 const ENABLE_ACCOUNTS = getConfigValue('enableUserAccounts', false);
 const ANON_CSRF_SECRET = crypto.randomBytes(64).toString('base64');
 
-/**
- * The root directory for user data.
- * @type {string}
- */
-let DATA_ROOT = './data';
-
 /**
  * Cache for user directories.
  * @type {Map<string, UserDirectoryList>}
@@ -138,7 +132,7 @@ async function migrateUserData() {
     console.log();
     console.log(color.magenta('Preparing to migrate user data...'));
-    console.log(`All public data will be moved to the ${DATA_ROOT} directory.`);
+    console.log(`All public data will be moved to the ${global.DATA_ROOT} directory.`);
     console.log('This process may take a while depending on the amount of data to move.');
     console.log(`Backups will be placed in the ${PUBLIC_DIRECTORIES.backups} directory.`);
     console.log(`The process will start in ${TIMEOUT} seconds. Press Ctrl+C to cancel.`);
@@ -352,11 +346,11 @@ function toAvatarKey(handle) {
  * @returns {Promise<void>}
  */
 async function initUserStorage(dataRoot) {
-    DATA_ROOT = dataRoot;
-    console.log('Using data root:', color.green(DATA_ROOT));
+    global.DATA_ROOT = dataRoot;
+    console.log('Using data root:', color.green(global.DATA_ROOT));
     console.log();
     await storage.init({
-        dir: path.join(DATA_ROOT, '_storage'),
+        dir: path.join(global.DATA_ROOT, '_storage'),
         ttl: false, // Never expire
     });
@@ -457,7 +451,7 @@ function getUserDirectories(handle) {
     const directories = structuredClone(USER_DIRECTORY_TEMPLATE);
 
     for (const key in directories) {
-        directories[key] = path.join(DATA_ROOT, handle, USER_DIRECTORY_TEMPLATE[key]);
+        directories[key] = path.join(global.DATA_ROOT, handle, USER_DIRECTORY_TEMPLATE[key]);
     }
 
     DIRECTORIES_CACHE.set(handle, directories);
     return directories;
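Note: with the module-local let DATA_ROOT = './data' removed, initUserStorage() is now the single place that assigns global.DATA_ROOT, and everything downstream resolves against it: the _storage store, the per-user directories, and (after this commit) the transformers.js _cache. A small sketch of the resulting layout, with the user handle purely illustrative:

import path from 'path';

const handle = 'default-user'; // illustrative handle
const storageDir = path.join(global.DATA_ROOT, '_storage'); // key-value store from initUserStorage()
const modelCache = path.join(global.DATA_ROOT, '_cache');   // transformers.js cache, moved here from ./cache
const userRoot = path.join(global.DATA_ROOT, handle);       // per-user directories from USER_DIRECTORY_TEMPLATE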