2023-10-20 14:03:26 +02:00
|
|
|
import { pipeline, env, RawImage, Pipeline } from 'sillytavern-transformers';
|
2023-09-14 22:40:13 +02:00
|
|
|
import { getConfigValue } from './util.js';
|
2023-09-14 22:12:33 +02:00
|
|
|
import path from 'path';
|
2024-08-16 13:59:41 +02:00
|
|
|
import fs from 'fs';
|
2023-09-14 22:12:33 +02:00
|
|
|
|
|
|
|
// Apply ONNX/WASM environment settings once at module load
configureTransformers();
|
|
|
|
|
|
|
|
/**
 * Applies global transformers.js ONNX/WASM environment settings.
 */
function configureTransformers() {
    const wasmSettings = env.backends.onnx.wasm;
    // Limit the number of threads to 1 to avoid issues on Android
    wasmSettings.numThreads = 1;
    // Use WASM from a local folder to avoid CDN connections
    wasmSettings.wasmPaths = `${path.join(process.cwd(), 'dist')}${path.sep}`;
}
|
|
|
|
|
|
|
|
/**
 * Registry of supported transformers.js tasks.
 * Each entry tracks the default model, the config key that may override it,
 * whether to load a quantized model, the lazily-created pipeline instance,
 * and the model the current pipeline was built with. `currentModel` is
 * declared up front because getPipeline() reads and writes it.
 * @type {Record<string, { defaultModel: string, pipeline: Pipeline|null, currentModel: string|null, configField: string, quantized: boolean }>}
 */
const tasks = {
    'text-classification': {
        defaultModel: 'Cohee/distilbert-base-uncased-go-emotions-onnx',
        pipeline: null,
        currentModel: null,
        configField: 'extras.classificationModel',
        quantized: true,
    },
    'image-to-text': {
        defaultModel: 'Xenova/vit-gpt2-image-captioning',
        pipeline: null,
        currentModel: null,
        configField: 'extras.captioningModel',
        quantized: true,
    },
    'feature-extraction': {
        defaultModel: 'Xenova/all-mpnet-base-v2',
        pipeline: null,
        currentModel: null,
        configField: 'extras.embeddingModel',
        quantized: true,
    },
    'text-generation': {
        defaultModel: 'Cohee/fooocus_expansion-onnx',
        pipeline: null,
        currentModel: null,
        configField: 'extras.promptExpansionModel',
        quantized: false,
    },
    'automatic-speech-recognition': {
        defaultModel: 'Xenova/whisper-small',
        pipeline: null,
        currentModel: null,
        configField: 'extras.speechToTextModel',
        quantized: true,
    },
    'text-to-speech': {
        defaultModel: 'Xenova/speecht5_tts',
        pipeline: null,
        currentModel: null,
        configField: 'extras.textToSpeechModel',
        quantized: false,
    },
};
|
2023-09-14 22:12:33 +02:00
|
|
|
|
2023-10-20 14:03:26 +02:00
|
|
|
/**
 * Gets a RawImage object from a base64-encoded image.
 * Best-effort: invalid base64 or unsupported image data yields null instead
 * of throwing, but the failure is logged so it is not silently lost.
 * @param {string} image Base64-encoded image
 * @returns {Promise<RawImage|null>} Object representing the image, or null on failure
 */
async function getRawImage(image) {
    try {
        const buffer = Buffer.from(image, 'base64');
        const byteArray = new Uint8Array(buffer);
        const blob = new Blob([byteArray]);

        const rawImage = await RawImage.fromBlob(blob);
        return rawImage;
    } catch (error) {
        // Previously swallowed silently; log so decode failures are diagnosable
        console.warn('Failed to create RawImage from base64 data', error);
        return null;
    }
}
|
|
|
|
|
2023-10-20 14:03:26 +02:00
|
|
|
/**
 * Gets the model to use for a given transformers.js task.
 * Reads the task's config override; falls back to the task's default model
 * when the config value is missing, empty, or unreadable.
 * @param {string} task The task to get the model for
 * @returns {string} The model to use for the given task
 */
function getModelForTask(task) {
    const defaultModel = tasks[task].defaultModel;

    try {
        const model = getConfigValue(tasks[task].configField, null);
        return model || defaultModel;
    } catch (error) {
        // Message previously always said "classification model" regardless of task
        console.warn(`Failed to read config.yaml, using default ${task} model.`);
        return defaultModel;
    }
}
|
|
|
|
|
2024-08-16 13:59:41 +02:00
|
|
|
/**
 * Moves legacy model cache files from "<cwd>/cache" into "<DATA_ROOT>/_cache".
 * Always ensures the target directory exists. Individual file failures are
 * logged and skipped; the affected model is simply re-downloaded later.
 */
async function migrateCacheToDataDir() {
    const legacyDir = path.join(process.cwd(), 'cache');
    const targetDir = path.join(global.DATA_ROOT, '_cache');

    if (!fs.existsSync(targetDir)) {
        fs.mkdirSync(targetDir, { recursive: true });
    }

    // Nothing to migrate when the legacy location is absent or not a directory
    if (!fs.existsSync(legacyDir) || !fs.statSync(legacyDir).isDirectory()) {
        return;
    }

    const entries = fs.readdirSync(legacyDir);

    if (entries.length === 0) {
        return;
    }

    console.log('Migrating model cache files to data directory. Please wait...');

    for (const entry of entries) {
        try {
            const source = path.join(legacyDir, entry);
            const destination = path.join(targetDir, entry);
            fs.cpSync(source, destination, { recursive: true, force: true });
            fs.rmSync(source, { recursive: true, force: true });
        } catch (error) {
            console.warn('Failed to migrate cache file. The model will be re-downloaded.', error);
        }
    }
}
|
|
|
|
|
2023-10-20 14:03:26 +02:00
|
|
|
/**
 * Gets the transformers.js pipeline for a given task.
 * Reuses the cached pipeline when the requested model matches; otherwise
 * disposes the stale instance and initializes a new one. Also migrates any
 * legacy model cache into the data directory before loading.
 * @param {import('sillytavern-transformers').PipelineType} task The task to get the pipeline for
 * @param {string} forceModel The model to use for the pipeline, if any
 * @returns {Promise<Pipeline>} Pipeline for the task
 */
async function getPipeline(task, forceModel = '') {
    await migrateCacheToDataDir();

    if (tasks[task].pipeline) {
        // Reuse the cached instance unless a different model is being forced
        if (forceModel === '' || tasks[task].currentModel === forceModel) {
            return tasks[task].pipeline;
        }
        // Fixed log typo: message previously read "pipeline for for task"
        console.log('Disposing transformers.js pipeline for task', task, 'with model', tasks[task].currentModel);
        await tasks[task].pipeline.dispose();
    }

    const cacheDir = path.join(global.DATA_ROOT, '_cache');
    const model = forceModel || getModelForTask(task);
    // When auto-download is disabled, only load models already present on disk
    const localOnly = getConfigValue('extras.disableAutoDownload', false);
    console.log('Initializing transformers.js pipeline for task', task, 'with model', model);
    const instance = await pipeline(task, model, { cache_dir: cacheDir, quantized: tasks[task].quantized ?? true, local_files_only: localOnly });
    tasks[task].pipeline = instance;
    tasks[task].currentModel = model;
    return instance;
}
|
|
|
|
|
|
|
|
// Public module interface: pipeline factory and base64-to-RawImage helper
export default {
    getPipeline,
    getRawImage,
};
|