Merge remote-tracking branch 'upstream/staging' into quad-sample
commit 30213c8430

@@ -53,6 +53,8 @@ extras:
  captioningModel: Xenova/vit-gpt2-image-captioning
  embeddingModel: Cohee/jina-embeddings-v2-base-en
  promptExpansionModel: Cohee/fooocus_expansion-onnx
  speechToTextModel: Xenova/whisper-small
  textToSpeechModel: Xenova/speecht5_tts
# -- OPENAI CONFIGURATION --
openai:
  # Will send a random user ID to OpenAI completion API

@@ -37,9 +37,10 @@
        "png-chunks-extract": "^1.0.0",
        "response-time": "^2.3.2",
        "sanitize-filename": "^1.6.3",
        "sillytavern-transformers": "^2.7.3",
        "sillytavern-transformers": "^2.14.6",
        "simple-git": "^3.19.1",
        "vectra": "^0.2.2",
        "wavefile": "^11.0.0",
        "write-file-atomic": "^5.0.1",
        "ws": "^8.13.0",
        "yaml": "^2.3.4",

@@ -232,6 +233,14 @@
        "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
      }
    },
    "node_modules/@huggingface/jinja": {
      "version": "0.1.2",
      "resolved": "https://registry.npmjs.org/@huggingface/jinja/-/jinja-0.1.2.tgz",
      "integrity": "sha512-x5mpbfJt1nKmVep5WNP5VjNsjWApWNj8pPYI+uYMkBWH9bWUJmQmHt2lbf0VCoQd54Oq3XuFEh/UyoVh7rPxmg==",
      "engines": {
        "node": ">=18"
      }
    },
    "node_modules/@humanwhocodes/config-array": {
      "version": "0.11.13",
      "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.13.tgz",

@@ -3670,20 +3679,6 @@
      "resolved": "https://registry.npmjs.org/onnxruntime-common/-/onnxruntime-common-1.14.0.tgz",
      "integrity": "sha512-3LJpegM2iMNRX2wUmtYfeX/ytfOzNwAWKSq1HbRrKc9+uqG/FsEA0bbKZl1btQeZaXhC26l44NWpNUeXPII7Ew=="
    },
    "node_modules/onnxruntime-node": {
      "version": "1.14.0",
      "resolved": "https://registry.npmjs.org/onnxruntime-node/-/onnxruntime-node-1.14.0.tgz",
      "integrity": "sha512-5ba7TWomIV/9b6NH/1x/8QEeowsb+jBEvFzU6z0T4mNsFwdPqXeFUM7uxC6QeSRkEbWu3qEB0VMjrvzN/0S9+w==",
      "optional": true,
      "os": [
        "win32",
        "darwin",
        "linux"
      ],
      "dependencies": {
        "onnxruntime-common": "~1.14.0"
      }
    },
    "node_modules/onnxruntime-web": {
      "version": "1.14.0",
      "resolved": "https://registry.npmjs.org/onnxruntime-web/-/onnxruntime-web-1.14.0.tgz",

@@ -4681,15 +4676,13 @@
      }
    },
    "node_modules/sillytavern-transformers": {
      "version": "2.7.3",
      "resolved": "https://registry.npmjs.org/sillytavern-transformers/-/sillytavern-transformers-2.7.3.tgz",
      "integrity": "sha512-vr6BQdLlT3TbCLJdzLt5Sc/MzZ7LWoTzdkkQJgtvKwU3sX1TcnW0Oz23hl211sefWdxwkj/g0RZdvL18hk1Jew==",
      "version": "2.14.6",
      "resolved": "https://registry.npmjs.org/sillytavern-transformers/-/sillytavern-transformers-2.14.6.tgz",
      "integrity": "sha512-Tpu3lcDfa3vQB/wRgF+7ZG8ZNtYygT6vEQs9+4BpXLghVanx6ic7rBSxmTxx9Sm90G1P3W8mxoVkzfs8KAvMiA==",
      "dependencies": {
        "@huggingface/jinja": "^0.1.0",
        "jimp": "^0.22.10",
        "onnxruntime-web": "1.14.0"
      },
      "optionalDependencies": {
        "onnxruntime-node": "1.14.0"
      }
    },
    "node_modules/simple-concat": {

@@ -5152,6 +5145,17 @@
        "vectra": "bin/vectra.js"
      }
    },
    "node_modules/wavefile": {
      "version": "11.0.0",
      "resolved": "https://registry.npmjs.org/wavefile/-/wavefile-11.0.0.tgz",
      "integrity": "sha512-/OBiAALgWU24IG7sC84cDO/KfFuvajWc5Uec0oV2zrpOOZZDgGdOwHwgEzOrwh8jkubBk7PtZfQBIcI1OaE5Ng==",
      "bin": {
        "wavefile": "bin/wavefile.js"
      },
      "engines": {
        "node": ">=8"
      }
    },
    "node_modules/web-streams-polyfill": {
      "version": "3.2.1",
      "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz",

@@ -27,9 +27,10 @@
        "png-chunks-extract": "^1.0.0",
        "response-time": "^2.3.2",
        "sanitize-filename": "^1.6.3",
        "sillytavern-transformers": "^2.7.3",
        "sillytavern-transformers": "^2.14.6",
        "simple-git": "^3.19.1",
        "vectra": "^0.2.2",
        "wavefile": "^11.0.0",
        "write-file-atomic": "^5.0.1",
        "ws": "^8.13.0",
        "yaml": "^2.3.4",

@@ -3670,9 +3670,13 @@
<div class="flex-container marginBot10 alignitemscenter">
    <div id="create_dummy_persona" class="menu_button menu_button_icon" title="Create a dummy persona" data-i18n="[title]Create a dummy persona">
        <i class="fa-solid fa-person-circle-question fa-fw"></i>
        <span data-i18n="Create">Create</span>
        <div data-i18n="Create">Create</div>
    </div>
    <input id="persona_search_bar" class="text_pole width100p flex1 margin0" type="search" data-i18n="[placeholder]Search..." placeholder="Search..." maxlength="100">
    <select id="persona_sort_order" class="margin0">
        <option value="asc">A-Z</option>
        <option value="desc">Z-A</option>
    </select>
    <div id="persona_pagination_container" class="flex1"></div>
    <i id="persona_grid_toggle" class="fa-solid fa-table-cells-large menu_button" title="Toggle grid view"></i>
</div>

@@ -5416,6 +5416,7 @@ export async function getUserAvatars(doRender = true, openPageAt = '') {
    }

    const entities = personasFilter.applyFilters(allEntities);
    entities.sort((a, b) => power_user.persona_sort_order === 'asc' ? a.localeCompare(b) : b.localeCompare(a));

    const storageKey = 'Personas_PerPage';
    const listId = '#user_avatar_block';

@@ -5482,7 +5483,7 @@ function getUserAvatarBlock(name) {
    template.find('.ch_name').text(personaName || '[Unnamed Persona]');
    template.find('.ch_description').text(personaDescription || '[No description]').toggleClass('text_muted', !personaDescription);
    template.attr('imgfile', name);
    template.find('.avatar').attr('imgfile', name);
    template.find('.avatar').attr('imgfile', name).attr('title', name);
    template.toggleClass('default_persona', name === power_user.default_persona);
    template.find('img').attr('src', getUserAvatar(name));
    $('#user_avatar_block').append(template);

@@ -139,7 +139,7 @@ const languageCodes = {
};

const KEY_REQUIRED = ['deepl', 'libre'];
const LOCAL_URL = ['libre', 'oneringtranslator', 'deeplx'];
const LOCAL_URL = ['libre', 'oneringtranslator', 'deeplx', 'lingva'];

function showKeysButton() {
    const providerRequiresKey = KEY_REQUIRED.includes(extension_settings.translate.provider);

@@ -249,6 +249,27 @@ async function translateProviderGoogle(text, lang) {
    throw new Error(response.statusText);
}

/**
 * Translates text using an instance of the Lingva Translate
 * @param {string} text Text to translate
 * @param {string} lang Target language code
 * @returns {Promise<string>} Translated text
 */
async function translateProviderLingva(text, lang) {
    const response = await fetch('/api/translate/lingva', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify({ text: text, lang: lang }),
    });

    if (response.ok) {
        const result = await response.text();
        return result;
    }

    throw new Error(response.statusText);
}

/**
 * Translates text using the DeepL API
 * @param {string} text Text to translate

@@ -355,6 +376,8 @@ async function translate(text, lang) {
            return await translateProviderLibre(text, lang);
        case 'google':
            return await chunkedTranslate(text, lang, translateProviderGoogle, 5000);
        case 'lingva':
            return await chunkedTranslate(text, lang, translateProviderLingva, 5000);
        case 'deepl':
            return await translateProviderDeepl(text, lang);
        case 'deeplx':

@@ -507,6 +530,7 @@ jQuery(() => {
    <select id="translation_provider" name="provider" class="margin0">
        <option value="libre">Libre</option>
        <option value="google">Google</option>
        <option value="lingva">Lingva</option>
        <option value="deepl">DeepL</option>
        <option value="deeplx">DeepLX</option>
        <option value="bing">Bing</option>

@@ -569,6 +593,7 @@ jQuery(() => {
    const optionText = $('#translation_provider option:selected').text();
    const exampleURLs = {
        'libre': 'http://127.0.0.1:5000/translate',
        'lingva': 'https://lingva.ml/api/v1',
        'oneringtranslator': 'http://127.0.0.1:4990/translate',
        'deeplx': 'http://127.0.0.1:1188/translate',
    };

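Note on the client-side dispatch above: Google and the new Lingva provider both go through chunkedTranslate with a 5000-character cap, while the other providers are called directly. chunkedTranslate itself is defined elsewhere in this file and is not part of the hunk; the sketch below shows what such a helper plausibly does (split the input at the size limit, translate each piece, join the results) and carries an illustrative name so it is not mistaken for the real implementation.

// A minimal sketch, assuming chunkedTranslate splits on a character limit and
// concatenates the per-chunk translations (illustrative, not project code).
async function chunkedTranslateSketch(text, lang, translateFn, chunkSize) {
    if (text.length <= chunkSize) {
        return await translateFn(text, lang);
    }

    const parts = [];
    for (let i = 0; i < text.length; i += chunkSize) {
        parts.push(await translateFn(text.slice(i, i + chunkSize), lang));
    }
    return parts.join('');
}
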
@@ -640,6 +640,11 @@ export function initPersonas() {
    $('#personas_backup').on('click', onBackupPersonas);
    $('#personas_restore').on('click', () => $('#personas_restore_input').trigger('click'));
    $('#personas_restore_input').on('change', onPersonasRestoreInput);
    $('#persona_sort_order').val(power_user.persona_sort_order).on('input', function () {
        power_user.persona_sort_order = String($(this).val());
        getUserAvatars(true, user_avatar);
        saveSettingsDebounced();
    });
    $('#persona_grid_toggle').on('click', () => {
        const state = localStorage.getItem(GRID_STORAGE_KEY) === 'true';
        localStorage.setItem(GRID_STORAGE_KEY, String(!state));

@@ -225,6 +225,7 @@ let power_user = {
    persona_description: '',
    persona_description_position: persona_description_positions.IN_PROMPT,
    persona_show_notifications: true,
    persona_sort_order: 'asc',

    custom_stopping_strings: '',
    custom_stopping_strings_macro: true,

@@ -501,6 +502,11 @@ async function switchLabMode() {
        $('#labModeWarning').removeClass('displayNone');
        //$("#advanced-ai-config-block input[type='range']").hide()

        $('#amount_gen').attr('min', '1')
            .attr('max', '99999')
            .attr('step', '1');


    } else {
        //re apply the original sliders values to each input
        originalSliderValues.forEach(function (slider) {

@@ -512,6 +518,10 @@
        });
        $('#advanced-ai-config-block input[type=\'range\']').show();
        $('#labModeWarning').addClass('displayNone');

        $('#amount_gen').attr('min', '16')
            .attr('max', '2048')
            .attr('step', '1');
    }
}

@@ -35,7 +35,7 @@ export const SENTENCEPIECE_TOKENIZERS = [
    //tokenizers.NERD2,
];

export const TEXTGEN_TOKENIZERS = [OOBA, TABBY, KOBOLDCPP, LLAMACPP];
export const TEXTGEN_TOKENIZERS = [OOBA, TABBY, KOBOLDCPP, LLAMACPP, APHRODITE];

const TOKENIZER_URLS = {
    [tokenizers.GPT2]: {

@@ -1525,6 +1525,10 @@ select option:not(:checked) {
    min-width: 80px;
}

#persona_sort_order {
    max-width: 4em;
}

input[type=search]::-webkit-search-cancel-button {
    -webkit-appearance: none;
    height: 1em;

@@ -1837,6 +1841,10 @@ input[type=search]:focus::-webkit-search-cancel-button {
    border: 2px solid transparent;
}

.avatar-container .character_select_container {
    flex-grow: 1;
}

grammarly-extension {
    z-index: 35;
}

@@ -2284,11 +2292,10 @@ input[type="checkbox"]:not(#nav-toggle):not(#rm_button_panel_pin):not(#lm_button

.avatar-container .avatar {
    cursor: pointer;
    width: 64px;
    height: 64px;
    border-radius: 50%;
    align-self: center;
    outline: 2px solid transparent;
    flex: unset;
}

.avatar-container.selected {

@@ -2303,11 +2310,6 @@ input[type="checkbox"]:not(#nav-toggle):not(#rm_button_panel_pin):not(#lm_button
    color: var(--golden);
}

#user_avatar_block .avatar img {
    width: 64px;
    height: 64px;
}

#user_avatar_block .avatar_upload {
    cursor: pointer;
    width: 60px;

@@ -593,6 +593,9 @@ app.use('/api/backends/chat-completions', require('./src/endpoints/backends/chat
// Scale (alt method)
app.use('/api/backends/scale-alt', require('./src/endpoints/backends/scale-alt').router);

// Speech (text-to-speech and speech-to-text)
app.use('/api/speech', require('./src/endpoints/speech').router);

const tavernUrl = new URL(
    (cliArguments.ssl ? 'https://' : 'http://') +
    (listen ? '0.0.0.0' : '127.0.0.1') +

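For context, mounting the router at the '/api/speech' prefix is what turns the handlers registered in the new endpoint file into POST /api/speech/recognize and POST /api/speech/synthesize. A minimal, self-contained illustration of that Express pattern (not project code):

const express = require('express');

const app = express();
const router = express.Router();

// A handler registered on the router under '/recognize'...
router.post('/recognize', (req, res) => res.json({ ok: true }));

// ...is served at POST /api/speech/recognize once the router is mounted.
app.use('/api/speech', router);
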
@@ -300,6 +300,7 @@ function charaFormatData(data) {
    _.set(char, 'chat', data.ch_name + ' - ' + humanizedISO8601DateTime());
    _.set(char, 'talkativeness', data.talkativeness);
    _.set(char, 'fav', data.fav == 'true');
    _.set(char, 'tags', typeof data.tags == 'string' ? (data.tags.split(',').map(x => x.trim()).filter(x => x)) : data.tags || []);

    // Spec V2 fields
    _.set(char, 'spec', 'chara_card_v2');

@@ -17,6 +17,7 @@ const SECRET_KEYS = {
    DEEPL: 'deepl',
    LIBRE: 'libre',
    LIBRE_URL: 'libre_url',
    LINGVA_URL: 'lingva_url',
    OPENROUTER: 'api_key_openrouter',
    SCALE: 'api_key_scale',
    AI21: 'api_key_ai21',

@@ -0,0 +1,82 @@
const express = require('express');
const { jsonParser } = require('../express-common');

const router = express.Router();

/**
 * Gets the audio data from a base64-encoded audio file.
 * @param {string} audio Base64-encoded audio
 * @returns {Float64Array} Audio data
 */
function getWaveFile(audio) {
    const wavefile = require('wavefile');
    const wav = new wavefile.WaveFile();
    wav.fromDataURI(audio);
    wav.toBitDepth('32f');
    wav.toSampleRate(16000);
    let audioData = wav.getSamples();
    if (Array.isArray(audioData)) {
        if (audioData.length > 1) {
            const SCALING_FACTOR = Math.sqrt(2);

            // Merge channels (into first channel to save memory)
            for (let i = 0; i < audioData[0].length; ++i) {
                audioData[0][i] = SCALING_FACTOR * (audioData[0][i] + audioData[1][i]) / 2;
            }
        }

        // Select first channel
        audioData = audioData[0];
    }

    return audioData;
}

router.post('/recognize', jsonParser, async (req, res) => {
    try {
        const TASK = 'automatic-speech-recognition';
        const { model, audio, lang } = req.body;
        const module = await import('../transformers.mjs');
        const pipe = await module.default.getPipeline(TASK, model);
        const wav = getWaveFile(audio);
        const start = performance.now();
        const result = await pipe(wav, { language: lang || null });
        const end = performance.now();
        console.log(`Execution duration: ${(end - start) / 1000} seconds`);
        console.log('Transcribed audio:', result.text);

        return res.json({ text: result.text });
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});

router.post('/synthesize', jsonParser, async (req, res) => {
    try {
        const wavefile = require('wavefile');
        const TASK = 'text-to-speech';
        const { text, model, speaker } = req.body;
        const module = await import('../transformers.mjs');
        const pipe = await module.default.getPipeline(TASK, model);
        const speaker_embeddings = speaker
            ? new Float32Array(new Uint8Array(Buffer.from(speaker.split(',')[1], 'base64')).buffer)
            : null;
        const start = performance.now();
        const result = await pipe(text, { speaker_embeddings: speaker_embeddings });
        const end = performance.now();
        console.log(`Execution duration: ${(end - start) / 1000} seconds`);

        const wav = new wavefile.WaveFile();
        wav.fromScratch(1, result.sampling_rate, '32f', result.audio);
        const buffer = wav.toBuffer();

        res.set('Content-Type', 'audio/wav');
        return res.send(Buffer.from(buffer));
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});

module.exports = { router };

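Nothing in this diff calls the new speech routes yet, so the sketch below shows how a client might use them, based on the request bodies the handlers read ({ model, audio, lang } and { text, model, speaker }). The function names are hypothetical, the audio/speaker values are expected to be base64 data URIs (the handlers call wav.fromDataURI and split the speaker string on ','), and a real SillyTavern caller would attach its usual request headers, which are omitted here.

// Hypothetical client-side helpers for the new endpoints (illustrative only).
async function recognizeSpeech(model, audioDataUri, lang) {
    const response = await fetch('/api/speech/recognize', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ model, audio: audioDataUri, lang }),
    });

    if (!response.ok) {
        throw new Error(response.statusText);
    }

    const { text } = await response.json();
    return text;
}

async function synthesizeSpeech(model, text, speakerDataUri = null) {
    const response = await fetch('/api/speech/synthesize', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ model, text, speaker: speakerDataUri }),
    });

    if (!response.ok) {
        throw new Error(response.statusText);
    }

    // The endpoint responds with raw 'audio/wav' bytes.
    return await response.blob();
}
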
@@ -628,6 +628,10 @@ router.post('/remote/textgenerationwebui/encode', jsonParser, async function (re
            url += '/tokenize';
            args.body = JSON.stringify({ 'content': text });
            break;
        case TEXTGEN_TYPES.APHRODITE:
            url += '/v1/tokenize';
            args.body = JSON.stringify({ 'prompt': text });
            break;
        default:
            url += '/v1/internal/encode';
            args.body = JSON.stringify({ 'text': text });

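A compact recap of the request shapes handled by the switch above: Aphrodite gets an OpenAI-style /v1/tokenize call with a 'prompt' field, while the default branch keeps using text-generation-webui's /v1/internal/encode with a 'text' field. The helper below only illustrates that difference and is not project code.

// Sketch: body/path selection for the tokenize call, per backend.
function buildTokenizeRequestSketch(isAphrodite, text) {
    return isAphrodite
        ? { path: '/v1/tokenize', body: JSON.stringify({ prompt: text }) }
        : { path: '/v1/internal/encode', body: JSON.stringify({ text: text }) };
}
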
@@ -102,6 +102,52 @@ router.post('/google', jsonParser, async (request, response) => {
    }
});

router.post('/lingva', jsonParser, async (request, response) => {
    try {
        const baseUrl = readSecret(SECRET_KEYS.LINGVA_URL);

        if (!baseUrl) {
            console.log('Lingva URL is not configured.');
            return response.sendStatus(400);
        }

        const text = request.body.text;
        const lang = request.body.lang;

        if (!text || !lang) {
            return response.sendStatus(400);
        }

        console.log('Input text: ' + text);
        const url = `${baseUrl}/auto/${lang}/${encodeURIComponent(text)}`;

        https.get(url, (resp) => {
            let data = '';

            resp.on('data', (chunk) => {
                data += chunk;
            });

            resp.on('end', () => {
                try {
                    const result = JSON.parse(data);
                    console.log('Translated text: ' + result.translation);
                    return response.send(result.translation);
                } catch (error) {
                    console.log('Translation error', error);
                    return response.sendStatus(500);
                }
            });
        }).on('error', (err) => {
            console.log('Translation error: ' + err.message);
            return response.sendStatus(500);
        });
    } catch (error) {
        console.log('Translation error', error);
        return response.sendStatus(500);
    }
});

router.post('/deepl', jsonParser, async (request, response) => {
    const key = readSecret(SECRET_KEYS.DEEPL);

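The handler above assumes a Lingva-compatible REST API: a GET to <baseUrl>/auto/<targetLang>/<urlEncodedText> that returns JSON with a "translation" field (the client hunk suggests https://lingva.ml/api/v1 as an example base URL). For reference, the same upstream request expressed with fetch, as a sketch:

// Sketch of the upstream Lingva call, equivalent to the https.get above.
async function lingvaTranslateSketch(baseUrl, lang, text) {
    const url = `${baseUrl}/auto/${lang}/${encodeURIComponent(text)}`;
    const response = await fetch(url);

    if (!response.ok) {
        throw new Error(`Lingva request failed: ${response.status}`);
    }

    const result = await response.json();
    return result.translation;
}
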
@@ -35,19 +35,31 @@ async function getVector(source, sourceSettings, text) {
 * @returns {Promise<number[][]>} - The array of vectors for the texts
 */
async function getBatchVector(source, sourceSettings, texts) {
    switch (source) {
        case 'mistral':
        case 'openai':
            return require('../openai-vectors').getOpenAIBatchVector(texts, source);
        case 'transformers':
            return require('../embedding').getTransformersBatchVector(texts);
        case 'extras':
            return require('../extras-vectors').getExtrasBatchVector(texts, sourceSettings.extrasUrl, sourceSettings.extrasKey);
        case 'palm':
            return require('../makersuite-vectors').getMakerSuiteBatchVector(texts);
    const batchSize = 10;
    const batches = Array(Math.ceil(texts.length / batchSize)).fill(undefined).map((_, i) => texts.slice(i * batchSize, i * batchSize + batchSize));

    let results = [];
    for (let batch of batches) {
        switch (source) {
            case 'mistral':
            case 'openai':
                results.push(...await require('../openai-vectors').getOpenAIBatchVector(batch, source));
                break;
            case 'transformers':
                results.push(...await require('../embedding').getTransformersBatchVector(batch));
                break;
            case 'extras':
                results.push(...await require('../extras-vectors').getExtrasBatchVector(batch, sourceSettings.extrasUrl, sourceSettings.extrasKey));
                break;
            case 'palm':
                results.push(...await require('../makersuite-vectors').getMakerSuiteBatchVector(batch));
                break;
            default:
                throw new Error(`Unknown vector source ${source}`);
        }
    }

    throw new Error(`Unknown vector source ${source}`);
    return results;
}

/**

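The rewritten getBatchVector no longer hands the whole list to the backing provider in one call; it slices the input into batches of 10 and concatenates the per-batch results. The inline Array(...).fill().map(...) expression is the batching step; pulled out as a standalone helper purely for illustration:

// Sketch: the batching expression from above as a named helper.
function toBatchesSketch(items, batchSize) {
    return Array(Math.ceil(items.length / batchSize))
        .fill(undefined)
        .map((_, i) => items.slice(i * batchSize, i * batchSize + batchSize));
}

// toBatchesSketch(['a', 'b', 'c'], 2) -> [['a', 'b'], ['c']]
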
@@ -17,21 +17,37 @@ const tasks = {
        defaultModel: 'Cohee/distilbert-base-uncased-go-emotions-onnx',
        pipeline: null,
        configField: 'extras.classificationModel',
        quantized: true,
    },
    'image-to-text': {
        defaultModel: 'Xenova/vit-gpt2-image-captioning',
        pipeline: null,
        configField: 'extras.captioningModel',
        quantized: true,
    },
    'feature-extraction': {
        defaultModel: 'Xenova/all-mpnet-base-v2',
        pipeline: null,
        configField: 'extras.embeddingModel',
        quantized: true,
    },
    'text-generation': {
        defaultModel: 'Cohee/fooocus_expansion-onnx',
        pipeline: null,
        configField: 'extras.promptExpansionModel',
        quantized: true,
    },
    'automatic-speech-recognition': {
        defaultModel: 'Xenova/whisper-small',
        pipeline: null,
        configField: 'extras.speechToTextModel',
        quantized: true,
    },
    'text-to-speech': {
        defaultModel: 'Xenova/speecht5_tts',
        pipeline: null,
        configField: 'extras.textToSpeechModel',
        quantized: false,
    },
}

@@ -72,19 +88,20 @@ function getModelForTask(task) {

/**
 * Gets the transformers.js pipeline for a given task.
 * @param {string} task The task to get the pipeline for
 * @param {import('sillytavern-transformers').PipelineType} task The task to get the pipeline for
 * @param {string} forceModel The model to use for the pipeline, if any
 * @returns {Promise<Pipeline>} Pipeline for the task
 */
async function getPipeline(task) {
async function getPipeline(task, forceModel = '') {
    if (tasks[task].pipeline) {
        return tasks[task].pipeline;
    }

    const cache_dir = path.join(process.cwd(), 'cache');
    const model = getModelForTask(task);
    const model = forceModel || getModelForTask(task);
    const localOnly = getConfigValue('extras.disableAutoDownload', false);
    console.log('Initializing transformers.js pipeline for task', task, 'with model', model);
    const instance = await pipeline(task, model, { cache_dir, quantized: true, local_files_only: localOnly });
    const instance = await pipeline(task, model, { cache_dir, quantized: tasks[task].quantized ?? true, local_files_only: localOnly });
    tasks[task].pipeline = instance;
    return instance;
}

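getPipeline now accepts an optional second argument: when the speech endpoints pass a model from the request body it overrides the configField/default lookup, and the per-task quantized flag (false only for text-to-speech) is forwarded to transformers.js. A minimal usage sketch mirroring how speech.js calls it; the module path and model name are taken from this diff, while the wrapper function itself is illustrative:

// Sketch: configured model vs. forced model.
async function loadSpeechPipelinesSketch() {
    const module = await import('../transformers.mjs');

    // Resolves the model from extras.speechToTextModel (or the task default).
    const configured = await module.default.getPipeline('automatic-speech-recognition');

    // Forces a specific model for this call, bypassing the config lookup.
    const forced = await module.default.getPipeline('automatic-speech-recognition', 'Xenova/whisper-small');

    return { configured, forced };
}
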