Mirror of https://github.com/SillyTavern/SillyTavern.git
Commit: Merge branch 'neo-server' into parser-v2
@@ -60,7 +60,8 @@ function convertConfig() {
     try {
         console.log(color.blue('Converting config.conf to config.yaml. Your old config.conf will be renamed to config.conf.bak'));
         const config = require(path.join(process.cwd(), './config.conf'));
-        fs.renameSync('./config.conf', './config.conf.bak');
+        fs.copyFileSync('./config.conf', './config.conf.bak');
+        fs.rmSync('./config.conf');
         fs.writeFileSync('./config.yaml', yaml.stringify(config));
         console.log(color.green('Conversion successful. Please check your config.yaml and fix it if necessary.'));
     } catch (error) {
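Note: throughout this commit, single fs.renameSync() moves are replaced with a copy followed by an explicit delete. A plausible motivation (not stated in the diff) is that renameSync() fails with EXDEV when source and destination sit on different filesystems or volume mounts, while copy-then-remove works everywhere. A minimal sketch of the pattern, with hypothetical helper names:

const fs = require('fs');

// Move a single file: duplicate it at the destination, then delete the original.
function moveFileSafe(from, to) {
    fs.copyFileSync(from, to);
    fs.rmSync(from);
}

// Move a directory tree the same way, mirroring the fs.cpSync/rmSync hunks below.
function moveDirSafe(from, to) {
    fs.cpSync(from, to, { recursive: true, force: true });
    fs.rmSync(from, { recursive: true, force: true });
}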
@@ -841,7 +841,7 @@ class PromptManager {
         const promptReferences = this.getPromptOrderForCharacter(this.activeCharacter);
         for (let i = promptReferences.length - 1; i >= 0; i--) {
             const reference = promptReferences[i];
-            if (-1 === this.serviceSettings.prompts.findIndex(prompt => prompt.identifier === reference.identifier)) {
+            if (reference && -1 === this.serviceSettings.prompts.findIndex(prompt => prompt.identifier === reference.identifier)) {
                 promptReferences.splice(i, 1);
                 this.log('Removed unused reference: ' + reference.identifier);
             }
@@ -19,6 +19,8 @@
                     <li data-placeholder="scale" class="sd_comfy_workflow_editor_not_found">"%scale%"</li>
                     <li data-placeholder="width" class="sd_comfy_workflow_editor_not_found">"%width%"</li>
                     <li data-placeholder="height" class="sd_comfy_workflow_editor_not_found">"%height%"</li>
+                    <li data-placeholder="user_avatar" class="sd_comfy_workflow_editor_not_found">"%user_avatar%"</li>
+                    <li data-placeholder="char_avatar" class="sd_comfy_workflow_editor_not_found">"%char_avatar%"</li>
                     <li><hr></li>
                     <li data-placeholder="seed" class="sd_comfy_workflow_editor_not_found">
                         "%seed%"
@@ -2111,21 +2111,11 @@ async function generateMultimodalPrompt(generationType, quietPrompt) {
     let avatarUrl;
 
     if (generationType == generationMode.USER_MULTIMODAL) {
-        avatarUrl = getUserAvatar(user_avatar);
+        avatarUrl = getUserAvatarUrl();
     }
 
     if (generationType == generationMode.CHARACTER_MULTIMODAL || generationType === generationMode.FACE_MULTIMODAL) {
-        const context = getContext();
-
-        if (context.groupId) {
-            const groupMembers = context.groups.find(x => x.id === context.groupId)?.members;
-            const lastMessageAvatar = context.chat?.filter(x => !x.is_system && !x.is_user)?.slice(-1)[0]?.original_avatar;
-            const randomMemberAvatar = Array.isArray(groupMembers) ? groupMembers[Math.floor(Math.random() * groupMembers.length)]?.avatar : null;
-            const avatarToUse = lastMessageAvatar || randomMemberAvatar;
-            avatarUrl = formatCharacterAvatar(avatarToUse);
-        } else {
-            avatarUrl = getCharacterAvatar(context.characterId);
-        }
+        avatarUrl = getCharacterAvatarUrl();
     }
 
     try {
@@ -2152,6 +2142,24 @@ async function generateMultimodalPrompt(generationType, quietPrompt) {
     }
 }
 
+function getCharacterAvatarUrl() {
+    const context = getContext();
+
+    if (context.groupId) {
+        const groupMembers = context.groups.find(x => x.id === context.groupId)?.members;
+        const lastMessageAvatar = context.chat?.filter(x => !x.is_system && !x.is_user)?.slice(-1)[0]?.original_avatar;
+        const randomMemberAvatar = Array.isArray(groupMembers) ? groupMembers[Math.floor(Math.random() * groupMembers.length)]?.avatar : null;
+        const avatarToUse = lastMessageAvatar || randomMemberAvatar;
+        return formatCharacterAvatar(avatarToUse);
+    } else {
+        return getCharacterAvatar(context.characterId);
+    }
+}
+
+function getUserAvatarUrl() {
+    return getUserAvatar(user_avatar);
+}
+
 /**
  * Generates a prompt using the main LLM API.
  * @param {string} quietPrompt - The prompt to use for the image generation.
@@ -2636,6 +2644,22 @@ async function generateComfyImage(prompt, negativePrompt) {
     (extension_settings.sd.comfy_placeholders ?? []).forEach(ph => {
         workflow = workflow.replace(`"%${ph.find}%"`, JSON.stringify(substituteParams(ph.replace)));
     });
+    if (/%user_avatar%/gi.test(workflow)) {
+        const response = await fetch(getUserAvatarUrl());
+        if (response.ok) {
+            const avatarBlob = await response.blob();
+            const avatarBase64 = await getBase64Async(avatarBlob);
+            workflow = workflow.replace('"%user_avatar%"', JSON.stringify(avatarBase64));
+        }
+    }
+    if (/%char_avatar%/gi.test(workflow)) {
+        const response = await fetch(getCharacterAvatarUrl());
+        if (response.ok) {
+            const avatarBlob = await response.blob();
+            const avatarBase64 = await getBase64Async(avatarBlob);
+            workflow = workflow.replace('"%char_avatar%"', JSON.stringify(avatarBase64));
+        }
+    }
     console.log(`{
         "prompt": ${workflow}
     }`);
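For context, a hypothetical illustration of what the placeholder substitution above does to a ComfyUI workflow. The node id and class_type are invented for the example; getBase64Async() in the extension returns a data-URL string for a Blob, which is faked here as a literal:

const workflowTemplate = '{ "10": { "class_type": "LoadImageFromBase64", "inputs": { "image": "%user_avatar%" } } }';
const avatarBase64 = 'data:image/png;base64,iVBORw0KGgo...'; // stand-in for the fetched avatar
const result = workflowTemplate.replace('"%user_avatar%"', JSON.stringify(avatarBase64));
console.log(result);
// { "10": { "class_type": "LoadImageFromBase64", "inputs": { "image": "data:image/png;base64,iVBORw0KGgo..." } } }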
@@ -2649,6 +2673,10 @@ async function generateComfyImage(prompt, negativePrompt) {
         }`,
         }),
     });
+    if (!promptResult.ok) {
+        const text = await promptResult.text();
+        throw new Error(text);
+    }
     return { format: 'png', data: await promptResult.text() };
 }
 
@@ -35,6 +35,7 @@ const settings = {
     include_wi: false,
     togetherai_model: 'togethercomputer/m2-bert-80M-32k-retrieval',
     openai_model: 'text-embedding-ada-002',
+    cohere_model: 'embed-english-v3.0',
     summarize: false,
     summarize_sent: false,
     summary_source: 'main',
@@ -68,6 +69,15 @@ const settings = {
 
 const moduleWorker = new ModuleWorkerWrapper(synchronizeChat);
 
+/**
+ * Gets the Collection ID for a file embedded in the chat.
+ * @param {string} fileUrl URL of the file
+ * @returns {string} Collection ID
+ */
+function getFileCollectionId(fileUrl) {
+    return `file_${getStringHash(fileUrl)}`;
+}
+
 async function onVectorizeAllClick() {
     try {
         if (!settings.enabled_chats) {
@@ -308,7 +318,7 @@ async function processFiles(chat) {
     const dataBankCollectionIds = [];
 
     for (const file of dataBank) {
-        const collectionId = `file_${getStringHash(file.url)}`;
+        const collectionId = getFileCollectionId(file.url);
         const hashesInCollection = await getSavedHashes(collectionId);
         dataBankCollectionIds.push(collectionId);
 
@@ -354,7 +364,7 @@ async function processFiles(chat) {
 
         const fileName = message.extra.file.name;
         const fileUrl = message.extra.file.url;
-        const collectionId = `file_${getStringHash(fileUrl)}`;
+        const collectionId = getFileCollectionId(fileUrl);
         const hashesInCollection = await getSavedHashes(collectionId);
 
         // File is already in the collection
@@ -598,6 +608,9 @@ function getVectorHeaders() {
         case 'openai':
             addOpenAiHeaders(headers);
             break;
+        case 'cohere':
+            addCohereHeaders(headers);
+            break;
         default:
             break;
     }
@@ -636,6 +649,16 @@ function addOpenAiHeaders(headers) {
     });
 }
 
+/**
+ * Add headers for the Cohere API source.
+ * @param {object} headers Header object
+ */
+function addCohereHeaders(headers) {
+    Object.assign(headers, {
+        'X-Cohere-Model': extension_settings.vectors.cohere_model,
+    });
+}
+
 /**
  * Inserts vector items into a collection
  * @param {string} collectionId - The collection to insert into
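A sketch of how the new header travels from client to server (function names are from this diff; the literal values are illustrative and assume extension_settings.vectors.cohere_model is set). addCohereHeaders() attaches the selected model to vector API requests, and the server-side getSourceSettings() reads it back from request.headers['x-cohere-model']:

const headers = { 'Content-Type': 'application/json' };
addCohereHeaders(headers);
// headers now looks like:
// { 'Content-Type': 'application/json', 'X-Cohere-Model': 'embed-english-v3.0' }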
@@ -647,7 +670,8 @@ async function insertVectorItems(collectionId, items) {
         settings.source === 'palm' && !secret_state[SECRET_KEYS.MAKERSUITE] ||
         settings.source === 'mistral' && !secret_state[SECRET_KEYS.MISTRALAI] ||
         settings.source === 'togetherai' && !secret_state[SECRET_KEYS.TOGETHERAI] ||
-        settings.source === 'nomicai' && !secret_state[SECRET_KEYS.NOMICAI]) {
+        settings.source === 'nomicai' && !secret_state[SECRET_KEYS.NOMICAI] ||
+        settings.source === 'cohere' && !secret_state[SECRET_KEYS.COHERE]) {
         throw new Error('Vectors: API key missing', { cause: 'api_key_missing' });
     }
 
@@ -760,7 +784,7 @@ async function purgeFileVectorIndex(fileUrl) {
     }
 
     console.log(`Vectors: Purging file vector index for ${fileUrl}`);
-    const collectionId = `file_${getStringHash(fileUrl)}`;
+    const collectionId = getFileCollectionId(fileUrl);
 
     const response = await fetch('/api/vector/purge', {
         method: 'POST',
@@ -816,6 +840,7 @@ function toggleSettings() {
     $('#vectors_chats_settings').toggle(!!settings.enabled_chats);
     $('#together_vectorsModel').toggle(settings.source === 'togetherai');
     $('#openai_vectorsModel').toggle(settings.source === 'openai');
+    $('#cohere_vectorsModel').toggle(settings.source === 'cohere');
     $('#nomicai_apiKey').toggle(settings.source === 'nomicai');
 }
 
@@ -859,6 +884,42 @@ async function onViewStatsClick() {
 
 }
 
+async function onVectorizeAllFilesClick() {
+    try {
+        const dataBank = getDataBankAttachments();
+        const chatAttachments = getContext().chat.filter(x => x.extra?.file).map(x => x.extra.file);
+        const allFiles = [...dataBank, ...chatAttachments];
+
+        for (const file of allFiles) {
+            const text = await getFileAttachment(file.url);
+            const collectionId = getFileCollectionId(file.url);
+            await vectorizeFile(text, file.name, collectionId, settings.chunk_size);
+        }
+
+        toastr.success('All files vectorized', 'Vectorization successful');
+    } catch (error) {
+        console.error('Vectors: Failed to vectorize all files', error);
+        toastr.error('Failed to vectorize all files', 'Vectorization failed');
+    }
+}
+
+async function onPurgeFilesClick() {
+    try {
+        const dataBank = getDataBankAttachments();
+        const chatAttachments = getContext().chat.filter(x => x.extra?.file).map(x => x.extra.file);
+        const allFiles = [...dataBank, ...chatAttachments];
+
+        for (const file of allFiles) {
+            await purgeFileVectorIndex(file.url);
+        }
+
+        toastr.success('All files purged', 'Purge successful');
+    } catch (error) {
+        console.error('Vectors: Failed to purge all files', error);
+        toastr.error('Failed to purge all files', 'Purge failed');
+    }
+}
+
 jQuery(async () => {
     if (!extension_settings.vectors) {
         extension_settings.vectors = settings;
@@ -913,6 +974,12 @@ jQuery(async () => {
         Object.assign(extension_settings.vectors, settings);
         saveSettingsDebounced();
     });
+    $('#vectors_cohere_model').val(settings.cohere_model).on('change', () => {
+        $('#vectors_modelWarning').show();
+        settings.cohere_model = String($('#vectors_cohere_model').val());
+        Object.assign(extension_settings.vectors, settings);
+        saveSettingsDebounced();
+    });
     $('#vectors_template').val(settings.template).on('input', () => {
         settings.template = String($('#vectors_template').val());
         Object.assign(extension_settings.vectors, settings);
@@ -947,6 +1014,8 @@ jQuery(async () => {
     $('#vectors_vectorize_all').on('click', onVectorizeAllClick);
     $('#vectors_purge').on('click', onPurgeClick);
     $('#vectors_view_stats').on('click', onViewStatsClick);
+    $('#vectors_files_vectorize_all').on('click', onVectorizeAllFilesClick);
+    $('#vectors_files_purge').on('click', onPurgeFilesClick);
 
     $('#vectors_size_threshold').val(settings.size_threshold).on('input', () => {
         settings.size_threshold = Number($('#vectors_size_threshold').val());
@@ -10,13 +10,14 @@
                 Vectorization Source
             </label>
             <select id="vectors_source" class="text_pole">
-                <option value="transformers">Local (Transformers)</option>
+                <option value="cohere">Cohere</option>
                 <option value="extras">Extras</option>
-                <option value="openai">OpenAI</option>
                 <option value="palm">Google MakerSuite (PaLM)</option>
+                <option value="transformers">Local (Transformers)</option>
                 <option value="mistral">MistralAI</option>
-                <option value="togetherai">TogetherAI</option>
                 <option value="nomicai">NomicAI</option>
+                <option value="openai">OpenAI</option>
+                <option value="togetherai">TogetherAI</option>
             </select>
         </div>
         <div class="flex-container flexFlowColumn" id="openai_vectorsModel">
@@ -29,6 +30,20 @@
                 <option value="text-embedding-3-large">text-embedding-3-large</option>
             </select>
         </div>
+        <div class="flex-container flexFlowColumn" id="cohere_vectorsModel">
+            <label for="vectors_cohere_model">
+                Vectorization Model
+            </label>
+            <select id="vectors_cohere_model" class="text_pole">
+                <option value="embed-english-v3.0">embed-english-v3.0</option>
+                <option value="embed-multilingual-v3.0">embed-multilingual-v3.0</option>
+                <option value="embed-english-light-v3.0">embed-english-light-v3.0</option>
+                <option value="embed-multilingual-light-v3.0">embed-multilingual-light-v3.0</option>
+                <option value="embed-english-v2.0">embed-english-v2.0</option>
+                <option value="embed-english-light-v2.0">embed-english-light-v2.0</option>
+                <option value="embed-multilingual-v2.0">embed-multilingual-v2.0</option>
+            </select>
+        </div>
         <div class="flex-container flexFlowColumn" id="together_vectorsModel">
             <label for="vectors_togetherai_model">
                 Vectorization Model
@@ -167,6 +182,14 @@
                 </label>
             </div>
         </div>
+        <div class="flex-container">
+            <div id="vectors_files_vectorize_all" class="menu_button menu_button_icon" title="Vectorize all files in the Data Bank and current chat.">
+                Vectorize All
+            </div>
+            <div id="vectors_files_purge" class="menu_button menu_button_icon" title="Purge all file vectors in the Data Bank and current chat.">
+                Purge Vectors
+            </div>
+        </div>
     </div>
 
     <hr>
@@ -354,7 +354,9 @@ export function formatInstructModeSystemPrompt(systemPrompt) {
     const separator = power_user.instruct.wrap ? '\n' : '';
 
     if (power_user.instruct.system_sequence_prefix) {
-        systemPrompt = power_user.instruct.system_sequence_prefix + separator + systemPrompt;
+        // TODO: Replace with a proper 'System' prompt entity name input
+        const prefix = power_user.instruct.system_sequence_prefix.replace(/{{name}}/gi, 'System');
+        systemPrompt = prefix + separator + systemPrompt;
     }
 
     if (power_user.instruct.system_sequence_suffix) {
@@ -119,11 +119,15 @@ export class Popup {
         const keyListener = (evt) => {
             switch (evt.key) {
                 case 'Escape': {
-                    evt.preventDefault();
-                    evt.stopPropagation();
-                    this.completeCancelled();
-                    window.removeEventListener('keydown', keyListenerBound);
-                    break;
+                    // does it really matter where we check?
+                    const topModal = document.elementFromPoint(window.innerWidth / 2, window.innerHeight / 2)?.closest('.shadow_popup');
+                    if (topModal == this.dom) {
+                        evt.preventDefault();
+                        evt.stopPropagation();
+                        this.completeCancelled();
+                        window.removeEventListener('keydown', keyListenerBound);
+                        break;
+                    }
                 }
             }
         };
@@ -227,7 +227,8 @@ router.post('/download', jsonParser, async (request, response) => {
 
             // Move into asset place
             console.debug('Download finished, moving file from', temp_path, 'to', file_path);
-            fs.renameSync(temp_path, file_path);
+            fs.copyFileSync(temp_path, file_path);
+            fs.rmSync(temp_path);
             response.sendStatus(200);
         }
         catch (error) {
@@ -51,7 +51,8 @@ router.post('/rename', jsonParser, function (request, response) {
         return response.sendStatus(400);
     }
 
-    fs.renameSync(oldFileName, newFileName);
+    fs.copyFileSync(oldFileName, newFileName);
+    fs.rmSync(oldFileName);
     invalidateThumbnail(request.user.directories, 'bg', request.body.old_bg);
     return response.send('ok');
 });
@@ -63,7 +64,8 @@ router.post('/upload', urlencodedParser, function (request, response) {
     const filename = request.file.originalname;
 
     try {
-        fs.renameSync(img_path, path.join(request.user.directories.backgrounds, filename));
+        fs.copyFileSync(img_path, path.join(request.user.directories.backgrounds, filename));
+        fs.rmSync(img_path);
         invalidateThumbnail(request.user.directories, 'bg', filename);
         response.send(filename);
     } catch (err) {
@@ -680,7 +680,8 @@ router.post('/rename', jsonParser, async function (request, response) {
 
         // Rename chats folder
         if (fs.existsSync(oldChatsPath) && !fs.existsSync(newChatsPath)) {
-            fs.renameSync(oldChatsPath, newChatsPath);
+            fs.cpSync(oldChatsPath, newChatsPath, { recursive: true });
+            fs.rmSync(oldChatsPath, { recursive: true, force: true });
         }
 
         // Remove the old character file
@@ -213,8 +213,9 @@ router.post('/rename', jsonParser, async function (request, response) {
         return response.status(400).send({ error: true });
     }
 
+    fs.copyFileSync(pathToOriginalFile, pathToRenamedFile);
+    fs.rmSync(pathToOriginalFile);
     console.log('Successfully renamed.');
-    fs.renameSync(pathToOriginalFile, pathToRenamedFile);
     return response.send({ ok: true });
 });
 
@@ -12,23 +12,26 @@ const SOURCES = ['transformers', 'mistral', 'openai', 'extras', 'palm', 'togethe
  * @param {string} source - The source of the vector
  * @param {Object} sourceSettings - Settings for the source, if it needs any
  * @param {string} text - The text to get the vector for
+ * @param {boolean} isQuery - If the text is a query for embedding search
  * @param {import('../users').UserDirectoryList} directories - The directories object for the user
  * @returns {Promise<number[]>} - The vector for the text
  */
-async function getVector(source, sourceSettings, text, directories) {
+async function getVector(source, sourceSettings, text, isQuery, directories) {
     switch (source) {
         case 'nomicai':
-            return require('../nomicai-vectors').getNomicAIVector(text, source, directories);
+            return require('../vectors/nomicai-vectors').getNomicAIVector(text, source, directories);
         case 'togetherai':
         case 'mistral':
         case 'openai':
-            return require('../openai-vectors').getOpenAIVector(text, source, directories, sourceSettings.model);
+            return require('../vectors/openai-vectors').getOpenAIVector(text, source, directories, sourceSettings.model);
         case 'transformers':
-            return require('../embedding').getTransformersVector(text);
+            return require('../vectors/embedding').getTransformersVector(text);
         case 'extras':
-            return require('../extras-vectors').getExtrasVector(text, sourceSettings.extrasUrl, sourceSettings.extrasKey);
+            return require('../vectors/extras-vectors').getExtrasVector(text, sourceSettings.extrasUrl, sourceSettings.extrasKey);
         case 'palm':
-            return require('../makersuite-vectors').getMakerSuiteVector(text, directories);
+            return require('../vectors/makersuite-vectors').getMakerSuiteVector(text, directories);
+        case 'cohere':
+            return require('../vectors/cohere-vectors').getCohereVector(text, isQuery, directories, sourceSettings.model);
     }
 
     throw new Error(`Unknown vector source ${source}`);
@@ -39,10 +42,11 @@ async function getVector(source, sourceSettings, text, directories) {
  * @param {string} source - The source of the vector
  * @param {Object} sourceSettings - Settings for the source, if it needs any
  * @param {string[]} texts - The array of texts to get the vector for
+ * @param {boolean} isQuery - If the text is a query for embedding search
  * @param {import('../users').UserDirectoryList} directories - The directories object for the user
  * @returns {Promise<number[][]>} - The array of vectors for the texts
  */
-async function getBatchVector(source, sourceSettings, texts, directories) {
+async function getBatchVector(source, sourceSettings, texts, isQuery, directories) {
     const batchSize = 10;
     const batches = Array(Math.ceil(texts.length / batchSize)).fill(undefined).map((_, i) => texts.slice(i * batchSize, i * batchSize + batchSize));
 
@@ -50,21 +54,24 @@ async function getBatchVector(source, sourceSettings, texts, directories) {
     for (let batch of batches) {
         switch (source) {
             case 'nomicai':
-                results.push(...await require('../nomicai-vectors').getNomicAIBatchVector(batch, source, directories));
+                results.push(...await require('../vectors/nomicai-vectors').getNomicAIBatchVector(batch, source, directories));
                 break;
             case 'togetherai':
             case 'mistral':
             case 'openai':
-                results.push(...await require('../openai-vectors').getOpenAIBatchVector(batch, source, directories, sourceSettings.model));
+                results.push(...await require('../vectors/openai-vectors').getOpenAIBatchVector(batch, source, directories, sourceSettings.model));
                 break;
             case 'transformers':
-                results.push(...await require('../embedding').getTransformersBatchVector(batch));
+                results.push(...await require('../vectors/embedding').getTransformersBatchVector(batch));
                 break;
             case 'extras':
-                results.push(...await require('../extras-vectors').getExtrasBatchVector(batch, sourceSettings.extrasUrl, sourceSettings.extrasKey));
+                results.push(...await require('../vectors/extras-vectors').getExtrasBatchVector(batch, sourceSettings.extrasUrl, sourceSettings.extrasKey));
                 break;
             case 'palm':
-                results.push(...await require('../makersuite-vectors').getMakerSuiteBatchVector(batch, directories));
+                results.push(...await require('../vectors/makersuite-vectors').getMakerSuiteBatchVector(batch, directories));
+                break;
+            case 'cohere':
+                results.push(...await require('../vectors/cohere-vectors').getCohereBatchVector(batch, isQuery, directories, sourceSettings.model));
                 break;
             default:
                 throw new Error(`Unknown vector source ${source}`);
@@ -106,7 +113,7 @@ async function insertVectorItems(directories, collectionId, source, sourceSettin
 
     await store.beginUpdate();
 
-    const vectors = await getBatchVector(source, sourceSettings, items.map(x => x.text), directories);
+    const vectors = await getBatchVector(source, sourceSettings, items.map(x => x.text), false, directories);
 
     for (let i = 0; i < items.length; i++) {
         const item = items[i];
@@ -165,7 +172,7 @@ async function deleteVectorItems(directories, collectionId, source, hashes) {
  */
 async function queryCollection(directories, collectionId, source, sourceSettings, searchText, topK) {
     const store = await getIndex(directories, collectionId, source);
-    const vector = await getVector(source, sourceSettings, searchText, directories);
+    const vector = await getVector(source, sourceSettings, searchText, true, directories);
 
     const result = await store.queryItems(vector, topK);
     const metadata = result.map(x => x.item.metadata);
@@ -184,7 +191,7 @@ async function queryCollection(directories, collectionId, source, sourceSettings
  * @returns {Promise<Record<string, { hashes: number[], metadata: object[] }>>} - The top K results from each collection
  */
 async function multiQueryCollection(directories, collectionIds, source, sourceSettings, searchText, topK) {
-    const vector = await getVector(source, sourceSettings, searchText, directories);
+    const vector = await getVector(source, sourceSettings, searchText, true, directories);
     const results = [];
 
     for (const collectionId of collectionIds) {
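A minimal sketch of why insertVectorItems passes isQuery = false while the two query paths above pass true: with the Cohere source the flag selects the input_type sent to the embeddings API, so stored chunks and search text are embedded asymmetrically (other sources ignore the flag). The wrapper function below is illustrative only:

// Illustrative: map the isQuery flag to Cohere's input_type, as the new
// cohere-vectors module does further down.
function cohereInputType(isQuery) {
    return isQuery ? 'search_query' : 'search_document';
}

console.log(cohereInputType(false)); // 'search_document', used when inserting chunks
console.log(cohereInputType(true));  // 'search_query', used when querying collections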
@@ -223,18 +230,24 @@ async function multiQueryCollection(directories, collectionIds, source, sourceSe
  */
 function getSourceSettings(source, request) {
     if (source === 'togetherai') {
-        let model = String(request.headers['x-togetherai-model']);
+        const model = String(request.headers['x-togetherai-model']);
 
         return {
             model: model,
         };
     } else if (source === 'openai') {
-        let model = String(request.headers['x-openai-model']);
+        const model = String(request.headers['x-openai-model']);
 
         return {
             model: model,
         };
-    } else {
+    } else if (source === 'cohere') {
+        const model = String(request.headers['x-cohere-model']);
+
+        return {
+            model: model,
+        };
+    }else {
         // Extras API settings to connect to the Extras embeddings provider
         let extrasUrl = '';
         let extrasKey = '';
src/users.js (14 changed lines)
@@ -286,12 +286,22 @@ async function migrateUserData() {
                 // Copy the file to the new location
                 fs.cpSync(migration.old, migration.new, { force: true });
                 // Move the file to the backup location
-                fs.renameSync(migration.old, path.join(backupDirectory, path.basename(migration.old)));
+                fs.cpSync(
+                    migration.old,
+                    path.join(backupDirectory, path.basename(migration.old)),
+                    { recursive: true, force: true }
+                );
+                fs.rmSync(migration.old, { recursive: true, force: true });
             } else {
                 // Copy the directory to the new location
                 fs.cpSync(migration.old, migration.new, { recursive: true, force: true });
                 // Move the directory to the backup location
-                fs.renameSync(migration.old, path.join(backupDirectory, path.basename(migration.old)));
+                fs.cpSync(
+                    migration.old,
+                    path.join(backupDirectory, path.basename(migration.old)),
+                    { recursive: true, force: true }
+                );
+                fs.rmSync(migration.old, { recursive: true, force: true });
             }
         } catch (error) {
             console.error(color.red(`Error migrating ${migration.old} to ${migration.new}:`), error.message);
src/vectors/cohere-vectors.js (new file, 65 lines)
@@ -0,0 +1,65 @@
+const fetch = require('node-fetch').default;
+const { SECRET_KEYS, readSecret } = require('../endpoints/secrets');
+
+/**
+ * Gets the vector for the given text batch from an OpenAI compatible endpoint.
+ * @param {string[]} texts - The array of texts to get the vector for
+ * @param {boolean} isQuery - If the text is a query for embedding search
+ * @param {import('../users').UserDirectoryList} directories - The directories object for the user
+ * @param {string} model - The model to use for the embedding
+ * @returns {Promise<number[][]>} - The array of vectors for the texts
+ */
+async function getCohereBatchVector(texts, isQuery, directories, model) {
+    const key = readSecret(directories, SECRET_KEYS.COHERE);
+
+    if (!key) {
+        console.log('No API key found');
+        throw new Error('No API key found');
+    }
+
+    const response = await fetch('https://api.cohere.ai/v1/embed', {
+        method: 'POST',
+        headers: {
+            'Content-Type': 'application/json',
+            Authorization: `Bearer ${key}`,
+        },
+        body: JSON.stringify({
+            texts: texts,
+            model: model,
+            input_type: isQuery ? 'search_query' : 'search_document',
+            truncate: 'END',
+        }),
+    });
+
+    if (!response.ok) {
+        const text = await response.text();
+        console.log('API request failed', response.statusText, text);
+        throw new Error('API request failed');
+    }
+
+    const data = await response.json();
+    if (!Array.isArray(data?.embeddings)) {
+        console.log('API response was not an array');
+        throw new Error('API response was not an array');
+    }
+
+    return data.embeddings;
+}
+
+/**
+ * Gets the vector for the given text from an OpenAI compatible endpoint.
+ * @param {string} text - The text to get the vector for
+ * @param {boolean} isQuery - If the text is a query for embedding search
+ * @param {import('../users').UserDirectoryList} directories - The directories object for the user
+ * @param {string} model - The model to use for the embedding
+ * @returns {Promise<number[]>} - The vector for the text
+ */
+async function getCohereVector(text, isQuery, directories, model) {
+    const vectors = await getCohereBatchVector([text], isQuery, directories, model);
+    return vectors[0];
+}
+
+module.exports = {
+    getCohereBatchVector,
+    getCohereVector,
+};
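A hypothetical usage of the new module from another server-side file; the require path and the directories object (normally taken from the request context) are assumptions, and 'embed-english-v3.0' is the extension's default cohere_model:

// Illustrative only, not part of the commit.
const { getCohereVector, getCohereBatchVector } = require('./cohere-vectors');

async function example(directories) {
    // Index two chunks as documents, then embed a query for retrieval.
    const docVectors = await getCohereBatchVector(['chunk one', 'chunk two'], false, directories, 'embed-english-v3.0');
    const queryVector = await getCohereVector('what did chunk one say?', true, directories, 'embed-english-v3.0');
    console.log(docVectors.length, queryVector.length);
}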
@@ -6,7 +6,7 @@ const TASK = 'feature-extraction';
  * @returns {Promise<number[]>} - The vectorized text in form of an array of numbers
  */
 async function getTransformersVector(text) {
-    const module = await import('./transformers.mjs');
+    const module = await import('../transformers.mjs');
     const pipe = await module.default.getPipeline(TASK);
     const result = await pipe(text, { pooling: 'mean', normalize: true });
     const vector = Array.from(result.data);
@@ -1,10 +1,10 @@
 const fetch = require('node-fetch').default;
-const { SECRET_KEYS, readSecret } = require('./endpoints/secrets');
+const { SECRET_KEYS, readSecret } = require('../endpoints/secrets');
 
 /**
  * Gets the vector for the given text from gecko model
  * @param {string[]} texts - The array of texts to get the vector for
- * @param {import('./users').UserDirectoryList} directories - The directories object for the user
+ * @param {import('../users').UserDirectoryList} directories - The directories object for the user
  * @returns {Promise<number[][]>} - The array of vectors for the texts
  */
 async function getMakerSuiteBatchVector(texts, directories) {
@@ -16,7 +16,7 @@ async function getMakerSuiteBatchVector(texts, directories) {
 /**
  * Gets the vector for the given text from PaLM gecko model
  * @param {string} text - The text to get the vector for
- * @param {import('./users').UserDirectoryList} directories - The directories object for the user
+ * @param {import('../users').UserDirectoryList} directories - The directories object for the user
  * @returns {Promise<number[]>} - The vector for the text
  */
 async function getMakerSuiteVector(text, directories) {
@@ -1,5 +1,5 @@
 const fetch = require('node-fetch').default;
-const { SECRET_KEYS, readSecret } = require('./endpoints/secrets');
+const { SECRET_KEYS, readSecret } = require('../endpoints/secrets');
 
 const SOURCES = {
     'nomicai': {
@@ -13,7 +13,7 @@ const SOURCES = {
  * Gets the vector for the given text batch from an OpenAI compatible endpoint.
  * @param {string[]} texts - The array of texts to get the vector for
  * @param {string} source - The source of the vector
- * @param {import('./users').UserDirectoryList} directories - The directories object for the user
+ * @param {import('../users').UserDirectoryList} directories - The directories object for the user
  * @returns {Promise<number[][]>} - The array of vectors for the texts
  */
 async function getNomicAIBatchVector(texts, source, directories) {
@@ -64,7 +64,7 @@ async function getNomicAIBatchVector(texts, source, directories) {
  * Gets the vector for the given text from an OpenAI compatible endpoint.
  * @param {string} text - The text to get the vector for
  * @param {string} source - The source of the vector
- * @param {import('./users').UserDirectoryList} directories - The directories object for the user
+ * @param {import('../users').UserDirectoryList} directories - The directories object for the user
  * @returns {Promise<number[]>} - The vector for the text
  */
 async function getNomicAIVector(text, source, directories) {
@@ -1,5 +1,5 @@
 const fetch = require('node-fetch').default;
-const { SECRET_KEYS, readSecret } = require('./endpoints/secrets');
+const { SECRET_KEYS, readSecret } = require('../endpoints/secrets');
 
 const SOURCES = {
     'togetherai': {
@@ -23,7 +23,7 @@ const SOURCES = {
  * Gets the vector for the given text batch from an OpenAI compatible endpoint.
  * @param {string[]} texts - The array of texts to get the vector for
  * @param {string} source - The source of the vector
- * @param {import('./users').UserDirectoryList} directories - The directories object for the user
+ * @param {import('../users').UserDirectoryList} directories - The directories object for the user
  * @param {string} model - The model to use for the embedding
  * @returns {Promise<number[][]>} - The array of vectors for the texts
  */
@@ -79,7 +79,7 @@ async function getOpenAIBatchVector(texts, source, directories, model = '') {
  * Gets the vector for the given text from an OpenAI compatible endpoint.
  * @param {string} text - The text to get the vector for
  * @param {string} source - The source of the vector
- * @param {import('./users').UserDirectoryList} directories - The directories object for the user
+ * @param {import('../users').UserDirectoryList} directories - The directories object for the user
  * @param {string} model - The model to use for the embedding
  * @returns {Promise<number[]>} - The vector for the text
  */