Merge branch 'neo-server' into parser-v2
@@ -19,6 +19,8 @@
     <li data-placeholder="scale" class="sd_comfy_workflow_editor_not_found">"%scale%"</li>
     <li data-placeholder="width" class="sd_comfy_workflow_editor_not_found">"%width%"</li>
     <li data-placeholder="height" class="sd_comfy_workflow_editor_not_found">"%height%"</li>
+    <li data-placeholder="user_avatar" class="sd_comfy_workflow_editor_not_found">"%user_avatar%"</li>
+    <li data-placeholder="char_avatar" class="sd_comfy_workflow_editor_not_found">"%char_avatar%"</li>
     <li><hr></li>
     <li data-placeholder="seed" class="sd_comfy_workflow_editor_not_found">
         "%seed%"
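Note (illustrative, not part of this commit): these placeholders are meant to sit inside a saved ComfyUI workflow JSON wherever a base64 image string is expected; the node id and class_type below are assumptions made up for the example.

// Hypothetical workflow fragment: the quoted "%char_avatar%" token is what
// generateComfyImage() later swaps for a JSON-stringified base64 data URL.
const workflowFragment = `{
    "12": {
        "class_type": "ETN_LoadImageBase64",
        "inputs": { "image": "%char_avatar%" }
    }
}`;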
@@ -2111,21 +2111,11 @@ async function generateMultimodalPrompt(generationType, quietPrompt) {
     let avatarUrl;
 
     if (generationType == generationMode.USER_MULTIMODAL) {
-        avatarUrl = getUserAvatar(user_avatar);
+        avatarUrl = getUserAvatarUrl();
     }
 
     if (generationType == generationMode.CHARACTER_MULTIMODAL || generationType === generationMode.FACE_MULTIMODAL) {
-        const context = getContext();
-
-        if (context.groupId) {
-            const groupMembers = context.groups.find(x => x.id === context.groupId)?.members;
-            const lastMessageAvatar = context.chat?.filter(x => !x.is_system && !x.is_user)?.slice(-1)[0]?.original_avatar;
-            const randomMemberAvatar = Array.isArray(groupMembers) ? groupMembers[Math.floor(Math.random() * groupMembers.length)]?.avatar : null;
-            const avatarToUse = lastMessageAvatar || randomMemberAvatar;
-            avatarUrl = formatCharacterAvatar(avatarToUse);
-        } else {
-            avatarUrl = getCharacterAvatar(context.characterId);
-        }
+        avatarUrl = getCharacterAvatarUrl();
     }
 
     try {
@@ -2152,6 +2142,24 @@ async function generateMultimodalPrompt(generationType, quietPrompt) {
     }
 }
 
+function getCharacterAvatarUrl() {
+    const context = getContext();
+
+    if (context.groupId) {
+        const groupMembers = context.groups.find(x => x.id === context.groupId)?.members;
+        const lastMessageAvatar = context.chat?.filter(x => !x.is_system && !x.is_user)?.slice(-1)[0]?.original_avatar;
+        const randomMemberAvatar = Array.isArray(groupMembers) ? groupMembers[Math.floor(Math.random() * groupMembers.length)]?.avatar : null;
+        const avatarToUse = lastMessageAvatar || randomMemberAvatar;
+        return formatCharacterAvatar(avatarToUse);
+    } else {
+        return getCharacterAvatar(context.characterId);
+    }
+}
+
+function getUserAvatarUrl() {
+    return getUserAvatar(user_avatar);
+}
+
 /**
  * Generates a prompt using the main LLM API.
  * @param {string} quietPrompt - The prompt to use for the image generation.
@@ -2636,6 +2644,22 @@ async function generateComfyImage(prompt, negativePrompt) {
     (extension_settings.sd.comfy_placeholders ?? []).forEach(ph => {
         workflow = workflow.replace(`"%${ph.find}%"`, JSON.stringify(substituteParams(ph.replace)));
     });
+    if (/%user_avatar%/gi.test(workflow)) {
+        const response = await fetch(getUserAvatarUrl());
+        if (response.ok) {
+            const avatarBlob = await response.blob();
+            const avatarBase64 = await getBase64Async(avatarBlob);
+            workflow = workflow.replace('"%user_avatar%"', JSON.stringify(avatarBase64));
+        }
+    }
+    if (/%char_avatar%/gi.test(workflow)) {
+        const response = await fetch(getCharacterAvatarUrl());
+        if (response.ok) {
+            const avatarBlob = await response.blob();
+            const avatarBase64 = await getBase64Async(avatarBlob);
+            workflow = workflow.replace('"%char_avatar%"', JSON.stringify(avatarBase64));
+        }
+    }
     console.log(`{
         "prompt": ${workflow}
     }`);
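Note (illustrative, not part of this commit): getBase64Async() is assumed here to resolve to a data-URL string, and JSON.stringify() supplies the surrounding quotes, so the quoted placeholder is swapped for a valid JSON string value.

// Sketch of the replacement step in isolation (the data URL is truncated and made up):
const workflowJson = '{ "inputs": { "image": "%user_avatar%" } }';
const avatarBase64 = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUg...';
const patched = workflowJson.replace('"%user_avatar%"', JSON.stringify(avatarBase64));
// patched === '{ "inputs": { "image": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUg..." } }'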
@@ -2649,6 +2673,10 @@ async function generateComfyImage(prompt, negativePrompt) {
             }`,
         }),
     });
+    if (!promptResult.ok) {
+        const text = await promptResult.text();
+        throw new Error(text);
+    }
     return { format: 'png', data: await promptResult.text() };
 }
 
@@ -35,6 +35,7 @@ const settings = {
     include_wi: false,
     togetherai_model: 'togethercomputer/m2-bert-80M-32k-retrieval',
     openai_model: 'text-embedding-ada-002',
+    cohere_model: 'embed-english-v3.0',
     summarize: false,
     summarize_sent: false,
     summary_source: 'main',
@@ -68,6 +69,15 @@ const settings = {
 
 const moduleWorker = new ModuleWorkerWrapper(synchronizeChat);
 
+/**
+ * Gets the Collection ID for a file embedded in the chat.
+ * @param {string} fileUrl URL of the file
+ * @returns {string} Collection ID
+ */
+function getFileCollectionId(fileUrl) {
+    return `file_${getStringHash(fileUrl)}`;
+}
+
 async function onVectorizeAllClick() {
     try {
         if (!settings.enabled_chats) {
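Note (illustrative, not part of this commit): a quick sketch of what the new helper produces; the URL and hash value are made up, the point is that the same file URL always maps to the same collection.

// getStringHash() returns a numeric hash of the string, so the ID is stable per URL.
const exampleCollectionId = getFileCollectionId('/user/files/notes.txt');
// -> e.g. 'file_1935806007' (exact value depends on the hash)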
@@ -308,7 +318,7 @@ async function processFiles(chat) {
     const dataBankCollectionIds = [];
 
     for (const file of dataBank) {
-        const collectionId = `file_${getStringHash(file.url)}`;
+        const collectionId = getFileCollectionId(file.url);
         const hashesInCollection = await getSavedHashes(collectionId);
         dataBankCollectionIds.push(collectionId);
 
@@ -354,7 +364,7 @@ async function processFiles(chat) {
 
         const fileName = message.extra.file.name;
         const fileUrl = message.extra.file.url;
-        const collectionId = `file_${getStringHash(fileUrl)}`;
+        const collectionId = getFileCollectionId(fileUrl);
         const hashesInCollection = await getSavedHashes(collectionId);
 
         // File is already in the collection
@@ -598,6 +608,9 @@ function getVectorHeaders() {
         case 'openai':
             addOpenAiHeaders(headers);
             break;
+        case 'cohere':
+            addCohereHeaders(headers);
+            break;
         default:
             break;
     }
@@ -636,6 +649,16 @@ function addOpenAiHeaders(headers) {
     });
 }
 
+/**
+ * Add headers for the Cohere API source.
+ * @param {object} headers Header object
+ */
+function addCohereHeaders(headers) {
+    Object.assign(headers, {
+        'X-Cohere-Model': extension_settings.vectors.cohere_model,
+    });
+}
+
 /**
  * Inserts vector items into a collection
  * @param {string} collectionId - The collection to insert into
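Note (illustrative, not part of this commit): with the source set to 'cohere', getVectorHeaders() now routes through addCohereHeaders(), which stamps the selected model onto the request headers.

// Sketch of the resulting header shape (the model value is whatever is selected in settings):
const headers = {};
addCohereHeaders(headers);
// headers -> { 'X-Cohere-Model': 'embed-english-v3.0' }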
@@ -647,7 +670,8 @@ async function insertVectorItems(collectionId, items) {
         settings.source === 'palm' && !secret_state[SECRET_KEYS.MAKERSUITE] ||
         settings.source === 'mistral' && !secret_state[SECRET_KEYS.MISTRALAI] ||
         settings.source === 'togetherai' && !secret_state[SECRET_KEYS.TOGETHERAI] ||
-        settings.source === 'nomicai' && !secret_state[SECRET_KEYS.NOMICAI]) {
+        settings.source === 'nomicai' && !secret_state[SECRET_KEYS.NOMICAI] ||
+        settings.source === 'cohere' && !secret_state[SECRET_KEYS.COHERE]) {
         throw new Error('Vectors: API key missing', { cause: 'api_key_missing' });
     }
 
@@ -760,7 +784,7 @@ async function purgeFileVectorIndex(fileUrl) {
     }
 
     console.log(`Vectors: Purging file vector index for ${fileUrl}`);
-    const collectionId = `file_${getStringHash(fileUrl)}`;
+    const collectionId = getFileCollectionId(fileUrl);
 
     const response = await fetch('/api/vector/purge', {
         method: 'POST',
@@ -816,6 +840,7 @@ function toggleSettings() {
     $('#vectors_chats_settings').toggle(!!settings.enabled_chats);
     $('#together_vectorsModel').toggle(settings.source === 'togetherai');
     $('#openai_vectorsModel').toggle(settings.source === 'openai');
+    $('#cohere_vectorsModel').toggle(settings.source === 'cohere');
     $('#nomicai_apiKey').toggle(settings.source === 'nomicai');
 }
 
@@ -859,6 +884,42 @@ async function onViewStatsClick() {
 
 }
 
+async function onVectorizeAllFilesClick() {
+    try {
+        const dataBank = getDataBankAttachments();
+        const chatAttachments = getContext().chat.filter(x => x.extra?.file).map(x => x.extra.file);
+        const allFiles = [...dataBank, ...chatAttachments];
+
+        for (const file of allFiles) {
+            const text = await getFileAttachment(file.url);
+            const collectionId = getFileCollectionId(file.url);
+            await vectorizeFile(text, file.name, collectionId, settings.chunk_size);
+        }
+
+        toastr.success('All files vectorized', 'Vectorization successful');
+    } catch (error) {
+        console.error('Vectors: Failed to vectorize all files', error);
+        toastr.error('Failed to vectorize all files', 'Vectorization failed');
+    }
+}
+
+async function onPurgeFilesClick() {
+    try {
+        const dataBank = getDataBankAttachments();
+        const chatAttachments = getContext().chat.filter(x => x.extra?.file).map(x => x.extra.file);
+        const allFiles = [...dataBank, ...chatAttachments];
+
+        for (const file of allFiles) {
+            await purgeFileVectorIndex(file.url);
+        }
+
+        toastr.success('All files purged', 'Purge successful');
+    } catch (error) {
+        console.error('Vectors: Failed to purge all files', error);
+        toastr.error('Failed to purge all files', 'Purge failed');
+    }
+}
+
 jQuery(async () => {
     if (!extension_settings.vectors) {
         extension_settings.vectors = settings;
@@ -913,6 +974,12 @@ jQuery(async () => {
         Object.assign(extension_settings.vectors, settings);
         saveSettingsDebounced();
     });
+    $('#vectors_cohere_model').val(settings.cohere_model).on('change', () => {
+        $('#vectors_modelWarning').show();
+        settings.cohere_model = String($('#vectors_cohere_model').val());
+        Object.assign(extension_settings.vectors, settings);
+        saveSettingsDebounced();
+    });
     $('#vectors_template').val(settings.template).on('input', () => {
         settings.template = String($('#vectors_template').val());
         Object.assign(extension_settings.vectors, settings);
@@ -947,6 +1014,8 @@ jQuery(async () => {
     $('#vectors_vectorize_all').on('click', onVectorizeAllClick);
     $('#vectors_purge').on('click', onPurgeClick);
     $('#vectors_view_stats').on('click', onViewStatsClick);
+    $('#vectors_files_vectorize_all').on('click', onVectorizeAllFilesClick);
+    $('#vectors_files_purge').on('click', onPurgeFilesClick);
 
     $('#vectors_size_threshold').val(settings.size_threshold).on('input', () => {
         settings.size_threshold = Number($('#vectors_size_threshold').val());
@@ -10,13 +10,14 @@
             Vectorization Source
         </label>
         <select id="vectors_source" class="text_pole">
+            <option value="transformers">Local (Transformers)</option>
+            <option value="cohere">Cohere</option>
             <option value="extras">Extras</option>
-            <option value="openai">OpenAI</option>
             <option value="palm">Google MakerSuite (PaLM)</option>
-            <option value="transformers">Local (Transformers)</option>
             <option value="mistral">MistralAI</option>
-            <option value="togetherai">TogetherAI</option>
             <option value="nomicai">NomicAI</option>
+            <option value="openai">OpenAI</option>
+            <option value="togetherai">TogetherAI</option>
         </select>
     </div>
     <div class="flex-container flexFlowColumn" id="openai_vectorsModel">
@@ -29,6 +30,20 @@
             <option value="text-embedding-3-large">text-embedding-3-large</option>
         </select>
     </div>
+    <div class="flex-container flexFlowColumn" id="cohere_vectorsModel">
+        <label for="vectors_cohere_model">
+            Vectorization Model
+        </label>
+        <select id="vectors_cohere_model" class="text_pole">
+            <option value="embed-english-v3.0">embed-english-v3.0</option>
+            <option value="embed-multilingual-v3.0">embed-multilingual-v3.0</option>
+            <option value="embed-english-light-v3.0">embed-english-light-v3.0</option>
+            <option value="embed-multilingual-light-v3.0">embed-multilingual-light-v3.0</option>
+            <option value="embed-english-v2.0">embed-english-v2.0</option>
+            <option value="embed-english-light-v2.0">embed-english-light-v2.0</option>
+            <option value="embed-multilingual-v2.0">embed-multilingual-v2.0</option>
+        </select>
+    </div>
     <div class="flex-container flexFlowColumn" id="together_vectorsModel">
         <label for="vectors_togetherai_model">
             Vectorization Model
@@ -167,6 +182,14 @@
         </label>
     </div>
 </div>
+<div class="flex-container">
+    <div id="vectors_files_vectorize_all" class="menu_button menu_button_icon" title="Vectorize all files in the Data Bank and current chat.">
+        Vectorize All
+    </div>
+    <div id="vectors_files_purge" class="menu_button menu_button_icon" title="Purge all file vectors in the Data Bank and current chat.">
+        Purge Vectors
+    </div>
+</div>
 </div>
 
 <hr>