Merge branch 'staging' into slash-command-enums

Cohee
2024-06-23 15:01:55 +03:00
21 changed files with 493 additions and 190 deletions

View File

@@ -1,4 +1,4 @@
-import { getBase64Async, isTrueBoolean, saveBase64AsFile } from '../../utils.js';
+import { ensureImageFormatSupported, getBase64Async, isTrueBoolean, saveBase64AsFile } from '../../utils.js';
 import { getContext, getApiUrl, doExtrasFetch, extension_settings, modules, renderExtensionTemplateAsync } from '../../extensions.js';
 import { callPopup, getRequestHeaders, saveSettingsDebounced, substituteParamsExtended } from '../../../script.js';
 import { getMessageTimeStamp } from '../../RossAscends-mods.js';
@@ -274,7 +274,7 @@ async function getCaptionForFile(file, prompt, quiet) {
     try {
         setSpinnerIcon();
         const context = getContext();
-        const fileData = await getBase64Async(file);
+        const fileData = await getBase64Async(await ensureImageFormatSupported(file));
         const base64Format = fileData.split(',')[0].split(';')[0].split('/')[1];
         const base64Data = fileData.split(',')[1];
         const { caption } = await doCaptionRequest(base64Data, fileData, prompt);
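
Note: the only functional change in this hunk is wrapping the uploaded file in ensureImageFormatSupported before the base64 conversion. That helper lives in utils.js and is not shown in this diff; the sketch below is only a guess at the idea (re-encode formats a captioning backend may reject, such as WEBP, into PNG via a canvas), not the actual implementation.

// Assumption-based sketch, not the real utils.js helper.
async function ensureImageFormatSupportedSketch(file) {
    // Formats most captioning backends accept as-is.
    const supported = ['image/jpeg', 'image/png', 'image/gif'];
    if (supported.includes(file.type)) {
        return file;
    }
    // Re-encode anything else (e.g. image/webp) to PNG via a canvas.
    const bitmap = await createImageBitmap(file);
    const canvas = document.createElement('canvas');
    canvas.width = bitmap.width;
    canvas.height = bitmap.height;
    canvas.getContext('2d').drawImage(bitmap, 0, 0);
    const blob = await new Promise((resolve) => canvas.toBlob(resolve, 'image/png'));
    return new File([blob], file.name.replace(/\.\w+$/, '.png'), { type: 'image/png' });
}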
@@ -379,6 +379,12 @@ jQuery(async function () {
     }
     function switchMultimodalBlocks() {
         const isMultimodal = extension_settings.caption.source === 'multimodal';
+        $('#caption_ollama_pull').on('click', (e) => {
+            const presetModel = extension_settings.caption.multimodal_model !== 'ollama_current' ? extension_settings.caption.multimodal_model : '';
+            e.preventDefault();
+            $('#ollama_download_model').trigger('click');
+            $('#dialogue_popup_input').val(presetModel);
+        });
         $('#caption_multimodal_block').toggle(isMultimodal);
         $('#caption_prompt_block').toggle(isMultimodal);
         $('#caption_multimodal_api').val(extension_settings.caption.multimodal_api);

View File

@@ -58,14 +58,20 @@
             <option data-type="openrouter" value="google/gemini-flash-1.5">google/gemini-flash-1.5</option>
             <option data-type="openrouter" value="liuhaotian/llava-yi-34b">liuhaotian/llava-yi-34b</option>
             <option data-type="ollama" value="ollama_current" data-i18n="currently_selected">[Currently selected]</option>
-            <option data-type="ollama" value="bakllava:latest">bakllava:latest</option>
-            <option data-type="ollama" value="llava:latest">llava:latest</option>
+            <option data-type="ollama" value="bakllava">bakllava</option>
+            <option data-type="ollama" value="llava">llava</option>
+            <option data-type="ollama" value="llava-llama3">llava-llama3</option>
+            <option data-type="ollama" value="llava-phi3">llava-phi3</option>
+            <option data-type="ollama" value="moondream">moondream</option>
             <option data-type="llamacpp" value="llamacpp_current" data-i18n="currently_loaded">[Currently loaded]</option>
             <option data-type="ooba" value="ooba_current" data-i18n="currently_loaded">[Currently loaded]</option>
             <option data-type="koboldcpp" value="koboldcpp_current" data-i18n="currently_loaded">[Currently loaded]</option>
             <option data-type="custom" value="custom_current" data-i18n="currently_selected">[Currently selected]</option>
         </select>
     </div>
+    <div data-type="ollama">
+        The model must be downloaded first! Do it with the <code>ollama pull</code> command or <a href="#" id="caption_ollama_pull">click here</a>.
+    </div>
     <label data-type="openai,anthropic,google" class="checkbox_label flexBasis100p" for="caption_allow_reverse_proxy" title="Allow using reverse proxy if defined and valid.">
         <input id="caption_allow_reverse_proxy" type="checkbox" class="checkbox">
         <span data-i18n="Allow reverse proxy">Allow reverse proxy</span>

View File

@@ -148,7 +148,7 @@ export class SettingsUi {
         this.onQrSetChange();
     }
     onQrSetChange() {
-        this.currentQrSet = QuickReplySet.get(this.currentSet.value);
+        this.currentQrSet = QuickReplySet.get(this.currentSet.value) ?? new QuickReplySet();
         this.disableSend.checked = this.currentQrSet.disableSend;
         this.placeBeforeInput.checked = this.currentQrSet.placeBeforeInput;
         this.injectInput.checked = this.currentQrSet.injectInput;
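
Note: the ?? new QuickReplySet() fallback stops the settings panel from crashing when QuickReplySet.get() finds no set for the selected name and returns undefined, leaving the property reads that follow with nothing to read from. A self-contained sketch of the failure mode and the fix, using a stand-in class rather than the real QuickReplySet:

// Generic stand-in for QuickReplySet.get(); illustration only.
class DemoSet {
    constructor() { this.disableSend = false; }
    static byName = new Map();
    static get(name) { return this.byName.get(name); }
}

const missing = DemoSet.get('no-such-set');                  // undefined
// missing.disableSend;                                       // would throw TypeError

const safe = DemoSet.get('no-such-set') ?? new DemoSet();     // fallback object instead
console.log(safe.disableSend);                                // false — the UI bindings keep working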

View File

@@ -20,7 +20,7 @@ import {
     renderExtensionTemplateAsync,
     doExtrasFetch, getApiUrl,
 } from '../../extensions.js';
-import { collapseNewlines } from '../../power-user.js';
+import { collapseNewlines, registerDebugFunction } from '../../power-user.js';
 import { SECRET_KEYS, secret_state, writeSecret } from '../../secrets.js';
 import { getDataBankAttachments, getDataBankAttachmentsForSource, getFileAttachment } from '../../chats.js';
 import { debounce, getStringHash as calculateHash, waitUntilCondition, onlyUnique, splitRecursive, trimToStartSentence, trimToEndSentence } from '../../utils.js';
@@ -989,6 +989,28 @@ async function purgeVectorIndex(collectionId) {
     }
 }
 
+/**
+ * Purges all vector indexes.
+ */
+async function purgeAllVectorIndexes() {
+    try {
+        const response = await fetch('/api/vector/purge-all', {
+            method: 'POST',
+            headers: getRequestHeaders(),
+        });
+
+        if (!response.ok) {
+            throw new Error('Failed to purge all vector indexes');
+        }
+
+        console.log('Vectors: Purged all vector indexes');
+        toastr.success('All vector indexes purged', 'Purge successful');
+    } catch (error) {
+        console.error('Vectors: Failed to purge all', error);
+        toastr.error('Failed to purge all vector indexes', 'Purge failed');
+    }
+}
+
 function toggleSettings() {
     $('#vectors_files_settings').toggle(!!settings.enabled_files);
     $('#vectors_chats_settings').toggle(!!settings.enabled_chats);
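
Note: the new purgeAllVectorIndexes() posts to /api/vector/purge-all. The matching server route belongs to this commit but is outside this excerpt; below is a rough sketch of what such an Express handler might look like. Only the route path comes from the fetch call above — the router setup and the storage helpers are hypothetical placeholders, not SillyTavern APIs.

// Illustrative only; not the actual server code from this commit.
const { Router } = require('express');
const router = Router();

// Hypothetical storage helpers — placeholders for whatever enumerates and drops indexes.
async function getAllCollections() { /* enumerate stored collection ids */ return []; }
async function deleteCollection(collectionId) { /* drop one index */ }

router.post('/purge-all', async (request, response) => {
    try {
        for (const collectionId of await getAllCollections()) {
            await deleteCollection(collectionId);
        }
        return response.sendStatus(200);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});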
@@ -1502,6 +1524,13 @@ jQuery(async () => {
         saveSettingsDebounced();
     });
 
+    $('#vectors_ollama_pull').on('click', (e) => {
+        const presetModel = extension_settings.vectors.ollama_model || '';
+        e.preventDefault();
+        $('#ollama_download_model').trigger('click');
+        $('#dialogue_popup_input').val(presetModel);
+    });
+
     const validSecret = !!secret_state[SECRET_KEYS.NOMICAI];
     const placeholder = validSecret ? '✔️ Key saved' : '❌ Missing key';
     $('#api_key_nomicai').attr('placeholder', placeholder);
@@ -1571,4 +1600,11 @@ jQuery(async () => {
         ],
         returns: ARGUMENT_TYPE.LIST,
     }));
+
+    registerDebugFunction('purge-everything', 'Purge all vector indices', 'Obliterate all stored vectors for all sources. No mercy.', async () => {
+        if (!confirm('Are you sure?')) {
+            return;
+        }
+        await purgeAllVectorIndexes();
+    });
 });

View File

@@ -32,8 +32,11 @@
         <input id="vectors_ollama_keep" type="checkbox" />
         <span data-i18n="Keep model in memory">Keep model in memory</span>
     </label>
-    <i data-i18n="Hint: Download models and set the URL in the API connection settings.">
-        Hint: Download models and set the URL in the API connection settings.
+    <div>
+        The model must be downloaded first! Do it with the <code>ollama pull</code> command or <a href="#" id="vectors_ollama_pull">click here</a>.
+    </div>
+    <i data-i18n="Hint: Set the URL in the API connection settings.">
+        Hint: Set the URL in the API connection settings.
     </i>
 </div>
 <div class="flex-container flexFlowColumn" id="llamacpp_vectorsModel">