Merge branch 'staging' into llamacpp-sampler-order

Cohee 2024-02-24 15:10:58 +02:00
commit 299bd9d563
15 changed files with 229 additions and 31 deletions

View File

@@ -1917,6 +1917,7 @@
<option value="llamacpp">llama.cpp</option>
<option value="ollama">Ollama</option>
<option value="togetherai">TogetherAI</option>
<option value="infermaticai">InfermaticAI</option>
</select>
</div>
<div data-tg-type="togetherai" class="flex-container flexFlowColumn">
@@ -1937,6 +1938,24 @@
</select>
</div>
</div>
<div data-tg-type="infermaticai" class="flex-container flexFlowColumn">
<h4 data-i18n="InfermaticAI API Key">InfermaticAI API Key</h4>
<div class="flex-container">
<input id="api_key_infermaticai" name="api_key_infermaticai" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
<div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_infermaticai"></div>
</div>
<div data-for="api_key_infermaticai" class="neutral_warning">
For privacy reasons, your API key will be hidden after you reload the page.
</div>
<div>
<h4 data-i18n="InfermaticAI Model">InfermaticAI Model</h4>
<select id="model_infermaticai_select">
<option>
-- Connect to the API --
</option>
</select>
</div>
</div>
<div data-tg-type="mancer" class="flex-container flexFlowColumn">
<div class="flex-container flexFlowColumn">
</div>
@@ -4560,7 +4579,7 @@
<div name="perEntryOverridesBlock" class="flex-container wide100p alignitemscenter">
<div class="world_entry_form_control flex1">
<small class="textAlignCenter" data-i18n="Scan Depth">Scan Depth</small>
<input class="text_pole" name="scanDepth" type="number" placeholder="Use global setting" data-i18n="[placeholder]Use global setting" max="100">
<input class="text_pole" name="scanDepth" type="number" placeholder="Use global setting" data-i18n="[placeholder]Use global setting" max="1000">
</div>
<div class="world_entry_form_control flex1">
<small class="textAlignCenter" data-i18n="Case-Sensitive">Case-Sensitive</small>

View File

@@ -18,9 +18,10 @@ import {
textgen_types,
getTextGenServer,
validateTextGenUrl,
parseTextgenLogprobs,
} from './scripts/textgen-settings.js';
const { MANCER, TOGETHERAI, OOBA, APHRODITE, OLLAMA } = textgen_types;
const { MANCER, TOGETHERAI, OOBA, APHRODITE, OLLAMA, INFERMATICAI } = textgen_types;
import {
world_info,
@@ -194,7 +195,7 @@ import { createPersona, initPersonas, selectCurrentPersona, setPersonaDescriptio
import { getBackgrounds, initBackgrounds, loadBackgroundSettings, background_settings } from './scripts/backgrounds.js';
import { hideLoader, showLoader } from './scripts/loader.js';
import { BulkEditOverlay, CharacterContextMenu } from './scripts/BulkEditOverlay.js';
import { loadMancerModels, loadOllamaModels, loadTogetherAIModels } from './scripts/textgen-models.js';
import { loadMancerModels, loadOllamaModels, loadTogetherAIModels, loadInfermaticAIModels } from './scripts/textgen-models.js';
import { appendFileContent, hasPendingFileAttachment, populateFileAttachment, decodeStyleTags, encodeStyleTags } from './scripts/chats.js';
import { initPresetManager } from './scripts/preset-manager.js';
import { evaluateMacros } from './scripts/macros.js';
@@ -1053,6 +1054,9 @@ async function getStatusTextgen() {
} else if (textgen_settings.type === OLLAMA) {
loadOllamaModels(data?.data);
online_status = textgen_settings.ollama_model || 'Connected';
} else if (textgen_settings.type === INFERMATICAI) {
loadInfermaticAIModels(data?.data);
online_status = textgen_settings.infermaticai_model;
} else {
online_status = data?.result;
}
@@ -2665,8 +2669,8 @@
}
const continueMsg = this.type === 'continue' ? this.messageAlreadyGenerated : undefined;
await saveChatConditional();
saveLogprobsForActiveMessage(this.messageLogprobs.filter(Boolean), continueMsg);
await saveChatConditional();
activateSendButtons();
showSwipeButtons();
setGenerationProgress(0);
@@ -4478,6 +4482,11 @@ function parseAndSaveLogprobs(data, continueFrom) {
// `sendOpenAIRequest`. `data` for these APIs is just a string with
// the text of the generated message, logprobs are not included.
return;
case 'textgenerationwebui':
if (textgen_settings.type === textgen_types.LLAMACPP) {
logprobs = data?.completion_probabilities?.map(x => parseTextgenLogprobs(x.content, [x])) || null;
}
break;
default:
return;
}
@@ -7684,6 +7693,11 @@ const CONNECT_API_MAP = {
button: '#api_button_openai',
source: chat_completion_sources.CUSTOM,
},
'infermaticai': {
selected: 'textgenerationwebui',
button: '#api_button_textgenerationwebui',
type: textgen_types.INFERMATICAI,
},
};
async function selectContextCallback(_, name) {
@@ -8616,6 +8630,11 @@ jQuery(async function () {
await writeSecret(SECRET_KEYS.OOBA, oobaKey);
}
const infermaticAIKey = String($('#api_key_infermaticai').val()).trim();
if (infermaticAIKey.length) {
await writeSecret(SECRET_KEYS.INFERMATICAI, infermaticAIKey);
}
validateTextGenUrl();
startStatusLoading();
main_api = 'textgenerationwebui';
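
Note: the new 'textgenerationwebui' case in parseAndSaveLogprobs above assumes the llama.cpp server's non-streaming response shape, where each generated token carries its own candidate list. A minimal sketch of that mapping, with the sample payload invented to match the fields the code reads:

// Illustrative llama.cpp response fragment (shape inferred from the diff, values made up)
const data = {
    content: ' the',
    completion_probabilities: [
        { content: ' the', probs: [{ tok_str: ' the', prob: 0.62 }, { tok_str: ' a', prob: 0.21 }] },
    ],
};
// Each entry is wrapped in a one-element array so parseTextgenLogprobs can reuse
// its LLAMACPP branch, which reads logprobs[0].probs (see textgen-settings.js below).
const logprobs = data?.completion_probabilities?.map(x => parseTextgenLogprobs(x.content, [x])) || null;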

View File

@@ -389,8 +389,9 @@ function RA_autoconnect(PrevApi) {
}
break;
case 'textgenerationwebui':
if ((textgen_settings.type === textgen_types.MANCER && secret_state[SECRET_KEYS.MANCER]) ||
(textgen_settings.type === textgen_types.TOGETHERAI && secret_state[SECRET_KEYS.TOGETHERAI])
if ((textgen_settings.type === textgen_types.MANCER && secret_state[SECRET_KEYS.MANCER])
|| (textgen_settings.type === textgen_types.TOGETHERAI && secret_state[SECRET_KEYS.TOGETHERAI])
|| (textgen_settings.type === textgen_types.INFERMATICAI && secret_state[SECRET_KEYS.INFERMATICAI])
) {
$('#api_button_textgenerationwebui').trigger('click');
}

View File

@@ -11,7 +11,7 @@ import {
updateMessageBlock,
} from '../../../script.js';
import { extension_settings, getContext } from '../../extensions.js';
import { secret_state, writeSecret } from '../../secrets.js';
import { findSecret, secret_state, writeSecret } from '../../secrets.js';
import { splitRecursive } from '../../utils.js';
export const autoModeOptions = {
@@ -598,13 +598,18 @@ jQuery(() => {
'deeplx': 'http://127.0.0.1:1188/translate',
};
const popupText = `<h3>${optionText} API URL</h3><i>Example: <tt>${String(exampleURLs[extension_settings.translate.provider])}</tt></i>`;
const url = await callPopup(popupText, 'input');
if (url == false) {
const secretKey = extension_settings.translate.provider + '_url';
const savedUrl = secret_state[secretKey] ? await findSecret(secretKey) : '';
const url = await callPopup(popupText, 'input', savedUrl);
if (url == false || url == '') {
return;
}
await writeSecret(extension_settings.translate.provider + '_url', url);
await writeSecret(secretKey, url);
toastr.success('API URL saved');
$('#translate_url_button').addClass('success');
});
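
Note: findSecret can return these URL values even when allowKeysExposure is off, presumably only because the translation *_url keys are whitelisted server-side (see the EXPORTABLE_KEYS list added to the secrets endpoint further down). A sketch of the prefill flow under that assumption:

// 'deeplx_url' etc. are assumed to match the server's SECRET_KEYS.*_URL values,
// so /api/secrets/find answers with the stored URL instead of returning 403.
const secretKey = extension_settings.translate.provider + '_url';
const savedUrl = secret_state[secretKey] ? await findSecret(secretKey) : '';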

View File

@@ -530,10 +530,15 @@ async function queryCollection(collectionId, searchText, topK) {
return results;
}
/**
* Purges the vector index for a collection.
* @param {string} collectionId Collection ID to purge
* @returns {Promise<boolean>} True if deleted, false if not
*/
async function purgeVectorIndex(collectionId) {
try {
if (!settings.enabled_chats) {
return;
return true;
}
const response = await fetch('/api/vector/purge', {
@@ -549,9 +554,10 @@ async function purgeVectorIndex(collectionId) {
}
console.log(`Vectors: Purged vector index for collection ${collectionId}`);
return true;
} catch (error) {
console.error('Vectors: Failed to purge', error);
return false;
}
}
@@ -566,8 +572,11 @@ async function onPurgeClick() {
toastr.info('No chat selected', 'Purge aborted');
return;
}
await purgeVectorIndex(chatId);
toastr.success('Vector index purged', 'Purge successful');
if (await purgeVectorIndex(chatId)) {
toastr.success('Vector index purged', 'Purge successful');
} else {
toastr.error('Failed to purge vector index', 'Purge failed');
}
}
async function onViewStatsClick() {

View File

@@ -139,9 +139,14 @@ function renderTopLogprobs() {
const candidates = topLogprobs
.sort(([, logA], [, logB]) => logB - logA)
.map(([text, log]) => {
const probability = Math.exp(log);
sum += probability;
return [text, probability, log];
if (log < 0) {
const probability = Math.exp(log);
sum += probability;
return [text, probability, log];
}
else {
return [text, log, null];
}
});
candidates.push(['<others>', 1 - sum, 0]);
@@ -157,7 +162,9 @@
const tokenText = $('<span></span>').text(`${toVisibleWhitespace(token)}`);
const percentText = $('<span></span>').text(`${(probability * 100).toFixed(2)}%`);
container.append(tokenText, percentText);
container.attr('title', `logarithm: ${log}`);
if (log) {
container.attr('title', `logarithm: ${log}`);
}
addKeyboardProps(container);
if (token !== '<others>') {
container.click(() => onAlternativeClicked(state.selectedTokenLogprobs, token));
@@ -459,7 +466,7 @@ function convertTokenIdLogprobsToText(input) {
}
export function initLogprobs() {
const debouncedRender = debounce(renderAlternativeTokensView, 250);
const debouncedRender = debounce(renderAlternativeTokensView, 500);
$('#logprobsViewerClose').click(onToggleLogprobsPanel);
$('#option_toggle_logprobs').click(onToggleLogprobsPanel);
eventSource.on(event_types.CHAT_CHANGED, debouncedRender);
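
Note: the log < 0 branch in renderTopLogprobs above is a heuristic for mixed inputs: true log-probabilities are always <= 0, while llama.cpp reports raw probabilities in [0, 1]. A small illustration of the convention being relied on:

// Negative values are treated as log-probabilities and converted with Math.exp();
// non-negative values are assumed to already be probabilities (llama.cpp's prob field).
function toProbability(value) {
    return value < 0 ? Math.exp(value) : value;
}
toProbability(Math.log(0.25)); // 0.25, recovered from a logprob
toProbability(0.25);           // 0.25, passed through unchanged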

View File

@@ -16,6 +16,7 @@ export const SECRET_KEYS = {
SERPAPI: 'api_key_serpapi',
MISTRALAI: 'api_key_mistralai',
TOGETHERAI: 'api_key_togetherai',
INFERMATICAI: 'api_key_infermaticai',
CUSTOM: 'api_key_custom',
OOBA: 'api_key_ooba',
};
@@ -37,6 +38,7 @@ const INPUT_MAP = {
[SECRET_KEYS.CUSTOM]: '#api_key_custom',
[SECRET_KEYS.TOGETHERAI]: '#api_key_togetherai',
[SECRET_KEYS.OOBA]: '#api_key_ooba',
[SECRET_KEYS.INFERMATICAI]: '#api_key_infermaticai',
};
async function clearSecret() {
@@ -124,6 +126,11 @@ export async function readSecretState() {
}
}
/**
* Finds a secret value by key.
* @param {string} key Secret key
* @returns {Promise<string | undefined>} Secret value, or undefined if keys are not exposed
*/
export async function findSecret(key) {
try {
const response = await fetch('/api/secrets/find', {

View File

@@ -4,6 +4,7 @@ import { textgenerationwebui_settings as textgen_settings, textgen_types } from
let mancerModels = [];
let togetherModels = [];
let infermaticAIModels = [];
export async function loadOllamaModels(data) {
if (!Array.isArray(data)) {
@@ -52,6 +53,32 @@ export async function loadTogetherAIModels(data) {
}
}
export async function loadInfermaticAIModels(data) {
if (!Array.isArray(data)) {
console.error('Invalid Infermatic AI models data', data);
return;
}
infermaticAIModels = data;
if (!data.find(x => x.id === textgen_settings.infermaticai_model)) {
textgen_settings.infermaticai_model = data[0]?.id || '';
}
$('#model_infermaticai_select').empty();
for (const model of data) {
if (model.display_type === 'image') {
continue;
}
const option = document.createElement('option');
option.value = model.id;
option.text = model.id;
option.selected = model.id === textgen_settings.infermaticai_model;
$('#model_infermaticai_select').append(option);
}
}
export async function loadMancerModels(data) {
if (!Array.isArray(data)) {
console.error('Invalid Mancer models data', data);
@@ -91,6 +118,14 @@ function onTogetherModelSelect() {
setGenerationParamsFromPreset({ max_length: model.context_length });
}
function onInfermaticAIModelSelect() {
const modelName = String($('#model_infermaticai_select').val());
textgen_settings.infermaticai_model = modelName;
$('#api_button_textgenerationwebui').trigger('click');
const model = infermaticAIModels.find(x => x.id === modelName);
setGenerationParamsFromPreset({ max_length: model.context_length });
}
function onOllamaModelSelect() {
const modelId = String($('#ollama_model').val());
textgen_settings.ollama_model = modelId;
@@ -130,6 +165,20 @@ function getTogetherModelTemplate(option) {
`));
}
function getInfermaticAIModelTemplate(option) {
const model = infermaticAIModels.find(x => x.id === option?.element?.value);
if (!option.id || !model) {
return option.text;
}
return $((`
<div class="flex-container flexFlowColumn">
<div><strong>${DOMPurify.sanitize(model.id)}</strong></div>
</div>
`));
}
async function downloadOllamaModel() {
try {
const serverUrl = textgen_settings.server_urls[textgen_types.OLLAMA];
@@ -174,6 +223,7 @@ async function downloadOllamaModel() {
jQuery(function () {
$('#mancer_model').on('change', onMancerModelSelect);
$('#model_togetherai_select').on('change', onTogetherModelSelect);
$('#model_infermaticai_select').on('change', onInfermaticAIModelSelect);
$('#ollama_model').on('change', onOllamaModelSelect);
$('#ollama_download_model').on('click', downloadOllamaModel);
@@ -198,5 +248,12 @@ jQuery(function () {
searchInputCssClass: 'text_pole',
width: '100%',
});
$('#model_infermaticai_select').select2({
placeholder: 'Select a model',
searchInputPlaceholder: 'Search models...',
searchInputCssClass: 'text_pole',
width: '100%',
templateResult: getInfermaticAIModelTemplate,
});
}
});
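
Note: loadInfermaticAIModels and its select handler read id, display_type, and context_length from each entry, so the /v1/models payload is presumably shaped like the following (values invented for illustration):

// Assumed shape of the model list passed to loadInfermaticAIModels(data)
const data = [
    { id: 'mistralai/Mixtral-8x7B-Instruct-v0.1', context_length: 32768, display_type: 'text' },
    { id: 'some-image-model', display_type: 'image' }, // skipped by the loader
];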

View File

@@ -31,9 +31,11 @@ export const textgen_types = {
TOGETHERAI: 'togetherai',
LLAMACPP: 'llamacpp',
OLLAMA: 'ollama',
INFERMATICAI: 'infermaticai',
};
const { MANCER, APHRODITE, TABBY, TOGETHERAI, OOBA, OLLAMA, LLAMACPP } = textgen_types;
const { MANCER, APHRODITE, TABBY, TOGETHERAI, OOBA, OLLAMA, LLAMACPP, INFERMATICAI } = textgen_types;
const LLAMACPP_DEFAULT_ORDER = [
'top_k',
'tfs_z',
@@ -64,6 +66,7 @@ const MANCER_SERVER_KEY = 'mancer_server';
const MANCER_SERVER_DEFAULT = 'https://neuro.mancer.tech';
let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEFAULT;
let TOGETHERAI_SERVER = 'https://api.together.xyz';
let INFERMATICAI_SERVER = 'https://api.totalgpt.ai';
const SERVER_INPUTS = {
[textgen_types.OOBA]: '#textgenerationwebui_api_url_text',
@@ -130,6 +133,7 @@ const settings = {
type: textgen_types.OOBA,
mancer_model: 'mytholite',
togetherai_model: 'Gryphe/MythoMax-L2-13b',
infermaticai_model: '',
ollama_model: '',
legacy_api: false,
sampler_order: KOBOLDCPP_ORDER,
@@ -230,6 +234,10 @@ export function getTextGenServer() {
return TOGETHERAI_SERVER;
}
if (settings.type === INFERMATICAI) {
return INFERMATICAI_SERVER;
}
return settings.server_urls[settings.type] ?? '';
}
@@ -253,8 +261,8 @@
function formatTextGenURL(value) {
try {
// Mancer/Together doesn't need any formatting (it's hardcoded)
if (settings.type === MANCER || settings.type === TOGETHERAI) {
// Mancer/Together/InfermaticAI doesn't need any formatting (it's hardcoded)
if (settings.type === MANCER || settings.type === TOGETHERAI || settings.type === INFERMATICAI) {
return value;
}
@@ -795,7 +803,7 @@ async function generateTextGenWithStreaming(generate_data, signal) {
} else {
const newText = data?.choices?.[0]?.text || data?.content || '';
text += newText;
logprobs = parseTextgenLogprobs(newText, data.choices?.[0]?.logprobs);
logprobs = parseTextgenLogprobs(newText, data.choices?.[0]?.logprobs || data?.completion_probabilities);
}
yield { text, swipes, logprobs };
@@ -811,7 +819,7 @@
* @param {Object} logprobs - logprobs object returned from the API
* @returns {import('logprobs.js').TokenLogprobs | null} - converted logprobs
*/
function parseTextgenLogprobs(token, logprobs) {
export function parseTextgenLogprobs(token, logprobs) {
if (!logprobs) {
return null;
}
@@ -828,6 +836,14 @@ function parseTextgenLogprobs(token, logprobs) {
const candidates = Object.entries(topLogprobs[0]);
return { token, topLogprobs: candidates };
}
case LLAMACPP: {
/** @type {Record<string, number>[]} */
if (!logprobs?.length) {
return null;
}
const candidates = logprobs[0].probs.map(x => [ x.tok_str, x.prob ]);
return { token, topLogprobs: candidates };
}
default:
return null;
}
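
Note: the LLAMACPP branch above expects the caller to pass a single completion_probabilities entry wrapped in an array, as both script.js and the streaming path do. A sketch of the conversion with invented values:

// Input: [{ probs: [{ tok_str, prob }, ...] }]  ->  { token, topLogprobs: [[text, prob], ...] }
const entry = [{ probs: [{ tok_str: ' the', prob: 0.62 }, { tok_str: ' a', prob: 0.21 }] }];
const result = parseTextgenLogprobs(' the', entry);
// => { token: ' the', topLogprobs: [[' the', 0.62], [' a', 0.21]] }

The resulting pairs carry raw probabilities rather than logprobs, which is what the log < 0 check in logprobs.js compensates for.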
@@ -882,6 +898,10 @@ function getModel() {
return settings.togetherai_model;
}
if (settings.type === INFERMATICAI) {
return settings.infermaticai_model;
}
if (settings.type === APHRODITE) {
return online_status;
}
@@ -970,6 +990,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
'n_predict': maxTokens,
'mirostat': settings.mirostat_mode,
'ignore_eos': settings.ban_eos_token,
'n_probs': power_user.request_token_probabilities ? 10 : undefined,
};
const aphroditeParams = {
'n': canMultiSwipe ? settings.n : 1,

View File

@@ -70,7 +70,7 @@ const SORT_ORDER_KEY = 'world_info_sort_order';
const METADATA_KEY = 'world_info';
const DEFAULT_DEPTH = 4;
const MAX_SCAN_DEPTH = 100;
const MAX_SCAN_DEPTH = 1000;
/**
* Represents a scanning buffer for one evaluation of World Info.
@@ -113,6 +113,10 @@ class WorldInfoBuffer {
if (messages[depth]) {
this.#depthBuffer[depth] = messages[depth].trim();
}
// break if last message is reached
if (depth === messages.length - 1) {
break;
}
}
}
@@ -1513,11 +1517,13 @@ function getWorldEntry(name, data, entry) {
// Clamp if necessary
if (value < 0) {
$(this).val(0).trigger('input');
toastr.warning('Scan depth cannot be negative');
return;
}
if (value > MAX_SCAN_DEPTH) {
$(this).val(MAX_SCAN_DEPTH).trigger('input');
toastr.warning(`Scan depth cannot exceed ${MAX_SCAN_DEPTH}`);
return;
}
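
Note: the early break added to WorldInfoBuffer matters more now that MAX_SCAN_DEPTH is 1000: without it, the buffer loop would keep iterating up to the configured scan depth long after the chat ran out of messages. Roughly, assuming a loop of this shape:

// With scanDepth = 1000 and a 3-message chat, the loop previously ran 1000 iterations;
// it now stops as soon as the last message has been buffered.
for (let depth = 0; depth < scanDepth; depth++) {
    if (messages[depth]) {
        depthBuffer[depth] = messages[depth].trim();
    }
    if (depth === messages.length - 1) {
        break; // deeper slots would all be empty anyway
    }
}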

View File

@@ -19,6 +19,14 @@ function getTogetherAIHeaders() {
}) : {};
}
function getInfermaticAIHeaders() {
const apiKey = readSecret(SECRET_KEYS.INFERMATICAI);
return apiKey ? ({
'Authorization': `Bearer ${apiKey}`,
}) : {};
}
function getAphroditeHeaders() {
const apiKey = readSecret(SECRET_KEYS.APHRODITE);
@@ -80,6 +88,9 @@ function setAdditionalHeaders(request, args, server) {
case TEXTGEN_TYPES.OOBA:
headers = getOobaHeaders();
break;
case TEXTGEN_TYPES.INFERMATICAI:
headers = getInfermaticAIHeaders();
break;
default:
headers = server ? getOverrideHeaders((new URL(server))?.host) : {};
break;

View File

@@ -176,8 +176,21 @@ const TEXTGEN_TYPES = {
TOGETHERAI: 'togetherai',
LLAMACPP: 'llamacpp',
OLLAMA: 'ollama',
INFERMATICAI: 'infermaticai',
};
const INFERMATICAI_KEYS = [
'model',
'prompt',
'max_tokens',
'temperature',
'top_p',
'top_k',
'repetition_penalty',
'stream',
'stop',
];
// https://docs.together.ai/reference/completions
const TOGETHERAI_KEYS = [
'model',
@@ -225,4 +238,5 @@ module.exports = {
AVATAR_HEIGHT,
TOGETHERAI_KEYS,
OLLAMA_KEYS,
INFERMATICAI_KEYS,
};

View File

@@ -4,7 +4,7 @@ const _ = require('lodash');
const Readable = require('stream').Readable;
const { jsonParser } = require('../../express-common');
const { TEXTGEN_TYPES, TOGETHERAI_KEYS, OLLAMA_KEYS } = require('../../constants');
const { TEXTGEN_TYPES, TOGETHERAI_KEYS, OLLAMA_KEYS, INFERMATICAI_KEYS } = require('../../constants');
const { forwardFetchResponse, trimV1 } = require('../../util');
const { setAdditionalHeaders } = require('../../additional-headers');
@@ -106,6 +107,7 @@ router.post('/status', jsonParser, async function (request, response) {
case TEXTGEN_TYPES.APHRODITE:
case TEXTGEN_TYPES.KOBOLDCPP:
case TEXTGEN_TYPES.LLAMACPP:
case TEXTGEN_TYPES.INFERMATICAI:
url += '/v1/models';
break;
case TEXTGEN_TYPES.MANCER:
@@ -232,6 +233,7 @@ router.post('/generate', jsonParser, async function (request, response) {
case TEXTGEN_TYPES.TABBY:
case TEXTGEN_TYPES.KOBOLDCPP:
case TEXTGEN_TYPES.TOGETHERAI:
case TEXTGEN_TYPES.INFERMATICAI:
url += '/v1/completions';
break;
case TEXTGEN_TYPES.MANCER:
@@ -261,6 +263,11 @@
args.body = JSON.stringify(request.body);
}
if (request.body.api_type === TEXTGEN_TYPES.INFERMATICAI) {
request.body = _.pickBy(request.body, (_, key) => INFERMATICAI_KEYS.includes(key));
args.body = JSON.stringify(request.body);
}
if (request.body.api_type === TEXTGEN_TYPES.OLLAMA) {
args.body = JSON.stringify({
model: request.body.model,
@@ -292,6 +299,11 @@
data['choices'] = [{ text }];
}
// Map InfermaticAI response to OAI completions format
if (completionsReply.url.includes('https://api.totalgpt.ai')) {
data['choices'] = (data?.choices || []).map(choice => ({ text: choice.message.content }));
}
return response.send(data);
} else {
const text = await completionsReply.text();
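
Note: the totalgpt.ai remap above implies InfermaticAI answers /v1/completions with chat-style choices, which the route flattens into the OAI text-completion shape the client expects. Illustrative before/after, with the payload invented to match the field access in the code:

// Assumed InfermaticAI reply:
let data = { choices: [{ message: { content: 'Hello there.' } }] };
// After the remap the client sees the standard completions shape:
data['choices'] = (data?.choices || []).map(choice => ({ text: choice.message.content }));
// => { choices: [{ text: 'Hello there.' }] }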

View File

@@ -30,8 +30,17 @@ const SECRET_KEYS = {
MISTRALAI: 'api_key_mistralai',
CUSTOM: 'api_key_custom',
OOBA: 'api_key_ooba',
INFERMATICAI: 'api_key_infermaticai',
};
// These are the keys that are safe to expose, even if allowKeysExposure is false
const EXPORTABLE_KEYS = [
SECRET_KEYS.LIBRE_URL,
SECRET_KEYS.LINGVA_URL,
SECRET_KEYS.ONERING_URL,
SECRET_KEYS.DEEPLX_URL,
];
/**
* Writes a secret to the secrets file
* @param {string} key Secret key
@@ -212,14 +221,13 @@ router.post('/view', jsonParser, async (_, response) => {
router.post('/find', jsonParser, (request, response) => {
const allowKeysExposure = getConfigValue('allowKeysExposure', false);
const key = request.body.key;
if (!allowKeysExposure) {
if (!allowKeysExposure && !EXPORTABLE_KEYS.includes(key)) {
console.error('Cannot fetch secrets unless allowKeysExposure in config.yaml is set to true');
return response.sendStatus(403);
}
const key = request.body.key;
try {
const secret = readSecret(key);

View File

@@ -4,6 +4,9 @@ const express = require('express');
const sanitize = require('sanitize-filename');
const { jsonParser } = require('../express-common');
// Don't forget to add new sources to the SOURCES array
const SOURCES = ['transformers', 'mistral', 'openai', 'extras', 'palm'];
/**
* Gets the vector for the given text from the given source.
* @param {string} source - The source of the vector
@@ -261,8 +264,7 @@ router.post('/purge', jsonParser, async (req, res) => {
const collectionId = String(req.body.collectionId);
const sources = ['transformers', 'openai', 'palm'];
for (const source of sources) {
for (const source of SOURCES) {
const index = await getIndex(collectionId, source, false);
const exists = await index.isIndexCreated();