diff --git a/public/scripts/extensions/quick-reply/index.js b/public/scripts/extensions/quick-reply/index.js
index ad5bb74e0..50f3aba72 100644
--- a/public/scripts/extensions/quick-reply/index.js
+++ b/public/scripts/extensions/quick-reply/index.js
@@ -636,9 +636,9 @@ function generateQuickReplyElements() {
     $('#quickReplyContainer').empty().append(quickReplyHtml);

     for (let i = 1; i <= extension_settings.quickReply.numberOfSlots; i++) {
-        $(`#quickReply${i}Mes`).on('input', function () { onQuickReplyInput(i); });
-        $(`#quickReply${i}Label`).on('input', function () { onQuickReplyLabelInput(i); });
-        $(`#quickReply${i}CtxButton`).on('click', function () { onQuickReplyCtxButtonClick(i); });
+        $(`#quickReply${i}Mes`).on('input', function () { onQuickReplyInput(this.closest('[data-order]').getAttribute('data-order')); });
+        $(`#quickReply${i}Label`).on('input', function () { onQuickReplyLabelInput(this.closest('[data-order]').getAttribute('data-order')); });
+        $(`#quickReply${i}CtxButton`).on('click', function () { onQuickReplyCtxButtonClick(this.closest('[data-order]').getAttribute('data-order')); });
         $(`#quickReplyContainer > [data-order="${i}"]`).attr('data-contextMenu', JSON.stringify(extension_settings.quickReply.quickReplySlots[i - 1]?.contextMenu ?? []));
     }

@@ -692,9 +692,11 @@ function saveQROrder() {
     //update html-level order data to match new sort
     let i = 1;
     $('#quickReplyContainer').children().each(function () {
+        const oldOrder = $(this).attr('data-order');
         $(this).attr('data-order', i);
         $(this).find('input').attr('id', `quickReply${i}Label`);
         $(this).find('textarea').attr('id', `quickReply${i}Mes`);
+        $(this).find(`#quickReply${oldOrder}CtxButton`).attr('id', `quickReply${i}CtxButton`);
         i++;
     });

diff --git a/public/scripts/extensions/stable-diffusion/index.js b/public/scripts/extensions/stable-diffusion/index.js
index 7e193db8a..9a57bcd8e 100644
--- a/public/scripts/extensions/stable-diffusion/index.js
+++ b/public/scripts/extensions/stable-diffusion/index.js
@@ -967,7 +967,7 @@ async function getAutoRemoteUpscalers() {

 async function getVladRemoteUpscalers() {
     try {
-        const result = await fetch('/api/sd-next/upscalers', {
+        const result = await fetch('/api/sd/sd-next/upscalers', {
             method: 'POST',
             headers: getRequestHeaders(),
             body: JSON.stringify(getSdRequestBody()),
diff --git a/public/scripts/secrets.js b/public/scripts/secrets.js
index 5fef67e2f..84279641d 100644
--- a/public/scripts/secrets.js
+++ b/public/scripts/secrets.js
@@ -49,7 +49,7 @@ function updateSecretDisplay() {
 }

 async function viewSecrets() {
-    const response = await fetch('/viewsecrets', {
+    const response = await fetch('/api/secrets/view', {
         method: 'POST',
         headers: getRequestHeaders(),
     });
diff --git a/public/scripts/tokenizers.js b/public/scripts/tokenizers.js
index ce52b6704..ecab34705 100644
--- a/public/scripts/tokenizers.js
+++ b/public/scripts/tokenizers.js
@@ -161,17 +161,17 @@ function callTokenizer(type, str, padding) {
         case tokenizers.NONE:
             return guesstimate(str) + padding;
         case tokenizers.GPT2:
-            return countTokensRemote('/api/tokenize/gpt2', str, padding);
+            return countTokensRemote('/api/tokenizers/gpt2/encode', str, padding);
         case tokenizers.LLAMA:
-            return countTokensRemote('/api/tokenize/llama', str, padding);
+            return countTokensRemote('/api/tokenizers/llama/encode', str, padding);
         case tokenizers.NERD:
-            return countTokensRemote('/api/tokenize/nerdstash', str, padding);
+            return countTokensRemote('/api/tokenizers/nerdstash/encode', str, padding);
         case tokenizers.NERD2:
-            return countTokensRemote('/api/tokenize/nerdstash_v2', str, padding);
+            return countTokensRemote('/api/tokenizers/nerdstash_v2/encode', str, padding);
         case tokenizers.MISTRAL:
-            return countTokensRemote('/api/tokenize/mistral', str, padding);
+            return countTokensRemote('/api/tokenizers/mistral/encode', str, padding);
         case tokenizers.YI:
-            return countTokensRemote('/api/tokenize/yi', str, padding);
+            return countTokensRemote('/api/tokenizers/yi/encode', str, padding);
         case tokenizers.API:
             return countTokensRemote('/tokenize_via_api', str, padding);
         default:
@@ -349,7 +349,7 @@ export function countTokensOpenAI(messages, full = false) {
         jQuery.ajax({
             async: false,
             type: 'POST', //
-            url: shouldTokenizeAI21 ? '/api/tokenize/ai21' : `/api/tokenize/openai?model=${model}`,
+            url: shouldTokenizeAI21 ? '/api/tokenizers/ai21/count' : `/api/tokenizers/openai/count?model=${model}`,
             data: JSON.stringify([message]),
             dataType: 'json',
             contentType: 'application/json',
@@ -509,20 +509,20 @@ function decodeTextTokensRemote(endpoint, ids, model = '') {
 export function getTextTokens(tokenizerType, str) {
     switch (tokenizerType) {
         case tokenizers.GPT2:
-            return getTextTokensRemote('/api/tokenize/gpt2', str);
+            return getTextTokensRemote('/api/tokenizers/gpt2/encode', str);
         case tokenizers.LLAMA:
-            return getTextTokensRemote('/api/tokenize/llama', str);
+            return getTextTokensRemote('/api/tokenizers/llama/encode', str);
         case tokenizers.NERD:
-            return getTextTokensRemote('/api/tokenize/nerdstash', str);
+            return getTextTokensRemote('/api/tokenizers/nerdstash/encode', str);
         case tokenizers.NERD2:
-            return getTextTokensRemote('/api/tokenize/nerdstash_v2', str);
+            return getTextTokensRemote('/api/tokenizers/nerdstash_v2/encode', str);
         case tokenizers.MISTRAL:
-            return getTextTokensRemote('/api/tokenize/mistral', str);
+            return getTextTokensRemote('/api/tokenizers/mistral/encode', str);
         case tokenizers.YI:
-            return getTextTokensRemote('/api/tokenize/yi', str);
+            return getTextTokensRemote('/api/tokenizers/yi/encode', str);
         case tokenizers.OPENAI: {
             const model = getTokenizerModel();
-            return getTextTokensRemote('/api/tokenize/openai-encode', str, model);
+            return getTextTokensRemote('/api/tokenizers/openai/encode', str, model);
         }
         case tokenizers.API:
             return getTextTokensRemote('/tokenize_via_api', str);
@@ -540,20 +540,20 @@ export function getTextTokens(tokenizerType, str) {
 export function decodeTextTokens(tokenizerType, ids) {
     switch (tokenizerType) {
         case tokenizers.GPT2:
-            return decodeTextTokensRemote('/api/decode/gpt2', ids);
+            return decodeTextTokensRemote('/api/tokenizers/gpt2/decode', ids);
         case tokenizers.LLAMA:
-            return decodeTextTokensRemote('/api/decode/llama', ids);
+            return decodeTextTokensRemote('/api/tokenizers/llama/decode', ids);
         case tokenizers.NERD:
-            return decodeTextTokensRemote('/api/decode/nerdstash', ids);
+            return decodeTextTokensRemote('/api/tokenizers/nerdstash/decode', ids);
         case tokenizers.NERD2:
-            return decodeTextTokensRemote('/api/decode/nerdstash_v2', ids);
+            return decodeTextTokensRemote('/api/tokenizers/nerdstash_v2/decode', ids);
         case tokenizers.MISTRAL:
-            return decodeTextTokensRemote('/api/decode/mistral', ids);
+            return decodeTextTokensRemote('/api/tokenizers/mistral/decode', ids);
         case tokenizers.YI:
-            return decodeTextTokensRemote('/api/decode/yi', ids);
+            return decodeTextTokensRemote('/api/tokenizers/yi/decode', ids);
         case tokenizers.OPENAI: {
             const model = getTokenizerModel();
-            return decodeTextTokensRemote('/api/decode/openai', ids, model);
+            return decodeTextTokensRemote('/api/tokenizers/openai/decode', ids, model);
         }
         default:
             console.warn('Calling decodeTextTokens with unsupported tokenizer type', tokenizerType);
diff --git a/public/scripts/world-info.js b/public/scripts/world-info.js
index f156a8cd3..5c9d52775 100644
--- a/public/scripts/world-info.js
+++ b/public/scripts/world-info.js
@@ -1374,6 +1374,7 @@ const newEntryTemplate = {
     probability: 100,
     useProbability: true,
     depth: DEFAULT_DEPTH,
+    group: '',
 };

 function createWorldInfoEntry(name, data, fromSlashCommand = false) {
@@ -1970,6 +1971,7 @@ function convertAgnaiMemoryBook(inputObj) {
             displayIndex: index,
             probability: null,
             useProbability: false,
+            group: '',
         };
     });

@@ -1996,6 +1998,7 @@ function convertRisuLorebook(inputObj) {
             displayIndex: index,
             probability: entry.activationPercent ?? null,
             useProbability: entry.activationPercent ?? false,
+            group: '',
         };
     });

@@ -2027,6 +2030,7 @@ function convertNovelLorebook(inputObj) {
             displayIndex: index,
             probability: null,
             useProbability: false,
+            group: '',
         };
     });

@@ -2060,6 +2064,7 @@ function convertCharacterBook(characterBook) {
             useProbability: entry.extensions?.useProbability ?? false,
             depth: entry.extensions?.depth ?? DEFAULT_DEPTH,
             selectiveLogic: entry.extensions?.selectiveLogic ?? 0,
+            group: entry.extensions?.group ?? '',
         };
     });

diff --git a/server.js b/server.js
index 8dbe78c57..a0b24bb5b 100644
--- a/server.js
+++ b/server.js
@@ -1875,6 +1875,7 @@ function convertWorldInfoToCharacterBook(name, entries) {
                 useProbability: entry.useProbability ?? false,
                 depth: entry.depth ?? 4,
                 selectiveLogic: entry.selectiveLogic ?? 0,
+                group: entry.group ?? '',
             },
         };

diff --git a/src/endpoints/secrets.js b/src/endpoints/secrets.js
index c612836cc..a0282c5e1 100644
--- a/src/endpoints/secrets.js
+++ b/src/endpoints/secrets.js
@@ -169,7 +169,7 @@ function registerEndpoints(app, jsonParser) {
         }
     });

-    app.post('/viewsecrets', jsonParser, async (_, response) => {
+    app.post('/api/secrets/view', jsonParser, async (_, response) => {
         const allowKeysExposure = getConfigValue('allowKeysExposure', false);

         if (!allowKeysExposure) {
diff --git a/src/endpoints/stable-diffusion.js b/src/endpoints/stable-diffusion.js
index f6c94be98..2375bc44b 100644
--- a/src/endpoints/stable-diffusion.js
+++ b/src/endpoints/stable-diffusion.js
@@ -291,7 +291,7 @@ function registerEndpoints(app, jsonParser) {
         }
     });

-    app.post('/api/sd-next/upscalers', jsonParser, async (request, response) => {
+    app.post('/api/sd/sd-next/upscalers', jsonParser, async (request, response) => {
         try {
             const url = new URL(request.body.url);
             url.pathname = '/sdapi/v1/upscalers';
diff --git a/src/endpoints/tokenizers.js b/src/endpoints/tokenizers.js
index d73af29b1..b0e6b10db 100644
--- a/src/endpoints/tokenizers.js
+++ b/src/endpoints/tokenizers.js
@@ -365,7 +365,7 @@ async function loadTokenizers() {
  * @param {any} jsonParser JSON parser middleware
  */
 function registerEndpoints(app, jsonParser) {
-    app.post('/api/tokenize/ai21', jsonParser, async function (req, res) {
+    app.post('/api/tokenizers/ai21/count', jsonParser, async function (req, res) {
         if (!req.body) return res.sendStatus(400);
         const options = {
             method: 'POST',
@@ -387,20 +387,20 @@ function registerEndpoints(app, jsonParser) {
         }
     });

-    app.post('/api/tokenize/llama', jsonParser, createSentencepieceEncodingHandler(spp_llama));
-    app.post('/api/tokenize/nerdstash', jsonParser, createSentencepieceEncodingHandler(spp_nerd));
-    app.post('/api/tokenize/nerdstash_v2', jsonParser, createSentencepieceEncodingHandler(spp_nerd_v2));
-    app.post('/api/tokenize/mistral', jsonParser, createSentencepieceEncodingHandler(spp_mistral));
-    app.post('/api/tokenize/yi', jsonParser, createSentencepieceEncodingHandler(spp_yi));
-    app.post('/api/tokenize/gpt2', jsonParser, createTiktokenEncodingHandler('gpt2'));
-    app.post('/api/decode/llama', jsonParser, createSentencepieceDecodingHandler(spp_llama));
-    app.post('/api/decode/nerdstash', jsonParser, createSentencepieceDecodingHandler(spp_nerd));
-    app.post('/api/decode/nerdstash_v2', jsonParser, createSentencepieceDecodingHandler(spp_nerd_v2));
-    app.post('/api/decode/mistral', jsonParser, createSentencepieceDecodingHandler(spp_mistral));
-    app.post('/api/decode/yi', jsonParser, createSentencepieceDecodingHandler(spp_yi));
-    app.post('/api/decode/gpt2', jsonParser, createTiktokenDecodingHandler('gpt2'));
+    app.post('/api/tokenizers/llama/encode', jsonParser, createSentencepieceEncodingHandler(spp_llama));
+    app.post('/api/tokenizers/nerdstash/encode', jsonParser, createSentencepieceEncodingHandler(spp_nerd));
+    app.post('/api/tokenizers/nerdstash_v2/encode', jsonParser, createSentencepieceEncodingHandler(spp_nerd_v2));
+    app.post('/api/tokenizers/mistral/encode', jsonParser, createSentencepieceEncodingHandler(spp_mistral));
+    app.post('/api/tokenizers/yi/encode', jsonParser, createSentencepieceEncodingHandler(spp_yi));
+    app.post('/api/tokenizers/gpt2/encode', jsonParser, createTiktokenEncodingHandler('gpt2'));
+    app.post('/api/tokenizers/llama/decode', jsonParser, createSentencepieceDecodingHandler(spp_llama));
+    app.post('/api/tokenizers/nerdstash/decode', jsonParser, createSentencepieceDecodingHandler(spp_nerd));
+    app.post('/api/tokenizers/nerdstash_v2/decode', jsonParser, createSentencepieceDecodingHandler(spp_nerd_v2));
+    app.post('/api/tokenizers/mistral/decode', jsonParser, createSentencepieceDecodingHandler(spp_mistral));
+    app.post('/api/tokenizers/yi/decode', jsonParser, createSentencepieceDecodingHandler(spp_yi));
+    app.post('/api/tokenizers/gpt2/decode', jsonParser, createTiktokenDecodingHandler('gpt2'));

-    app.post('/api/tokenize/openai-encode', jsonParser, async function (req, res) {
+    app.post('/api/tokenizers/openai/encode', jsonParser, async function (req, res) {
         try {
             const queryModel = String(req.query.model || '');

@@ -435,7 +435,7 @@ function registerEndpoints(app, jsonParser) {
         }
     });

-    app.post('/api/decode/openai', jsonParser, async function (req, res) {
+    app.post('/api/tokenizers/openai/decode', jsonParser, async function (req, res) {
         try {
             const queryModel = String(req.query.model || '');

@@ -469,7 +469,7 @@ function registerEndpoints(app, jsonParser) {
         }
     });

-    app.post('/api/tokenize/openai', jsonParser, async function (req, res) {
+    app.post('/api/tokenizers/openai/count', jsonParser, async function (req, res) {
         try {
             if (!req.body) return res.sendStatus(400);