Merge branch 'staging' into express-common

Cohee 2023-12-04 19:09:20 +02:00
commit 95ef108e84
9 changed files with 52 additions and 44 deletions


@@ -636,9 +636,9 @@ function generateQuickReplyElements() {
     $('#quickReplyContainer').empty().append(quickReplyHtml);
     for (let i = 1; i <= extension_settings.quickReply.numberOfSlots; i++) {
-        $(`#quickReply${i}Mes`).on('input', function () { onQuickReplyInput(i); });
-        $(`#quickReply${i}Label`).on('input', function () { onQuickReplyLabelInput(i); });
-        $(`#quickReply${i}CtxButton`).on('click', function () { onQuickReplyCtxButtonClick(i); });
+        $(`#quickReply${i}Mes`).on('input', function () { onQuickReplyInput(this.closest('[data-order]').getAttribute('data-order')); });
+        $(`#quickReply${i}Label`).on('input', function () { onQuickReplyLabelInput(this.closest('[data-order]').getAttribute('data-order')); });
+        $(`#quickReply${i}CtxButton`).on('click', function () { onQuickReplyCtxButtonClick(this.closest('[data-order]').getAttribute('data-order')); });
         $(`#quickReplyContainer > [data-order="${i}"]`).attr('data-contextMenu', JSON.stringify(extension_settings.quickReply.quickReplySlots[i - 1]?.contextMenu ?? []));
     }
@@ -692,9 +692,11 @@ function saveQROrder() {
     //update html-level order data to match new sort
     let i = 1;
     $('#quickReplyContainer').children().each(function () {
+        const oldOrder = $(this).attr('data-order');
         $(this).attr('data-order', i);
         $(this).find('input').attr('id', `quickReply${i}Label`);
         $(this).find('textarea').attr('id', `quickReply${i}Mes`);
+        $(this).find(`#quickReply${oldOrder}CtxButton`).attr('id', `quickReply${i}CtxButton`);
         i++;
     });
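The two hunks above stop the quick-reply handlers from trusting the loop counter `i`, which is frozen when the listener is bound, and instead read the slot's current `data-order` attribute at event time; saveQROrder() also renames the context-menu button so its id matches the new position. A minimal browser sketch of the idea, with an illustrative handler in place of the real ones (the selector mirrors the markup used above, but is an assumption here):

// Sketch only: reading data-order at event time reflects the post-sort
// position, whereas a captured loop counter would not.
$('#quickReplyContainer > [data-order]').find('textarea').on('input', function () {
    const order = this.closest('[data-order]').getAttribute('data-order');
    console.log('input in slot', order); // stays correct after the user reorders slots
});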


@@ -967,7 +967,7 @@ async function getAutoRemoteUpscalers() {
 async function getVladRemoteUpscalers() {
     try {
-        const result = await fetch('/api/sd-next/upscalers', {
+        const result = await fetch('/api/sd/sd-next/upscalers', {
             method: 'POST',
             headers: getRequestHeaders(),
             body: JSON.stringify(getSdRequestBody()),


@@ -49,7 +49,7 @@ function updateSecretDisplay() {
 }
 async function viewSecrets() {
-    const response = await fetch('/viewsecrets', {
+    const response = await fetch('/api/secrets/view', {
         method: 'POST',
         headers: getRequestHeaders(),
     });


@@ -161,17 +161,17 @@ function callTokenizer(type, str, padding) {
         case tokenizers.NONE:
             return guesstimate(str) + padding;
         case tokenizers.GPT2:
-            return countTokensRemote('/api/tokenize/gpt2', str, padding);
+            return countTokensRemote('/api/tokenizers/gpt2/encode', str, padding);
         case tokenizers.LLAMA:
-            return countTokensRemote('/api/tokenize/llama', str, padding);
+            return countTokensRemote('/api/tokenizers/llama/encode', str, padding);
         case tokenizers.NERD:
-            return countTokensRemote('/api/tokenize/nerdstash', str, padding);
+            return countTokensRemote('/api/tokenizers/nerdstash/encode', str, padding);
         case tokenizers.NERD2:
-            return countTokensRemote('/api/tokenize/nerdstash_v2', str, padding);
+            return countTokensRemote('/api/tokenizers/nerdstash_v2/encode', str, padding);
         case tokenizers.MISTRAL:
-            return countTokensRemote('/api/tokenize/mistral', str, padding);
+            return countTokensRemote('/api/tokenizers/mistral/encode', str, padding);
         case tokenizers.YI:
-            return countTokensRemote('/api/tokenize/yi', str, padding);
+            return countTokensRemote('/api/tokenizers/yi/encode', str, padding);
         case tokenizers.API:
             return countTokensRemote('/tokenize_via_api', str, padding);
         default:
@@ -349,7 +349,7 @@ export function countTokensOpenAI(messages, full = false) {
         jQuery.ajax({
             async: false,
             type: 'POST', //
-            url: shouldTokenizeAI21 ? '/api/tokenize/ai21' : `/api/tokenize/openai?model=${model}`,
+            url: shouldTokenizeAI21 ? '/api/tokenizers/ai21/count' : `/api/tokenizers/openai/count?model=${model}`,
             data: JSON.stringify([message]),
             dataType: 'json',
             contentType: 'application/json',
@@ -509,20 +509,20 @@ function decodeTextTokensRemote(endpoint, ids, model = '') {
 export function getTextTokens(tokenizerType, str) {
     switch (tokenizerType) {
         case tokenizers.GPT2:
-            return getTextTokensRemote('/api/tokenize/gpt2', str);
+            return getTextTokensRemote('/api/tokenizers/gpt2/encode', str);
         case tokenizers.LLAMA:
-            return getTextTokensRemote('/api/tokenize/llama', str);
+            return getTextTokensRemote('/api/tokenizers/llama/encode', str);
         case tokenizers.NERD:
-            return getTextTokensRemote('/api/tokenize/nerdstash', str);
+            return getTextTokensRemote('/api/tokenizers/nerdstash/encode', str);
         case tokenizers.NERD2:
-            return getTextTokensRemote('/api/tokenize/nerdstash_v2', str);
+            return getTextTokensRemote('/api/tokenizers/nerdstash_v2/encode', str);
         case tokenizers.MISTRAL:
-            return getTextTokensRemote('/api/tokenize/mistral', str);
+            return getTextTokensRemote('/api/tokenizers/mistral/encode', str);
         case tokenizers.YI:
-            return getTextTokensRemote('/api/tokenize/yi', str);
+            return getTextTokensRemote('/api/tokenizers/yi/encode', str);
         case tokenizers.OPENAI: {
             const model = getTokenizerModel();
-            return getTextTokensRemote('/api/tokenize/openai-encode', str, model);
+            return getTextTokensRemote('/api/tokenizers/openai/encode', str, model);
         }
         case tokenizers.API:
             return getTextTokensRemote('/tokenize_via_api', str);
@@ -540,20 +540,20 @@ export function getTextTokens(tokenizerType, str) {
 export function decodeTextTokens(tokenizerType, ids) {
     switch (tokenizerType) {
         case tokenizers.GPT2:
-            return decodeTextTokensRemote('/api/decode/gpt2', ids);
+            return decodeTextTokensRemote('/api/tokenizers/gpt2/decode', ids);
         case tokenizers.LLAMA:
-            return decodeTextTokensRemote('/api/decode/llama', ids);
+            return decodeTextTokensRemote('/api/tokenizers/llama/decode', ids);
         case tokenizers.NERD:
-            return decodeTextTokensRemote('/api/decode/nerdstash', ids);
+            return decodeTextTokensRemote('/api/tokenizers/nerdstash/decode', ids);
         case tokenizers.NERD2:
-            return decodeTextTokensRemote('/api/decode/nerdstash_v2', ids);
+            return decodeTextTokensRemote('/api/tokenizers/nerdstash_v2/decode', ids);
         case tokenizers.MISTRAL:
-            return decodeTextTokensRemote('/api/decode/mistral', ids);
+            return decodeTextTokensRemote('/api/tokenizers/mistral/decode', ids);
         case tokenizers.YI:
-            return decodeTextTokensRemote('/api/decode/yi', ids);
+            return decodeTextTokensRemote('/api/tokenizers/yi/decode', ids);
         case tokenizers.OPENAI: {
             const model = getTokenizerModel();
-            return decodeTextTokensRemote('/api/decode/openai', ids, model);
+            return decodeTextTokensRemote('/api/tokenizers/openai/decode', ids, model);
         }
         default:
             console.warn('Calling decodeTextTokens with unsupported tokenizer type', tokenizerType);
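Taken together, the tokenizer hunks above move every client-side call onto one naming convention: /api/tokenizers/<model>/<action>, where the action is encode, decode, or count. A small sketch of that convention (the helper is illustrative, not part of the commit):

// Illustrative helper, not part of the commit: all renamed endpoints follow
// /api/tokenizers/<model>/<action>.
function tokenizerEndpoint(model, action) {
    return `/api/tokenizers/${model}/${action}`;
}

console.log(tokenizerEndpoint('llama', 'encode'));  // /api/tokenizers/llama/encode
console.log(tokenizerEndpoint('gpt2', 'decode'));   // /api/tokenizers/gpt2/decode
console.log(tokenizerEndpoint('openai', 'count'));  // /api/tokenizers/openai/count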


@@ -1374,6 +1374,7 @@ const newEntryTemplate = {
     probability: 100,
     useProbability: true,
     depth: DEFAULT_DEPTH,
+    group: '',
 };
 function createWorldInfoEntry(name, data, fromSlashCommand = false) {
@@ -1970,6 +1971,7 @@ function convertAgnaiMemoryBook(inputObj) {
             displayIndex: index,
             probability: null,
             useProbability: false,
+            group: '',
         };
     });
@@ -1996,6 +1998,7 @@ function convertRisuLorebook(inputObj) {
             displayIndex: index,
             probability: entry.activationPercent ?? null,
             useProbability: entry.activationPercent ?? false,
+            group: '',
         };
     });
@@ -2027,6 +2030,7 @@ function convertNovelLorebook(inputObj) {
             displayIndex: index,
             probability: null,
             useProbability: false,
+            group: '',
         };
     });
@@ -2060,6 +2064,7 @@ function convertCharacterBook(characterBook) {
             useProbability: entry.extensions?.useProbability ?? false,
             depth: entry.extensions?.depth ?? DEFAULT_DEPTH,
             selectiveLogic: entry.extensions?.selectiveLogic ?? 0,
+            group: entry.extensions?.group ?? '',
         };
     });


@@ -1875,6 +1875,7 @@ function convertWorldInfoToCharacterBook(name, entries) {
                 useProbability: entry.useProbability ?? false,
                 depth: entry.depth ?? 4,
                 selectiveLogic: entry.selectiveLogic ?? 0,
+                group: entry.group ?? '',
             },
         };
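The world-info hunks above (the new-entry template, the lorebook import converters, and the character-book export) all add a group field that defaults to an empty string, so downstream code can rely on the key being present. A trivial sketch of the resulting entry shape (field values are illustrative, not from the commit):

// Illustrative entry shape: after this commit every converter guarantees a
// `group` key, empty string when the source format carries no grouping info.
const entry = {
    probability: 100,
    useProbability: true,
    group: '',
};
console.log('group' in entry); // true, no optional chaining needed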


@@ -169,7 +169,7 @@ function registerEndpoints(app, jsonParser) {
         }
     });
-    app.post('/viewsecrets', jsonParser, async (_, response) => {
+    app.post('/api/secrets/view', jsonParser, async (_, response) => {
         const allowKeysExposure = getConfigValue('allowKeysExposure', false);
         if (!allowKeysExposure) {


@@ -291,7 +291,7 @@ function registerEndpoints(app, jsonParser) {
         }
     });
-    app.post('/api/sd-next/upscalers', jsonParser, async (request, response) => {
+    app.post('/api/sd/sd-next/upscalers', jsonParser, async (request, response) => {
         try {
             const url = new URL(request.body.url);
             url.pathname = '/sdapi/v1/upscalers';
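The handler above reads request.body.url and forwards the call to the SD-Next instance's /sdapi/v1/upscalers path. A hedged client sketch of hitting the renamed route directly, assuming only the url field is needed (the real client sends getSdRequestBody(), whose full shape is not shown in this diff):

// Sketch only: the body shape is an assumption; the actual client uses
// getSdRequestBody() and getRequestHeaders() as shown in the hunks above.
async function listSdNextUpscalers(sdUrl) {
    const response = await fetch('/api/sd/sd-next/upscalers', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ url: sdUrl }),
    });
    return response.json();
}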


@@ -365,7 +365,7 @@ async function loadTokenizers() {
  * @param {any} jsonParser JSON parser middleware
  */
 function registerEndpoints(app, jsonParser) {
-    app.post('/api/tokenize/ai21', jsonParser, async function (req, res) {
+    app.post('/api/tokenizers/ai21/count', jsonParser, async function (req, res) {
         if (!req.body) return res.sendStatus(400);
         const options = {
             method: 'POST',
@@ -387,20 +387,20 @@ function registerEndpoints(app, jsonParser) {
         }
     });
-    app.post('/api/tokenize/llama', jsonParser, createSentencepieceEncodingHandler(spp_llama));
-    app.post('/api/tokenize/nerdstash', jsonParser, createSentencepieceEncodingHandler(spp_nerd));
-    app.post('/api/tokenize/nerdstash_v2', jsonParser, createSentencepieceEncodingHandler(spp_nerd_v2));
-    app.post('/api/tokenize/mistral', jsonParser, createSentencepieceEncodingHandler(spp_mistral));
-    app.post('/api/tokenize/yi', jsonParser, createSentencepieceEncodingHandler(spp_yi));
-    app.post('/api/tokenize/gpt2', jsonParser, createTiktokenEncodingHandler('gpt2'));
-    app.post('/api/decode/llama', jsonParser, createSentencepieceDecodingHandler(spp_llama));
-    app.post('/api/decode/nerdstash', jsonParser, createSentencepieceDecodingHandler(spp_nerd));
-    app.post('/api/decode/nerdstash_v2', jsonParser, createSentencepieceDecodingHandler(spp_nerd_v2));
-    app.post('/api/decode/mistral', jsonParser, createSentencepieceDecodingHandler(spp_mistral));
-    app.post('/api/decode/yi', jsonParser, createSentencepieceDecodingHandler(spp_yi));
-    app.post('/api/decode/gpt2', jsonParser, createTiktokenDecodingHandler('gpt2'));
-    app.post('/api/tokenize/openai-encode', jsonParser, async function (req, res) {
+    app.post('/api/tokenizers/llama/encode', jsonParser, createSentencepieceEncodingHandler(spp_llama));
+    app.post('/api/tokenizers/nerdstash/encode', jsonParser, createSentencepieceEncodingHandler(spp_nerd));
+    app.post('/api/tokenizers/nerdstash_v2/encode', jsonParser, createSentencepieceEncodingHandler(spp_nerd_v2));
+    app.post('/api/tokenizers/mistral/encode', jsonParser, createSentencepieceEncodingHandler(spp_mistral));
+    app.post('/api/tokenizers/yi/encode', jsonParser, createSentencepieceEncodingHandler(spp_yi));
+    app.post('/api/tokenizers/gpt2/encode', jsonParser, createTiktokenEncodingHandler('gpt2'));
+    app.post('/api/tokenizers/llama/decode', jsonParser, createSentencepieceDecodingHandler(spp_llama));
+    app.post('/api/tokenizers/nerdstash/decode', jsonParser, createSentencepieceDecodingHandler(spp_nerd));
+    app.post('/api/tokenizers/nerdstash_v2/decode', jsonParser, createSentencepieceDecodingHandler(spp_nerd_v2));
+    app.post('/api/tokenizers/mistral/decode', jsonParser, createSentencepieceDecodingHandler(spp_mistral));
+    app.post('/api/tokenizers/yi/decode', jsonParser, createSentencepieceDecodingHandler(spp_yi));
+    app.post('/api/tokenizers/gpt2/decode', jsonParser, createTiktokenDecodingHandler('gpt2'));
+    app.post('/api/tokenizers/openai/encode', jsonParser, async function (req, res) {
         try {
             const queryModel = String(req.query.model || '');
@@ -435,7 +435,7 @@ function registerEndpoints(app, jsonParser) {
         }
     });
-    app.post('/api/decode/openai', jsonParser, async function (req, res) {
+    app.post('/api/tokenizers/openai/decode', jsonParser, async function (req, res) {
         try {
             const queryModel = String(req.query.model || '');
@@ -469,7 +469,7 @@ function registerEndpoints(app, jsonParser) {
         }
     });
-    app.post('/api/tokenize/openai', jsonParser, async function (req, res) {
+    app.post('/api/tokenizers/openai/count', jsonParser, async function (req, res) {
        try {
            if (!req.body) return res.sendStatus(400);
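Since every sentencepiece tokenizer now gets a matching /encode and /decode route under /api/tokenizers/, the registrations could in principle be table-driven. A sketch of that idea, reusing the identifiers that appear in the hunks above (app, jsonParser, spp_llama, createSentencepieceEncodingHandler, and so on); this illustrates the naming scheme and is not code from the commit:

// Sketch only: table-driven registration of the renamed sentencepiece routes,
// using the model instances and handler factories shown above.
const sentencepieceModels = {
    llama: spp_llama,
    nerdstash: spp_nerd,
    nerdstash_v2: spp_nerd_v2,
    mistral: spp_mistral,
    yi: spp_yi,
};

for (const [name, model] of Object.entries(sentencepieceModels)) {
    app.post(`/api/tokenizers/${name}/encode`, jsonParser, createSentencepieceEncodingHandler(model));
    app.post(`/api/tokenizers/${name}/decode`, jsonParser, createSentencepieceDecodingHandler(model));
}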