From fec27820ff7b10eb5256577b8f1a28861164239c Mon Sep 17 00:00:00 2001 From: DonMoralez Date: Wed, 13 Dec 2023 21:19:26 +0200 Subject: [PATCH 01/82] (claude)reworked prefix assignment, sysprompt mode, console message display --- public/index.html | 18 ++++++-- public/scripts/openai.js | 26 +++++++++++ server.js | 20 ++++----- src/chat-completion.js | 96 +++++++++++++++++----------------------- 4 files changed, 90 insertions(+), 70 deletions(-) diff --git a/public/index.html b/public/index.html index 348a47fa3..3ad3b7eba 100644 --- a/public/index.html +++ b/public/index.html @@ -546,10 +546,6 @@ -
- Assistant Prefill - -
@@ -1523,6 +1519,20 @@
Exclude the assistant suffix from being added to the end of prompt (Requires jailbreak with 'Assistant:' in it).
+
+ Assistant Prefill + +
+ +
+ Exclude the 'Human: ' prefix from being added to the beginning of the prompt. Instead, place it between the sysprompt and the first message with the role 'assistant'(right before 'Chat History', by default). +
+
+ Human: first message + +
diff --git a/public/scripts/openai.js b/public/scripts/openai.js index a9bc5e304..42dc37c00 100644 --- a/public/scripts/openai.js +++ b/public/scripts/openai.js @@ -222,8 +222,10 @@ const default_settings = { show_external_models: false, proxy_password: '', assistant_prefill: '', + human_sysprompt_message: '', use_ai21_tokenizer: false, exclude_assistant: false, + claude_use_sysprompt: false, use_alt_scale: false, squash_system_messages: false, image_inlining: false, @@ -275,8 +277,10 @@ const oai_settings = { show_external_models: false, proxy_password: '', assistant_prefill: '', + human_sysprompt_message: '', use_ai21_tokenizer: false, exclude_assistant: false, + claude_use_sysprompt: false, use_alt_scale: false, squash_system_messages: false, image_inlining: false, @@ -1519,7 +1523,9 @@ async function sendOpenAIRequest(type, messages, signal) { if (isClaude) { generate_data['top_k'] = Number(oai_settings.top_k_openai); generate_data['exclude_assistant'] = oai_settings.exclude_assistant; + generate_data['claude_use_sysprompt'] = oai_settings.claude_use_sysprompt; generate_data['stop'] = getCustomStoppingStrings(); // Claude shouldn't have limits on stop strings. + generate_data['human_sysprompt_message'] = substituteParams(oai_settings.human_sysprompt_message); // Don't add a prefill on quiet gens (summarization) if (!isQuiet && !oai_settings.exclude_assistant) { generate_data['assistant_prefill'] = substituteParams(oai_settings.assistant_prefill); @@ -2295,6 +2301,7 @@ function loadOpenAISettings(data, settings) { oai_settings.show_external_models = settings.show_external_models ?? default_settings.show_external_models; oai_settings.proxy_password = settings.proxy_password ?? default_settings.proxy_password; oai_settings.assistant_prefill = settings.assistant_prefill ?? default_settings.assistant_prefill; + oai_settings.human_sysprompt_message = settings.human_sysprompt_message ?? 
default_settings.human_sysprompt_message; oai_settings.image_inlining = settings.image_inlining ?? default_settings.image_inlining; oai_settings.bypass_status_check = settings.bypass_status_check ?? default_settings.bypass_status_check; @@ -2312,11 +2319,13 @@ function loadOpenAISettings(data, settings) { if (settings.openai_model !== undefined) oai_settings.openai_model = settings.openai_model; if (settings.use_ai21_tokenizer !== undefined) { oai_settings.use_ai21_tokenizer = !!settings.use_ai21_tokenizer; oai_settings.use_ai21_tokenizer ? ai21_max = 8191 : ai21_max = 9200; } if (settings.exclude_assistant !== undefined) oai_settings.exclude_assistant = !!settings.exclude_assistant; + if (settings.claude_use_sysprompt !== undefined) oai_settings.claude_use_sysprompt = !!settings.claude_use_sysprompt; if (settings.use_alt_scale !== undefined) { oai_settings.use_alt_scale = !!settings.use_alt_scale; updateScaleForm(); } $('#stream_toggle').prop('checked', oai_settings.stream_openai); $('#api_url_scale').val(oai_settings.api_url_scale); $('#openai_proxy_password').val(oai_settings.proxy_password); $('#claude_assistant_prefill').val(oai_settings.assistant_prefill); + $('#claude_human_sysprompt_message').val(oai_settings.human_sysprompt_message); $('#openai_image_inlining').prop('checked', oai_settings.image_inlining); $('#openai_bypass_status_check').prop('checked', oai_settings.bypass_status_check); @@ -2342,6 +2351,7 @@ function loadOpenAISettings(data, settings) { $('#openai_external_category').toggle(oai_settings.show_external_models); $('#use_ai21_tokenizer').prop('checked', oai_settings.use_ai21_tokenizer); $('#exclude_assistant').prop('checked', oai_settings.exclude_assistant); + $('#claude_use_sysprompt').prop('checked', oai_settings.claude_use_sysprompt); $('#scale-alt').prop('checked', oai_settings.use_alt_scale); $('#openrouter_use_fallback').prop('checked', oai_settings.openrouter_use_fallback); $('#openrouter_force_instruct').prop('checked', 
oai_settings.openrouter_force_instruct); @@ -2531,8 +2541,10 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) { api_url_scale: settings.api_url_scale, show_external_models: settings.show_external_models, assistant_prefill: settings.assistant_prefill, + human_sysprompt_message: settings.human_sysprompt_message, use_ai21_tokenizer: settings.use_ai21_tokenizer, exclude_assistant: settings.exclude_assistant, + claude_use_sysprompt: settings.claude_use_sysprompt, use_alt_scale: settings.use_alt_scale, squash_system_messages: settings.squash_system_messages, image_inlining: settings.image_inlining, @@ -2891,8 +2903,10 @@ function onSettingsPresetChange() { show_external_models: ['#openai_show_external_models', 'show_external_models', true], proxy_password: ['#openai_proxy_password', 'proxy_password', false], assistant_prefill: ['#claude_assistant_prefill', 'assistant_prefill', false], + human_sysprompt_message: ['#claude_human_sysprompt_message', 'human_sysprompt_message', false], use_ai21_tokenizer: ['#use_ai21_tokenizer', 'use_ai21_tokenizer', true], exclude_assistant: ['#exclude_assistant', 'exclude_assistant', true], + claude_use_sysprompt: ['#claude_use_sysprompt', 'claude_use_sysprompt', true], use_alt_scale: ['#use_alt_scale', 'use_alt_scale', true], squash_system_messages: ['#squash_system_messages', 'squash_system_messages', true], image_inlining: ['#openai_image_inlining', 'image_inlining', true], @@ -3345,6 +3359,7 @@ function toggleChatCompletionForms() { if (chat_completion_sources.CLAUDE == oai_settings.chat_completion_source) { $('#claude_assistant_prefill_block').toggle(!oai_settings.exclude_assistant); + $('#claude_human_sysprompt_message_block').toggle(oai_settings.claude_use_sysprompt); } } @@ -3497,6 +3512,12 @@ $(document).ready(async function () { saveSettingsDebounced(); }); + $('#claude_use_sysprompt').on('change', function () { + oai_settings.claude_use_sysprompt = !!$('#claude_use_sysprompt').prop('checked'); + 
$('#claude_human_sysprompt_message_block').toggle(oai_settings.claude_use_sysprompt); + saveSettingsDebounced(); + }); + $('#names_in_completion').on('change', function () { oai_settings.names_in_completion = !!$('#names_in_completion').prop('checked'); saveSettingsDebounced(); @@ -3657,6 +3678,11 @@ $(document).ready(async function () { saveSettingsDebounced(); }); + $('#claude_human_sysprompt_message').on('input', function () { + oai_settings.human_sysprompt_message = String($(this).val()); + saveSettingsDebounced(); + }); + $('#openrouter_use_fallback').on('input', function () { oai_settings.openrouter_use_fallback = !!$(this).prop('checked'); saveSettingsDebounced(); diff --git a/server.js b/server.js index ec9fb03aa..a627ed3f7 100644 --- a/server.js +++ b/server.js @@ -1297,9 +1297,11 @@ async function sendClaudeRequest(request, response) { const api_url = new URL(request.body.reverse_proxy || API_CLAUDE).toString(); const api_key_claude = request.body.reverse_proxy ? request.body.proxy_password : readSecret(SECRET_KEYS.CLAUDE); + const chalk = require('chalk'); + const divider = '-'.repeat(process.stdout.columns); if (!api_key_claude) { - console.log('Claude API key is missing.'); + console.log(chalk.red(`Claude API key is missing.\n${divider}`)); return response.status(400).send({ error: true }); } @@ -1310,14 +1312,10 @@ async function sendClaudeRequest(request, response) { controller.abort(); }); - let doSystemPrompt = request.body.model === 'claude-2' || request.body.model === 'claude-2.1'; - let requestPrompt = convertClaudePrompt(request.body.messages, true, !request.body.exclude_assistant, doSystemPrompt); + let isSyspromptSupported = request.body.model === 'claude-2' || request.body.model === 'claude-2.1'; + let requestPrompt = convertClaudePrompt(request.body.messages, !request.body.exclude_assistant, request.body.assistant_prefill, isSyspromptSupported, request.body.claude_use_sysprompt, request.body.human_sysprompt_message); - if 
(request.body.assistant_prefill && !request.body.exclude_assistant) { - requestPrompt += request.body.assistant_prefill; - } - - console.log('Claude request:', requestPrompt); + console.log(chalk.green(`${divider}\nClaude request\n`) + chalk.cyan(`PROMPT\n${divider}\n${requestPrompt}\n${divider}`)); const stop_sequences = ['\n\nHuman:', '\n\nSystem:', '\n\nAssistant:']; // Add custom stop sequences @@ -1351,20 +1349,20 @@ async function sendClaudeRequest(request, response) { forwardFetchResponse(generateResponse, response); } else { if (!generateResponse.ok) { - console.log(`Claude API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`); + console.log(chalk.red(`Claude API returned error: ${generateResponse.status} ${generateResponse.statusText}\n${await generateResponse.text()}\n${divider}`)); return response.status(generateResponse.status).send({ error: true }); } const generateResponseJson = await generateResponse.json(); const responseText = generateResponseJson.completion; - console.log('Claude response:', responseText); + console.log(chalk.green(`Claude response\n${divider}\n${responseText}\n${divider}`)); // Wrap it back to OAI format const reply = { choices: [{ 'message': { 'content': responseText } }] }; return response.send(reply); } } catch (error) { - console.log('Error communicating with Claude: ', error); + console.log(chalk.red(`Error communicating with Claude: ${error}\n${divider}`)); if (!response.headersSent) { return response.status(500).send({ error: true }); } diff --git a/src/chat-completion.js b/src/chat-completion.js index 4fc21a550..d673ae37d 100644 --- a/src/chat-completion.js +++ b/src/chat-completion.js @@ -1,73 +1,59 @@ /** * Convert a prompt from the ChatML objects to the format used by Claude. 
* @param {object[]} messages Array of messages - * @param {boolean} addHumanPrefix Add Human prefix - * @param {boolean} addAssistantPostfix Add Assistant postfix - * @param {boolean} withSystemPrompt Build system prompt before "\n\nHuman: " + * @param {boolean} addAssistantPostfix Add Assistant postfix. + * @param {string} addAssistantPrefill Add Assistant prefill after the assistant postfix. + * @param {boolean} withSyspromptSupport Indicates if the Claude model supports the system prompt format. + * @param {boolean} useSystemPrompt Indicates if the system prompt format should be used. + * @param {string} addSysHumanMsg Add Human message between system prompt and assistant. * @returns {string} Prompt for Claude * @copyright Prompt Conversion script taken from RisuAI by kwaroran (GPLv3). */ -function convertClaudePrompt(messages, addHumanPrefix, addAssistantPostfix, withSystemPrompt) { - // Claude doesn't support message names, so we'll just add them to the message content. - for (const message of messages) { - if (message.name && message.role !== 'system') { - message.content = message.name + ': ' + message.content; - delete message.name; - } - } - let systemPrompt = ''; - if (withSystemPrompt) { - let lastSystemIdx = -1; +function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, withSyspromptSupport, useSystemPrompt, addSysHumanMsg) { - for (let i = 0; i < messages.length - 1; i++) { - const message = messages[i]; - if (message.role === 'system' && !message.name) { - systemPrompt += message.content + '\n\n'; - } else { - lastSystemIdx = i - 1; - break; - } + // Find the index of the first message with an assistant role and check for a "'user' role/Human:" before it. + let hasUser = false; + const firstAssistantIndex = messages.findIndex((message) => { + if (message.role === 'user' || message.content.includes('Human:')) { + hasUser = true; + } + return message.role === 'assistant'; + }); + + let setHumanMsg = addSysHumanMsg ? 
'\n\nHuman: ' + addSysHumanMsg : '\n\nHuman: Let\'s get started.'; + let requestPrompt = messages.map((v, i) => { + // Claude doesn't support message names, so we'll just add them to the message content. + if (v.name && v.role !== 'system') { + v.content = `${v.name}: ${v.content}`; + delete v.name; } - if (lastSystemIdx >= 0) { - messages.splice(0, lastSystemIdx + 1); - } - } - let requestPrompt = messages.map((v) => { let prefix = ''; - switch (v.role) { - case 'assistant': - prefix = '\n\nAssistant: '; - break; - case 'user': - prefix = '\n\nHuman: '; - break; - case 'system': - // According to the Claude docs, H: and A: should be used for example conversations. - if (v.name === 'example_assistant') { - prefix = '\n\nA: '; - } else if (v.name === 'example_user') { - prefix = '\n\nH: '; - } else { - prefix = '\n\n'; - } - break; + // Switches to system prompt format by adding empty prefix to the first message of the assistant, when the "use system prompt" checked and the model is 2.1. + // Otherwise, use the default message format by adding "Human: " prefix to the first message(compatible with all claude models including 2.1.) + if (i === 0) { + prefix = withSyspromptSupport && useSystemPrompt ? '' : '\n\nHuman: '; + // For system prompt format. If there is no message with role "user" or prefix "Human:" change the first assistant's prefix(insert the human's message). + } else if (i === firstAssistantIndex && !hasUser && withSyspromptSupport && useSystemPrompt) { + prefix = `${setHumanMsg}\n\nAssistant: `; + //prefix = addSysHumanMsg ? '\n\nHuman: ' + addSysHumanMsg + '\n\nAssistant: ' : '\n\nHuman: Let\'s get started.\n\nAssistant: '; + // Merge two messages with "\n\nHuman: " prefixes into one before the first Assistant's message. Fix messages order for default claude format when(messages > Context Size). 
+ } else if (i > 0 && i === firstAssistantIndex - 1 && v.role === 'user' && (!withSyspromptSupport || (withSyspromptSupport && !useSystemPrompt))) { + prefix = '\n\nFirst message: '; + //Define role prefixes(role : prefix). Set the correct prefix according to the role/name. + } else { + prefix = { + 'assistant': '\n\nAssistant: ', + 'user': '\n\nHuman: ', + 'system': v.name === 'example_assistant' ? '\n\nA: ' : v.name === 'example_user' ? '\n\nH: ' : '\n\n', + }[v.role] || '\n\n'; } return prefix + v.content; }).join(''); - if (addHumanPrefix) { - requestPrompt = '\n\nHuman: ' + requestPrompt; - } - - if (addAssistantPostfix) { - requestPrompt = requestPrompt + '\n\nAssistant: '; - } - - if (withSystemPrompt) { - requestPrompt = systemPrompt + requestPrompt; - } + //Add the assistant suffix(if the option unchecked), add a prefill after it(if filled). Also Add the first human message before the assistant suffix(when using sysprompt and there are no other messages with the role 'Assistant'). + requestPrompt += addAssistantPostfix ? `${withSyspromptSupport && useSystemPrompt && firstAssistantIndex === -1 ? setHumanMsg : ''}\n\nAssistant: ${addAssistantPrefill ? 
addAssistantPrefill : ''}` : ''; return requestPrompt; } From 54dd3799e866e246f590363e825f57c660502aab Mon Sep 17 00:00:00 2001 From: DonMoralez Date: Thu, 14 Dec 2023 20:42:08 +0200 Subject: [PATCH 02/82] Update chat-completions.js --- src/endpoints/backends/chat-completions.js | 150 ++------------------- 1 file changed, 9 insertions(+), 141 deletions(-) diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js index af463bd21..d9cf29bf6 100644 --- a/src/endpoints/backends/chat-completions.js +++ b/src/endpoints/backends/chat-completions.js @@ -20,9 +20,11 @@ const API_CLAUDE = 'https://api.anthropic.com/v1'; async function sendClaudeRequest(request, response) { const apiUrl = new URL(request.body.reverse_proxy || API_CLAUDE).toString(); const apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(SECRET_KEYS.CLAUDE); + const chalk = require('chalk'); + const divider = '-'.repeat(process.stdout.columns); if (!apiKey) { - console.log('Claude API key is missing.'); + console.log(chalk.red(`Claude API key is missing.\n${divider}`)); return response.status(400).send({ error: true }); } @@ -33,14 +35,10 @@ async function sendClaudeRequest(request, response) { controller.abort(); }); - let doSystemPrompt = request.body.model === 'claude-2' || request.body.model === 'claude-2.1'; - let requestPrompt = convertClaudePrompt(request.body.messages, true, !request.body.exclude_assistant, doSystemPrompt); + let isSyspromptSupported = request.body.model === 'claude-2' || request.body.model === 'claude-2.1'; + let requestPrompt = convertClaudePrompt(request.body.messages, !request.body.exclude_assistant, request.body.assistant_prefill, isSyspromptSupported, request.body.claude_use_sysprompt, request.body.human_sysprompt_message); - if (request.body.assistant_prefill && !request.body.exclude_assistant) { - requestPrompt += request.body.assistant_prefill; - } - - console.log('Claude request:', requestPrompt); + 
console.log(chalk.green(`${divider}\nClaude request\n`) + chalk.cyan(`PROMPT\n${divider}\n${requestPrompt}\n${divider}`)); const stop_sequences = ['\n\nHuman:', '\n\nSystem:', '\n\nAssistant:']; // Add custom stop sequences @@ -74,150 +72,20 @@ async function sendClaudeRequest(request, response) { forwardFetchResponse(generateResponse, response); } else { if (!generateResponse.ok) { - console.log(`Claude API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`); + console.log(chalk.red(`Claude API returned error: ${generateResponse.status} ${generateResponse.statusText}\n${await generateResponse.text()}\n${divider}`)); return response.status(generateResponse.status).send({ error: true }); } const generateResponseJson = await generateResponse.json(); const responseText = generateResponseJson.completion; - console.log('Claude response:', responseText); + console.log(chalk.green(`Claude response\n${divider}\n${responseText}\n${divider}`)); // Wrap it back to OAI format const reply = { choices: [{ 'message': { 'content': responseText } }] }; return response.send(reply); } } catch (error) { - console.log('Error communicating with Claude: ', error); - if (!response.headersSent) { - return response.status(500).send({ error: true }); - } - } -} - -/** - * Sends a request to Scale Spellbook API. 
- * @param {import("express").Request} request Express request - * @param {import("express").Response} response Express response - */ -async function sendScaleRequest(request, response) { - const apiUrl = new URL(request.body.api_url_scale).toString(); - const apiKey = readSecret(SECRET_KEYS.SCALE); - - if (!apiKey) { - console.log('Scale API key is missing.'); - return response.status(400).send({ error: true }); - } - - const requestPrompt = convertTextCompletionPrompt(request.body.messages); - console.log('Scale request:', requestPrompt); - - try { - const controller = new AbortController(); - request.socket.removeAllListeners('close'); - request.socket.on('close', function () { - controller.abort(); - }); - - const generateResponse = await fetch(apiUrl, { - method: 'POST', - body: JSON.stringify({ input: { input: requestPrompt } }), - headers: { - 'Content-Type': 'application/json', - 'Authorization': `Basic ${apiKey}`, - }, - timeout: 0, - }); - - if (!generateResponse.ok) { - console.log(`Scale API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`); - return response.status(generateResponse.status).send({ error: true }); - } - - const generateResponseJson = await generateResponse.json(); - console.log('Scale response:', generateResponseJson); - - const reply = { choices: [{ 'message': { 'content': generateResponseJson.output } }] }; - return response.send(reply); - } catch (error) { - console.log(error); - if (!response.headersSent) { - return response.status(500).send({ error: true }); - } - } -} - -/** - * Sends a request to Google AI API. 
- * @param {express.Request} request Express request - * @param {express.Response} response Express response - */ -async function sendPalmRequest(request, response) { - const api_key_palm = readSecret(SECRET_KEYS.PALM); - - if (!api_key_palm) { - console.log('Palm API key is missing.'); - return response.status(400).send({ error: true }); - } - - const body = { - prompt: { - text: request.body.messages, - }, - stopSequences: request.body.stop, - safetySettings: PALM_SAFETY, - temperature: request.body.temperature, - topP: request.body.top_p, - topK: request.body.top_k || undefined, - maxOutputTokens: request.body.max_tokens, - candidate_count: 1, - }; - - console.log('Palm request:', body); - - try { - const controller = new AbortController(); - request.socket.removeAllListeners('close'); - request.socket.on('close', function () { - controller.abort(); - }); - - const generateResponse = await fetch(`https://generativelanguage.googleapis.com/v1beta2/models/text-bison-001:generateText?key=${api_key_palm}`, { - body: JSON.stringify(body), - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - signal: controller.signal, - timeout: 0, - }); - - if (!generateResponse.ok) { - console.log(`Palm API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`); - return response.status(generateResponse.status).send({ error: true }); - } - - const generateResponseJson = await generateResponse.json(); - const responseText = generateResponseJson?.candidates?.[0]?.output; - - if (!responseText) { - console.log('Palm API returned no response', generateResponseJson); - let message = `Palm API returned no response: ${JSON.stringify(generateResponseJson)}`; - - // Check for filters - if (generateResponseJson?.filters?.[0]?.reason) { - message = `Palm filter triggered: ${generateResponseJson.filters[0].reason}`; - } - - return response.send({ error: { message } }); - } - - console.log('Palm response:', 
responseText); - - // Wrap it back to OAI format - const reply = { choices: [{ 'message': { 'content': responseText } }] }; - return response.send(reply); - } catch (error) { - console.log('Error communicating with Palm API: ', error); + console.log(chalk.red(`Error communicating with Claude: ${error}\n${divider}`)); if (!response.headersSent) { return response.status(500).send({ error: true }); } From 84a94b37e101b204cca1658cacba6bbdf1442369 Mon Sep 17 00:00:00 2001 From: DonMoralez Date: Thu, 14 Dec 2023 20:55:20 +0200 Subject: [PATCH 03/82] Revert "Update chat-completions.js" This reverts commit 54dd3799e866e246f590363e825f57c660502aab. --- src/endpoints/backends/chat-completions.js | 150 +++++++++++++++++++-- 1 file changed, 141 insertions(+), 9 deletions(-) diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js index d9cf29bf6..af463bd21 100644 --- a/src/endpoints/backends/chat-completions.js +++ b/src/endpoints/backends/chat-completions.js @@ -20,11 +20,9 @@ const API_CLAUDE = 'https://api.anthropic.com/v1'; async function sendClaudeRequest(request, response) { const apiUrl = new URL(request.body.reverse_proxy || API_CLAUDE).toString(); const apiKey = request.body.reverse_proxy ? 
request.body.proxy_password : readSecret(SECRET_KEYS.CLAUDE); - const chalk = require('chalk'); - const divider = '-'.repeat(process.stdout.columns); if (!apiKey) { - console.log(chalk.red(`Claude API key is missing.\n${divider}`)); + console.log('Claude API key is missing.'); return response.status(400).send({ error: true }); } @@ -35,10 +33,14 @@ async function sendClaudeRequest(request, response) { controller.abort(); }); - let isSyspromptSupported = request.body.model === 'claude-2' || request.body.model === 'claude-2.1'; - let requestPrompt = convertClaudePrompt(request.body.messages, !request.body.exclude_assistant, request.body.assistant_prefill, isSyspromptSupported, request.body.claude_use_sysprompt, request.body.human_sysprompt_message); + let doSystemPrompt = request.body.model === 'claude-2' || request.body.model === 'claude-2.1'; + let requestPrompt = convertClaudePrompt(request.body.messages, true, !request.body.exclude_assistant, doSystemPrompt); - console.log(chalk.green(`${divider}\nClaude request\n`) + chalk.cyan(`PROMPT\n${divider}\n${requestPrompt}\n${divider}`)); + if (request.body.assistant_prefill && !request.body.exclude_assistant) { + requestPrompt += request.body.assistant_prefill; + } + + console.log('Claude request:', requestPrompt); const stop_sequences = ['\n\nHuman:', '\n\nSystem:', '\n\nAssistant:']; // Add custom stop sequences @@ -72,20 +74,150 @@ async function sendClaudeRequest(request, response) { forwardFetchResponse(generateResponse, response); } else { if (!generateResponse.ok) { - console.log(chalk.red(`Claude API returned error: ${generateResponse.status} ${generateResponse.statusText}\n${await generateResponse.text()}\n${divider}`)); + console.log(`Claude API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`); return response.status(generateResponse.status).send({ error: true }); } const generateResponseJson = await generateResponse.json(); const responseText = 
generateResponseJson.completion; - console.log(chalk.green(`Claude response\n${divider}\n${responseText}\n${divider}`)); + console.log('Claude response:', responseText); // Wrap it back to OAI format const reply = { choices: [{ 'message': { 'content': responseText } }] }; return response.send(reply); } } catch (error) { - console.log(chalk.red(`Error communicating with Claude: ${error}\n${divider}`)); + console.log('Error communicating with Claude: ', error); + if (!response.headersSent) { + return response.status(500).send({ error: true }); + } + } +} + +/** + * Sends a request to Scale Spellbook API. + * @param {import("express").Request} request Express request + * @param {import("express").Response} response Express response + */ +async function sendScaleRequest(request, response) { + const apiUrl = new URL(request.body.api_url_scale).toString(); + const apiKey = readSecret(SECRET_KEYS.SCALE); + + if (!apiKey) { + console.log('Scale API key is missing.'); + return response.status(400).send({ error: true }); + } + + const requestPrompt = convertTextCompletionPrompt(request.body.messages); + console.log('Scale request:', requestPrompt); + + try { + const controller = new AbortController(); + request.socket.removeAllListeners('close'); + request.socket.on('close', function () { + controller.abort(); + }); + + const generateResponse = await fetch(apiUrl, { + method: 'POST', + body: JSON.stringify({ input: { input: requestPrompt } }), + headers: { + 'Content-Type': 'application/json', + 'Authorization': `Basic ${apiKey}`, + }, + timeout: 0, + }); + + if (!generateResponse.ok) { + console.log(`Scale API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`); + return response.status(generateResponse.status).send({ error: true }); + } + + const generateResponseJson = await generateResponse.json(); + console.log('Scale response:', generateResponseJson); + + const reply = { choices: [{ 'message': { 'content': 
generateResponseJson.output } }] }; + return response.send(reply); + } catch (error) { + console.log(error); + if (!response.headersSent) { + return response.status(500).send({ error: true }); + } + } +} + +/** + * Sends a request to Google AI API. + * @param {express.Request} request Express request + * @param {express.Response} response Express response + */ +async function sendPalmRequest(request, response) { + const api_key_palm = readSecret(SECRET_KEYS.PALM); + + if (!api_key_palm) { + console.log('Palm API key is missing.'); + return response.status(400).send({ error: true }); + } + + const body = { + prompt: { + text: request.body.messages, + }, + stopSequences: request.body.stop, + safetySettings: PALM_SAFETY, + temperature: request.body.temperature, + topP: request.body.top_p, + topK: request.body.top_k || undefined, + maxOutputTokens: request.body.max_tokens, + candidate_count: 1, + }; + + console.log('Palm request:', body); + + try { + const controller = new AbortController(); + request.socket.removeAllListeners('close'); + request.socket.on('close', function () { + controller.abort(); + }); + + const generateResponse = await fetch(`https://generativelanguage.googleapis.com/v1beta2/models/text-bison-001:generateText?key=${api_key_palm}`, { + body: JSON.stringify(body), + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + signal: controller.signal, + timeout: 0, + }); + + if (!generateResponse.ok) { + console.log(`Palm API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`); + return response.status(generateResponse.status).send({ error: true }); + } + + const generateResponseJson = await generateResponse.json(); + const responseText = generateResponseJson?.candidates?.[0]?.output; + + if (!responseText) { + console.log('Palm API returned no response', generateResponseJson); + let message = `Palm API returned no response: ${JSON.stringify(generateResponseJson)}`; + + // Check 
for filters + if (generateResponseJson?.filters?.[0]?.reason) { + message = `Palm filter triggered: ${generateResponseJson.filters[0].reason}`; + } + + return response.send({ error: { message } }); + } + + console.log('Palm response:', responseText); + + // Wrap it back to OAI format + const reply = { choices: [{ 'message': { 'content': responseText } }] }; + return response.send(reply); + } catch (error) { + console.log('Error communicating with Palm API: ', error); if (!response.headersSent) { return response.status(500).send({ error: true }); } From 2858e095a8f813dc00eb9e2a6a429ae7a512e053 Mon Sep 17 00:00:00 2001 From: DonMoralez Date: Thu, 14 Dec 2023 21:07:52 +0200 Subject: [PATCH 04/82] Update chat-completions.js --- src/endpoints/backends/chat-completions.js | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js index af463bd21..ae0370cc5 100644 --- a/src/endpoints/backends/chat-completions.js +++ b/src/endpoints/backends/chat-completions.js @@ -20,9 +20,11 @@ const API_CLAUDE = 'https://api.anthropic.com/v1'; async function sendClaudeRequest(request, response) { const apiUrl = new URL(request.body.reverse_proxy || API_CLAUDE).toString(); const apiKey = request.body.reverse_proxy ? 
request.body.proxy_password : readSecret(SECRET_KEYS.CLAUDE); + const chalk = require('chalk'); + const divider = '-'.repeat(process.stdout.columns); if (!apiKey) { - console.log('Claude API key is missing.'); + console.log(chalk.red(`Claude API key is missing.\n${divider}`)); return response.status(400).send({ error: true }); } @@ -33,14 +35,10 @@ async function sendClaudeRequest(request, response) { controller.abort(); }); - let doSystemPrompt = request.body.model === 'claude-2' || request.body.model === 'claude-2.1'; - let requestPrompt = convertClaudePrompt(request.body.messages, true, !request.body.exclude_assistant, doSystemPrompt); + let isSyspromptSupported = request.body.model === 'claude-2' || request.body.model === 'claude-2.1'; + let requestPrompt = convertClaudePrompt(request.body.messages, !request.body.exclude_assistant, request.body.assistant_prefill, isSyspromptSupported, request.body.claude_use_sysprompt, request.body.human_sysprompt_message); - if (request.body.assistant_prefill && !request.body.exclude_assistant) { - requestPrompt += request.body.assistant_prefill; - } - - console.log('Claude request:', requestPrompt); + console.log(chalk.green(`${divider}\nClaude request\n`) + chalk.cyan(`PROMPT\n${divider}\n${requestPrompt}\n${divider}`)); const stop_sequences = ['\n\nHuman:', '\n\nSystem:', '\n\nAssistant:']; // Add custom stop sequences @@ -74,20 +72,20 @@ async function sendClaudeRequest(request, response) { forwardFetchResponse(generateResponse, response); } else { if (!generateResponse.ok) { - console.log(`Claude API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`); + console.log(chalk.red(`Claude API returned error: ${generateResponse.status} ${generateResponse.statusText}\n${await generateResponse.text()}\n${divider}`)); return response.status(generateResponse.status).send({ error: true }); } const generateResponseJson = await generateResponse.json(); const responseText = 
generateResponseJson.completion; - console.log('Claude response:', responseText); + console.log(chalk.green(`Claude response\n${divider}\n${responseText}\n${divider}`)); // Wrap it back to OAI format const reply = { choices: [{ 'message': { 'content': responseText } }] }; return response.send(reply); } } catch (error) { - console.log('Error communicating with Claude: ', error); + console.log(chalk.red(`Error communicating with Claude: ${error}\n${divider}`)); if (!response.headersSent) { return response.status(500).send({ error: true }); } From 2ebc16850522f13aac59ed4e21da12174e6311b8 Mon Sep 17 00:00:00 2001 From: DonMoralez Date: Thu, 14 Dec 2023 21:36:13 +0200 Subject: [PATCH 05/82] Update prompt-converters.js --- src/endpoints/prompt-converters.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/endpoints/prompt-converters.js b/src/endpoints/prompt-converters.js index 3cfa38cce..1ccdd9aae 100644 --- a/src/endpoints/prompt-converters.js +++ b/src/endpoints/prompt-converters.js @@ -39,7 +39,7 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, prefix = `${setHumanMsg}\n\nAssistant: `; //prefix = addSysHumanMsg ? '\n\nHuman: ' + addSysHumanMsg + '\n\nAssistant: ' : '\n\nHuman: Let\'s get started.\n\nAssistant: '; // Merge two messages with "\n\nHuman: " prefixes into one before the first Assistant's message. Fix messages order for default claude format when(messages > Context Size). - } else if (i > 0 && i === firstAssistantIndex - 1 && v.role === 'user' && (!withSyspromptSupport || (withSyspromptSupport && !useSystemPrompt))) { + } else if (i > 0 && i === firstAssistantIndex - 1 && v.role === 'user' && !(withSyspromptSupport && useSystemPrompt)) { prefix = '\n\nFirst message: '; //Define role prefixes(role : prefix). Set the correct prefix according to the role/name. 
} else { From d71b5430ad3023c65ddb4dc4c6bb92f4a88f5186 Mon Sep 17 00:00:00 2001 From: DonMoralez Date: Fri, 15 Dec 2023 13:10:53 +0200 Subject: [PATCH 06/82] Update prompt-converters.js --- src/endpoints/prompt-converters.js | 59 +++++++++++++++++++++++------- 1 file changed, 46 insertions(+), 13 deletions(-) diff --git a/src/endpoints/prompt-converters.js b/src/endpoints/prompt-converters.js index 1ccdd9aae..bab45840f 100644 --- a/src/endpoints/prompt-converters.js +++ b/src/endpoints/prompt-converters.js @@ -13,13 +13,45 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, withSyspromptSupport, useSystemPrompt, addSysHumanMsg) { // Find the index of the first message with an assistant role and check for a "'user' role/Human:" before it. - let hasUser = false; - const firstAssistantIndex = messages.findIndex((message) => { - if (message.role === 'user' || message.content.includes('Human:')) { - hasUser = true; + console.log(JSON.stringify(messages, null, 2)); + if (messages.length > 0) { + //messages[0].role = withSyspromptSupport && useSystemPrompt ? 
'system' : 'user'; + if (addAssistantPostfix) { + let role = 'assistant'; + if (messages.length > 0 && messages[messages.length - 1].role === 'assistant' || messages[messages.length - 1].content.includes('Assistant:')) { + role = 'system'; + } + messages.push({ + role: role, + content: addAssistantPrefill || '', + }); } - return message.role === 'assistant'; - }); + + let hasUser = false; + const firstAssistantIndex = messages.findIndex((message, i) => { + if (i > 0 && (message.role === 'user' || message.content.includes('Human:'))) { + hasUser = true; + } + return message.role === 'assistant' && i > 0; + }); + if (withSyspromptSupport && useSystemPrompt) { + messages[0].role = 'system'; + + if (!hasUser) { + messages.splice(firstAssistantIndex, 0, { + role: 'user', + content: addSysHumanMsg || 'Let\'s get started.', + }); + } + } else { + messages[0].role = 'user'; + if (firstAssistantIndex > 0) { + messages[firstAssistantIndex - 1].role = firstAssistantIndex - 1 !== 0 && messages[firstAssistantIndex - 1].role === 'user' ? 'FirstMsg' : messages[firstAssistantIndex - 1].role; + } + } + } + + console.log(JSON.stringify(messages, null, 2)); let setHumanMsg = addSysHumanMsg ? '\n\nHuman: ' + addSysHumanMsg : '\n\nHuman: Let\'s get started.'; let requestPrompt = messages.map((v, i) => { @@ -32,29 +64,30 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, let prefix = ''; // Switches to system prompt format by adding empty prefix to the first message of the assistant, when the "use system prompt" checked and the model is 2.1. // Otherwise, use the default message format by adding "Human: " prefix to the first message(compatible with all claude models including 2.1.) - if (i === 0) { + /* if (i === 0) { prefix = withSyspromptSupport && useSystemPrompt ? '' : '\n\nHuman: '; // For system prompt format. If there is no message with role "user" or prefix "Human:" change the first assistant's prefix(insert the human's message). 
- } else if (i === firstAssistantIndex && !hasUser && withSyspromptSupport && useSystemPrompt) { + } else if (i === firstAssistantIndex && !hasUser && withSyspromptSupport && useSystemPrompt) { prefix = `${setHumanMsg}\n\nAssistant: `; //prefix = addSysHumanMsg ? '\n\nHuman: ' + addSysHumanMsg + '\n\nAssistant: ' : '\n\nHuman: Let\'s get started.\n\nAssistant: '; // Merge two messages with "\n\nHuman: " prefixes into one before the first Assistant's message. Fix messages order for default claude format when(messages > Context Size). - } else if (i > 0 && i === firstAssistantIndex - 1 && v.role === 'user' && !(withSyspromptSupport && useSystemPrompt)) { + } else*/ if (i > 0 && i === firstAssistantIndex - 1 && v.role === 'user' && !(withSyspromptSupport && useSystemPrompt)) { prefix = '\n\nFirst message: '; //Define role prefixes(role : prefix). Set the correct prefix according to the role/name. } else { prefix = { 'assistant': '\n\nAssistant: ', 'user': '\n\nHuman: ', - 'system': v.name === 'example_assistant' ? '\n\nA: ' : v.name === 'example_user' ? '\n\nH: ' : '\n\n', - }[v.role] || '\n\n'; + 'FirstMsg': '\n\nFirst message: ', + 'system': i === 0 ? '' : v.name === 'example_assistant' ? '\n\nA: ' : v.name === 'example_user' ? '\n\nH: ' : i === messages.length - 1 ? '\n' : '\n\n', + }[v.role] ?? '\n\n'; } return prefix + v.content; }).join(''); //Add the assistant suffix(if the option unchecked), add a prefill after it(if filled). Also Add the first human message before the assistant suffix(when using sysprompt and there are no other messages with the role 'Assistant'). - requestPrompt += addAssistantPostfix ? `${withSyspromptSupport && useSystemPrompt && firstAssistantIndex === -1 ? setHumanMsg : ''}\n\nAssistant: ${addAssistantPrefill ? addAssistantPrefill : ''}` : ''; - + //requestPrompt += addAssistantPostfix ? `${withSyspromptSupport && useSystemPrompt && firstAssistantIndex === -1 ? setHumanMsg : ''}\n\nAssistant: ${addAssistantPrefill ? 
addAssistantPrefill : ''}` : ''; + requestPrompt += ''; return requestPrompt; } From 04f3aa8b66e3b1b20a37bb886a7a43a8084a5c9f Mon Sep 17 00:00:00 2001 From: DonMoralez Date: Fri, 15 Dec 2023 20:15:48 +0200 Subject: [PATCH 07/82] Update prompt-converters.js --- src/endpoints/prompt-converters.js | 54 ++++++++++-------------------- 1 file changed, 18 insertions(+), 36 deletions(-) diff --git a/src/endpoints/prompt-converters.js b/src/endpoints/prompt-converters.js index 245de41e9..84778318b 100644 --- a/src/endpoints/prompt-converters.js +++ b/src/endpoints/prompt-converters.js @@ -12,21 +12,17 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, withSyspromptSupport, useSystemPrompt, addSysHumanMsg) { - // Find the index of the first message with an assistant role and check for a "'user' role/Human:" before it. console.log(JSON.stringify(messages, null, 2)); + //Prepare messages for claude. if (messages.length > 0) { - //messages[0].role = withSyspromptSupport && useSystemPrompt ? 'system' : 'user'; + //Add the assistant's message to the end of messages. if (addAssistantPostfix) { - let role = 'assistant'; - if (messages.length > 0 && messages[messages.length - 1].role === 'assistant' || messages[messages.length - 1].content.includes('Assistant:')) { - role = 'system'; - } messages.push({ - role: role, + role: 'assistant', content: addAssistantPrefill || '', }); } - + // Find the index of the first message with an assistant role and check for a "'user' role/Human:" before it. let hasUser = false; const firstAssistantIndex = messages.findIndex((message, i) => { if (i > 0 && (message.role === 'user' || message.content.includes('Human:'))) { @@ -34,26 +30,28 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, } return message.role === 'assistant' && i > 0; }); + // When 2.1 and 'Use system prompt" checked, switches to system prompt format by setting the first message's role to 'system'. 
+ // Also, insert the human's message before the first the assistant one, in case there are no such message or prefix found. if (withSyspromptSupport && useSystemPrompt) { messages[0].role = 'system'; - - if (!hasUser) { + if (firstAssistantIndex > 0 && !hasUser) { messages.splice(firstAssistantIndex, 0, { role: 'user', content: addSysHumanMsg || 'Let\'s get started.', }); } } else { + // Otherwise, use the default message format by setting the first message's role to 'user'(compatible with all claude models including 2.1.) messages[0].role = 'user'; + // Also, fix messages order for default claude format when(messages > Context Size) by merging two messages with "\n\nHuman: " prefixes into one before the first Assistant's message. if (firstAssistantIndex > 0) { - messages[firstAssistantIndex - 1].role = firstAssistantIndex - 1 !== 0 && messages[firstAssistantIndex - 1].role === 'user' ? 'FirstMsg' : messages[firstAssistantIndex - 1].role; + messages[firstAssistantIndex - 1].role = firstAssistantIndex - 1 !== 0 && messages[firstAssistantIndex - 1].role === 'user' ? 'firstMsg' : messages[firstAssistantIndex - 1].role; } } } console.log(JSON.stringify(messages, null, 2)); - - let setHumanMsg = addSysHumanMsg ? '\n\nHuman: ' + addSysHumanMsg : '\n\nHuman: Let\'s get started.'; + // Convert messages to requestPrompt. let requestPrompt = messages.map((v, i) => { // Claude doesn't support message names, so we'll just add them to the message content. if (v.name && v.role !== 'system') { @@ -62,32 +60,16 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, } let prefix = ''; - // Switches to system prompt format by adding empty prefix to the first message of the assistant, when the "use system prompt" checked and the model is 2.1. - // Otherwise, use the default message format by adding "Human: " prefix to the first message(compatible with all claude models including 2.1.) 
- /* if (i === 0) { - prefix = withSyspromptSupport && useSystemPrompt ? '' : '\n\nHuman: '; - // For system prompt format. If there is no message with role "user" or prefix "Human:" change the first assistant's prefix(insert the human's message). - } else if (i === firstAssistantIndex && !hasUser && withSyspromptSupport && useSystemPrompt) { - prefix = `${setHumanMsg}\n\nAssistant: `; - //prefix = addSysHumanMsg ? '\n\nHuman: ' + addSysHumanMsg + '\n\nAssistant: ' : '\n\nHuman: Let\'s get started.\n\nAssistant: '; - // Merge two messages with "\n\nHuman: " prefixes into one before the first Assistant's message. Fix messages order for default claude format when(messages > Context Size). - } else*/ if (i > 0 && i === firstAssistantIndex - 1 && v.role === 'user' && !(withSyspromptSupport && useSystemPrompt)) { - prefix = '\n\nFirst message: '; - //Define role prefixes(role : prefix). Set the correct prefix according to the role/name. - } else { - prefix = { - 'assistant': '\n\nAssistant: ', - 'user': '\n\nHuman: ', - 'FirstMsg': '\n\nFirst message: ', - 'system': i === 0 ? '' : v.name === 'example_assistant' ? '\n\nA: ' : v.name === 'example_user' ? '\n\nH: ' : i === messages.length - 1 ? '\n' : '\n\n', - }[v.role] ?? '\n\n'; - } + prefix = { + 'assistant': '\n\nAssistant: ', + 'user': '\n\nHuman: ', + 'system': i === 0 ? '' : v.name === 'example_assistant' ? '\n\nA: ' : v.name === 'example_user' ? '\n\nH: ' : '\n\n', + 'firstMsg': '\n\nFirst message: ', + }[v.role] ?? '\n\n'; + //} return prefix + v.content; }).join(''); - //Add the assistant suffix(if the option unchecked), add a prefill after it(if filled). Also Add the first human message before the assistant suffix(when using sysprompt and there are no other messages with the role 'Assistant'). - //requestPrompt += addAssistantPostfix ? `${withSyspromptSupport && useSystemPrompt && firstAssistantIndex === -1 ? setHumanMsg : ''}\n\nAssistant: ${addAssistantPrefill ? 
addAssistantPrefill : ''}` : ''; - requestPrompt += ''; return requestPrompt; } From d061c830c24f0c290857e6fa2adecabec601b59d Mon Sep 17 00:00:00 2001 From: DonMoralez Date: Fri, 15 Dec 2023 20:58:03 +0200 Subject: [PATCH 08/82] Update prompt-converters.js --- src/endpoints/prompt-converters.js | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/src/endpoints/prompt-converters.js b/src/endpoints/prompt-converters.js index 84778318b..2710c7914 100644 --- a/src/endpoints/prompt-converters.js +++ b/src/endpoints/prompt-converters.js @@ -9,7 +9,6 @@ * @returns {string} Prompt for Claude * @copyright Prompt Conversion script taken from RisuAI by kwaroran (GPLv3). */ - function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, withSyspromptSupport, useSystemPrompt, addSysHumanMsg) { console.log(JSON.stringify(messages, null, 2)); @@ -43,9 +42,9 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, } else { // Otherwise, use the default message format by setting the first message's role to 'user'(compatible with all claude models including 2.1.) messages[0].role = 'user'; - // Also, fix messages order for default claude format when(messages > Context Size) by merging two messages with "\n\nHuman: " prefixes into one before the first Assistant's message. + // Fix messages order for default message format when(messages > Context Size) by merging two messages with "\n\nHuman: " prefixes into one, before the first Assistant's message. if (firstAssistantIndex > 0) { - messages[firstAssistantIndex - 1].role = firstAssistantIndex - 1 !== 0 && messages[firstAssistantIndex - 1].role === 'user' ? 'firstMsg' : messages[firstAssistantIndex - 1].role; + messages[firstAssistantIndex - 1].role = firstAssistantIndex - 1 !== 0 && messages[firstAssistantIndex - 1].role === 'user' ? 
'FixHumMsg' : messages[firstAssistantIndex - 1].role; } } } @@ -58,15 +57,14 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, v.content = `${v.name}: ${v.content}`; delete v.name; } - - let prefix = ''; - prefix = { + //let prefix = ''; // Set prefix according to the role. + let prefix = { 'assistant': '\n\nAssistant: ', 'user': '\n\nHuman: ', 'system': i === 0 ? '' : v.name === 'example_assistant' ? '\n\nA: ' : v.name === 'example_user' ? '\n\nH: ' : '\n\n', - 'firstMsg': '\n\nFirst message: ', + 'FixHumMsg': '\n\nFirst message: ', }[v.role] ?? '\n\n'; - //} + return prefix + v.content; }).join(''); From 6b590148928f9be08747c13a6867a613c7289cb3 Mon Sep 17 00:00:00 2001 From: DonMoralez Date: Sat, 16 Dec 2023 00:24:48 +0200 Subject: [PATCH 09/82] (Fix) "squash sys. messages" processed empty messages, adding \n --- public/scripts/openai.js | 2 +- src/endpoints/prompt-converters.js | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/public/scripts/openai.js b/public/scripts/openai.js index 532493b9e..1bdf4c0b2 100644 --- a/public/scripts/openai.js +++ b/public/scripts/openai.js @@ -1960,7 +1960,7 @@ class ChatCompletion { for (let message of this.messages.collection) { if (!excludeList.includes(message.identifier) && message.role === 'system' && !message.name) { - if (lastMessage && lastMessage.role === 'system') { + if (lastMessage && message.content && lastMessage.role === 'system') { lastMessage.content += '\n' + message.content; lastMessage.tokens = tokenHandler.count({ role: lastMessage.role, content: lastMessage.content }); } diff --git a/src/endpoints/prompt-converters.js b/src/endpoints/prompt-converters.js index 2710c7914..7087ed77a 100644 --- a/src/endpoints/prompt-converters.js +++ b/src/endpoints/prompt-converters.js @@ -18,7 +18,7 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, if (addAssistantPostfix) { messages.push({ role: 'assistant', - content: 
addAssistantPrefill || '', + content: addAssistantPrefill || '4', }); } // Find the index of the first message with an assistant role and check for a "'user' role/Human:" before it. @@ -63,7 +63,7 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, 'user': '\n\nHuman: ', 'system': i === 0 ? '' : v.name === 'example_assistant' ? '\n\nA: ' : v.name === 'example_user' ? '\n\nH: ' : '\n\n', 'FixHumMsg': '\n\nFirst message: ', - }[v.role] ?? '\n\n'; + }[v.role] ?? '\n\n4'; return prefix + v.content; }).join(''); From 125d2997dbd9d70a557ca3a92e2504f3e203a5e5 Mon Sep 17 00:00:00 2001 From: DonMoralez Date: Sat, 16 Dec 2023 14:12:06 +0200 Subject: [PATCH 10/82] Add prefixes sequence check for claude --- src/endpoints/backends/chat-completions.js | 28 ++++++++++++++++++++++ src/endpoints/prompt-converters.js | 18 +++++++++----- 2 files changed, 40 insertions(+), 6 deletions(-) diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js index 26a0e5d48..75531e766 100644 --- a/src/endpoints/backends/chat-completions.js +++ b/src/endpoints/backends/chat-completions.js @@ -40,6 +40,34 @@ async function sendClaudeRequest(request, response) { let requestPrompt = convertClaudePrompt(request.body.messages, !request.body.exclude_assistant, request.body.assistant_prefill, isSyspromptSupported, request.body.claude_use_sysprompt, request.body.human_sysprompt_message); console.log(chalk.green(`${divider}\nClaude request\n`) + chalk.cyan(`PROMPT\n${divider}\n${requestPrompt}\n${divider}`)); + + // Check Claude messages sequence and prefixes presence. 
+ const sequence = requestPrompt.split('\n').filter(x => x.startsWith('Human:') || x.startsWith('Assistant:')); + let humanErrorCount = 0; + let assistantErrorCount = 0; + let humanFound = sequence.some(line => line.startsWith('Human:')); + let assistantFound = sequence.some(line => line.startsWith('Assistant:')); + + for (let i = 0; i < sequence.length - 1; i++) { + + if (sequence[i].startsWith(sequence[i + 1].split(':')[0])) { + if (sequence[i].startsWith('Human:')) { + humanErrorCount++; + } else if (sequence[i].startsWith('Assistant:')) { + assistantErrorCount++; + } + } + } + + if (!humanFound) { + console.log(chalk.red(`${divider}\nWarning: No Human prefix found in the prompt.\n${divider}`)); + } + if (!assistantFound) { + console.log(chalk.red(`${divider}\nWarning: No Assistant prefix found in the prompt.\n${divider}`)); + } + if (humanErrorCount > 0 || assistantErrorCount > 0) { + console.log(chalk.red(`${divider}\nWarning: Detected incorrect Prefix sequence(s).\nIncorrect 'Human:' prefix(es): ${humanErrorCount}.\nIncorrect 'Assistant:' prefix(es): ${assistantErrorCount}.\nCheck the prompt above and fix it in the sillytavern.\nThe correct sequence should look like this:\nSystem prompt message <--(for new sysprompt format only)\n <------------------(Every message start with Assistant:/Human:prefix should have one empty line above)\nHuman:\n\nAssistant:\n\...\n\nHuman:\n\nAssistant:\n${divider}`)); + } const stop_sequences = ['\n\nHuman:', '\n\nSystem:', '\n\nAssistant:']; // Add custom stop sequences diff --git a/src/endpoints/prompt-converters.js b/src/endpoints/prompt-converters.js index 7087ed77a..ce5be3a53 100644 --- a/src/endpoints/prompt-converters.js +++ b/src/endpoints/prompt-converters.js @@ -14,26 +14,31 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, console.log(JSON.stringify(messages, null, 2)); //Prepare messages for claude. 
if (messages.length > 0) { + messages[0].role = 'system'; //Add the assistant's message to the end of messages. if (addAssistantPostfix) { messages.push({ role: 'assistant', - content: addAssistantPrefill || '4', + content: addAssistantPrefill || '', }); } // Find the index of the first message with an assistant role and check for a "'user' role/Human:" before it. let hasUser = false; + let hasAssist = false; const firstAssistantIndex = messages.findIndex((message, i) => { - if (i > 0 && (message.role === 'user' || message.content.includes('Human:'))) { + if (i >= 0 && (message.role === 'user' || message.content.includes('\n\nHuman: '))) { + if (message.content.includes('\n\nAssistant: ')) { + hasAssist = true; + } hasUser = true; } return message.role === 'assistant' && i > 0; }); - // When 2.1 and 'Use system prompt" checked, switches to system prompt format by setting the first message's role to 'system'. - // Also, insert the human's message before the first the assistant one, in case there are no such message or prefix found. + // When 2.1 and 'Use system prompt" checked, switches to the system prompt format by setting the first message's role to the 'system'. + // Inserts the human's message before the first the assistant one, if there are no such message or prefix found. if (withSyspromptSupport && useSystemPrompt) { messages[0].role = 'system'; - if (firstAssistantIndex > 0 && !hasUser) { + if (firstAssistantIndex > 0 && (!hasUser || (hasUser && hasAssist))) { //addSysHumanMsg for test messages.splice(firstAssistantIndex, 0, { role: 'user', content: addSysHumanMsg || 'Let\'s get started.', @@ -50,6 +55,7 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, } console.log(JSON.stringify(messages, null, 2)); + // Convert messages to requestPrompt. let requestPrompt = messages.map((v, i) => { // Claude doesn't support message names, so we'll just add them to the message content. 
@@ -63,7 +69,7 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, 'user': '\n\nHuman: ', 'system': i === 0 ? '' : v.name === 'example_assistant' ? '\n\nA: ' : v.name === 'example_user' ? '\n\nH: ' : '\n\n', 'FixHumMsg': '\n\nFirst message: ', - }[v.role] ?? '\n\n4'; + }[v.role] ?? '\n\n'; return prefix + v.content; }).join(''); From 9f2d32524c5a4c1ccf0065604ab5f5552cb45199 Mon Sep 17 00:00:00 2001 From: Fayiron <59962292+Fayiron@users.noreply.github.com> Date: Sat, 16 Dec 2023 14:39:30 +0100 Subject: [PATCH 11/82] Add TogetherAI as a chat completion source, basic --- public/index.html | 7 ++++--- public/script.js | 8 +++++++- public/scripts/RossAscends-mods.js | 1 + public/scripts/extensions/vectors/index.js | 5 +++-- public/scripts/extensions/vectors/settings.html | 1 + 5 files changed, 16 insertions(+), 6 deletions(-) diff --git a/public/index.html b/public/index.html index 58e42f700..8b79b2455 100644 --- a/public/index.html +++ b/public/index.html @@ -509,7 +509,7 @@
-
+
Top P
@@ -749,7 +749,7 @@
-
+
Seed
@@ -1598,7 +1598,7 @@ - +
@@ -1847,6 +1847,7 @@ +

OpenAI API key

diff --git a/public/script.js b/public/script.js index 64cb41752..5a173d7f8 100644 --- a/public/script.js +++ b/public/script.js @@ -5326,6 +5326,7 @@ function changeMainAPI() { case chat_completion_sources.OPENAI: case chat_completion_sources.AI21: case chat_completion_sources.PALM: + case chat_completion_sources.TOGETHERAI: default: setupChatCompletionPromptManager(oai_settings); break; @@ -7460,6 +7461,11 @@ function connectAPISlash(_, text) { source: 'palm', button: '#api_button_openai', }, + 'togetherai': { + selected: 'openai', + source: 'togetherai', + button: '#api_button_openai', + }, }; const apiConfig = apiMap[text]; @@ -7734,7 +7740,7 @@ jQuery(async function () { } registerSlashCommand('dupe', DupeChar, [], '– duplicates the currently selected character', true, true); - registerSlashCommand('api', connectAPISlash, [], '(kobold, horde, novel, ooba, oai, claude, windowai, openrouter, scale, ai21, palm) – connect to an API', true, true); + registerSlashCommand('api', connectAPISlash, [], '(kobold, horde, novel, ooba, oai, claude, windowai, openrouter, scale, ai21, palm, togetherai) – connect to an API', true, true); registerSlashCommand('impersonate', doImpersonate, ['imp'], '– calls an impersonation response', true, true); registerSlashCommand('delchat', doDeleteChat, [], '– deletes the current chat', true, true); registerSlashCommand('closechat', doCloseChat, [], '– closes the current chat', true, true); diff --git a/public/scripts/RossAscends-mods.js b/public/scripts/RossAscends-mods.js index a565502eb..875dd571d 100644 --- a/public/scripts/RossAscends-mods.js +++ b/public/scripts/RossAscends-mods.js @@ -416,6 +416,7 @@ function RA_autoconnect(PrevApi) { || (secret_state[SECRET_KEYS.OPENROUTER] && oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER) || (secret_state[SECRET_KEYS.AI21] && oai_settings.chat_completion_source == chat_completion_sources.AI21) || (secret_state[SECRET_KEYS.PALM] && oai_settings.chat_completion_source == 
chat_completion_sources.PALM) + || (secret_state[SECRET_KEYS.TOGETHERAI] && oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI) ) { $('#api_button_openai').trigger('click'); } diff --git a/public/scripts/extensions/vectors/index.js b/public/scripts/extensions/vectors/index.js index dd5744c72..89137d939 100644 --- a/public/scripts/extensions/vectors/index.js +++ b/public/scripts/extensions/vectors/index.js @@ -392,8 +392,9 @@ async function getSavedHashes(collectionId) { * @returns {Promise} */ async function insertVectorItems(collectionId, items) { - if (settings.source === 'openai' && !secret_state[SECRET_KEYS.OPENAI] || - settings.source === 'palm' && !secret_state[SECRET_KEYS.PALM]) { + if ((settings.source === 'openai' && !secret_state[SECRET_KEYS.OPENAI]) || + (settings.source === 'palm' && !secret_state[SECRET_KEYS.PALM]) || + (settings.source === 'togetherai' && !secret_state[SECRET_KEYS.TOGETHERAI])) { throw new Error('Vectors: API key missing', { cause: 'api_key_missing' }); } diff --git a/public/scripts/extensions/vectors/settings.html b/public/scripts/extensions/vectors/settings.html index fa6c6f4c7..c7fbbeab4 100644 --- a/public/scripts/extensions/vectors/settings.html +++ b/public/scripts/extensions/vectors/settings.html @@ -13,6 +13,7 @@ +
From ccb6ba8cb381afc1db27da3b0e8d27971c44db00 Mon Sep 17 00:00:00 2001 From: Fayiron <59962292+Fayiron@users.noreply.github.com> Date: Sat, 16 Dec 2023 14:48:49 +0100 Subject: [PATCH 12/82] Add TogetherAI integration form --- public/index.html | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/public/index.html b/public/index.html index 8b79b2455..b65ce4490 100644 --- a/public/index.html +++ b/public/index.html @@ -2125,6 +2125,23 @@ +
+

TogetherAI API Key

+
+ + +
+
+ For privacy reasons, your API key will be hidden after you reload the page. +
+
+

TogetherAI Model

+ +
From 2e1660e895531ffebd013f1915b6253307b71144 Mon Sep 17 00:00:00 2001 From: Fayiron <59962292+Fayiron@users.noreply.github.com> Date: Sat, 16 Dec 2023 15:08:03 +0100 Subject: [PATCH 13/82] Add TogetherAI model and settings --- public/scripts/openai.js | 43 +++++++++++++++++++++++++++++++++++++++- 1 file changed, 42 insertions(+), 1 deletion(-) diff --git a/public/scripts/openai.js b/public/scripts/openai.js index 30f4c6e29..e6aa3c43d 100644 --- a/public/scripts/openai.js +++ b/public/scripts/openai.js @@ -164,6 +164,7 @@ export const chat_completion_sources = { OPENROUTER: 'openrouter', AI21: 'ai21', PALM: 'palm', + TOGETHERAI: 'togetherai', }; const prefixMap = selected_group ? { @@ -207,6 +208,7 @@ const default_settings = { openai_model: 'gpt-3.5-turbo', claude_model: 'claude-instant-v1', ai21_model: 'j2-ultra', + togetherai_model: 'togethercomputer/GPT-NeoXT-Chat-Base-20B', // unsure here windowai_model: '', openrouter_model: openrouter_website_model, openrouter_use_fallback: false, @@ -261,6 +263,7 @@ const oai_settings = { openai_model: 'gpt-3.5-turbo', claude_model: 'claude-instant-v1', ai21_model: 'j2-ultra', + togetherai_model: 'togethercomputer/GPT-NeoXT-Chat-Base-20B', // unsure here windowai_model: '', openrouter_model: openrouter_website_model, openrouter_use_fallback: false, @@ -1258,6 +1261,8 @@ function getChatCompletionModel() { return oai_settings.openrouter_model !== openrouter_website_model ? 
oai_settings.openrouter_model : null; case chat_completion_sources.AI21: return oai_settings.ai21_model; + case chat_completion_sources.TOGETHERAI: + return oai_settings.togetherai_model; default: throw new Error(`Unknown chat completion source: ${oai_settings.chat_completion_source}`); } @@ -1445,6 +1450,7 @@ async function sendOpenAIRequest(type, messages, signal) { const isAI21 = oai_settings.chat_completion_source == chat_completion_sources.AI21; const isPalm = oai_settings.chat_completion_source == chat_completion_sources.PALM; const isOAI = oai_settings.chat_completion_source == chat_completion_sources.OPENAI; + const isTogetherAI = oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI; const isTextCompletion = (isOAI && textCompletionModels.includes(oai_settings.openai_model)) || (isOpenRouter && oai_settings.openrouter_force_instruct && power_user.instruct.enabled); const isQuiet = type === 'quiet'; const isImpersonate = type === 'impersonate'; @@ -1552,7 +1558,7 @@ async function sendOpenAIRequest(type, messages, signal) { generate_data['stop_tokens'] = [name1 + ':', oai_settings.new_chat_prompt, oai_settings.new_group_chat_prompt]; } - if ((isOAI || isOpenRouter) && oai_settings.seed >= 0) { + if ((isOAI || isOpenRouter || isTogetherAI) && oai_settings.seed >= 0) { generate_data['seed'] = oai_settings.seed; } @@ -2329,6 +2335,7 @@ function loadOpenAISettings(data, settings) { oai_settings.assistant_prefill = settings.assistant_prefill ?? default_settings.assistant_prefill; oai_settings.image_inlining = settings.image_inlining ?? default_settings.image_inlining; oai_settings.bypass_status_check = settings.bypass_status_check ?? default_settings.bypass_status_check; + oai_settings.togetherai_model = settings.togetherai_model ?? default_settings.togetherai_model; oai_settings.prompts = settings.prompts ?? default_settings.prompts; oai_settings.prompt_order = settings.prompt_order ?? 
default_settings.prompt_order; @@ -2360,6 +2367,8 @@ function loadOpenAISettings(data, settings) { $(`#model_windowai_select option[value="${oai_settings.windowai_model}"`).attr('selected', true); $('#model_ai21_select').val(oai_settings.ai21_model); $(`#model_ai21_select option[value="${oai_settings.ai21_model}"`).attr('selected', true); + $('#model_togetherai_select').val(oai_settings.togetherai_model); + $(`#model_togetherai_select option[value="${oai_settings.togetherai_model}"`).attr('selected', true); $('#openai_max_context').val(oai_settings.openai_max_context); $('#openai_max_context_counter').val(`${oai_settings.openai_max_context}`); $('#model_openrouter_select').val(oai_settings.openrouter_model); @@ -2554,6 +2563,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) { openrouter_group_models: settings.openrouter_group_models, openrouter_sort_models: settings.openrouter_sort_models, ai21_model: settings.ai21_model, + togetherai_model: settings.togetherai_model, temperature: settings.temp_openai, frequency_penalty: settings.freq_pen_openai, presence_penalty: settings.pres_pen_openai, @@ -2924,6 +2934,7 @@ function onSettingsPresetChange() { openrouter_group_models: ['#openrouter_group_models', 'openrouter_group_models', false], openrouter_sort_models: ['#openrouter_sort_models', 'openrouter_sort_models', false], ai21_model: ['#model_ai21_select', 'ai21_model', false], + togetherai_model: ['#model_togetherai_select', 'togetherai_model', false], openai_max_context: ['#openai_max_context', 'openai_max_context', false], openai_max_tokens: ['#openai_max_tokens', 'openai_max_tokens', false], wrap_in_quotes: ['#wrap_in_quotes', 'wrap_in_quotes', true], @@ -3102,6 +3113,11 @@ async function onModelChange() { oai_settings.ai21_model = value; } + if ($(this).is('#model_togetherai_select')) { + console.log('TogetherAI model changed to', value); + oai_settings.togetherai_model = value; + } + if (oai_settings.chat_completion_source == 
chat_completion_sources.SCALE) { if (oai_settings.max_context_unlocked) { $('#openai_max_context').attr('max', unlocked_max); @@ -3191,6 +3207,14 @@ async function onModelChange() { } } + // not sure if this is enough + if (oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI) { + $('#openai_max_context').attr('max', max_2k); // assuming togethercomputer/GPT-NeoXT-Chat-Base-20B + oai_settings.openai_max_context = Math.min(oai_settings.openai_max_context, Number($('#openai_max_context').attr('max'))); + $('#openai_max_context').val(oai_settings.openai_max_context).trigger('input'); + + } + if (oai_settings.chat_completion_source == chat_completion_sources.OPENAI) { $('#openai_max_context').attr('max', getMaxContextOpenAI(value)); oai_settings.openai_max_context = Math.min(oai_settings.openai_max_context, Number($('#openai_max_context').attr('max'))); @@ -3363,6 +3387,19 @@ async function onConnectButtonClick(e) { } } + if (oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI) { + const api_key_togetherai = String($('#api_key_togetherai').val()).trim(); + + if (api_key_togetherai.length) { + await writeSecret(SECRET_KEYS.TOGETHERAI, api_key_togetherai); + } + + if (!secret_state[SECRET_KEYS.TOGETHERAI]) { + console.log('No secret key saved for TogetherAI'); + return; + } + } + startStatusLoading(); saveSettingsDebounced(); await getStatusOpen(); @@ -3395,6 +3432,9 @@ function toggleChatCompletionForms() { else if (oai_settings.chat_completion_source == chat_completion_sources.AI21) { $('#model_ai21_select').trigger('change'); } + else if (oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI) { + $('#model_togetherai_select').trigger('change'); + } $('[data-source]').each(function () { const validSources = $(this).data('source').split(','); $(this).toggle(validSources.includes(oai_settings.chat_completion_source)); @@ -3792,6 +3832,7 @@ $(document).ready(async function () { 
$('#openrouter_group_models').on('change', onOpenrouterModelSortChange); $('#openrouter_sort_models').on('change', onOpenrouterModelSortChange); $('#model_ai21_select').on('change', onModelChange); + $('#model_togetherai_select').on('change', onModelChange); $('#settings_preset_openai').on('change', onSettingsPresetChange); $('#new_oai_preset').on('click', onNewPresetClick); $('#delete_oai_preset').on('click', onDeletePresetClick); From eb2a85cfe00c5de2a02ad8afff970902d1046ff3 Mon Sep 17 00:00:00 2001 From: Fayiron <59962292+Fayiron@users.noreply.github.com> Date: Sat, 16 Dec 2023 15:11:04 +0100 Subject: [PATCH 14/82] Add TogetherAI API key to secrets.js --- public/scripts/secrets.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/public/scripts/secrets.js b/public/scripts/secrets.js index 84279641d..798baa7b2 100644 --- a/public/scripts/secrets.js +++ b/public/scripts/secrets.js @@ -14,6 +14,7 @@ export const SECRET_KEYS = { SCALE_COOKIE: 'scale_cookie', PALM: 'api_key_palm', SERPAPI: 'api_key_serpapi', + TOGETHERAI: 'api_key_togetherai', }; const INPUT_MAP = { @@ -29,6 +30,7 @@ const INPUT_MAP = { [SECRET_KEYS.PALM]: '#api_key_palm', [SECRET_KEYS.APHRODITE]: '#api_key_aphrodite', [SECRET_KEYS.TABBY]: '#api_key_tabby', + [SECRET_KEYS.TOGETHERAI]: '#api_key_togetherai', }; async function clearSecret() { From 7b0ecd3e61880447c8aec41e7d34e9a5e0b52bec Mon Sep 17 00:00:00 2001 From: Fayiron <59962292+Fayiron@users.noreply.github.com> Date: Sat, 16 Dec 2023 15:17:03 +0100 Subject: [PATCH 15/82] more stuff --- src/constants.js | 1 + src/endpoints/secrets.js | 1 + 2 files changed, 2 insertions(+) diff --git a/src/constants.js b/src/constants.js index 32ea6fad5..aa1e2a68d 100644 --- a/src/constants.js +++ b/src/constants.js @@ -140,6 +140,7 @@ const CHAT_COMPLETION_SOURCES = { OPENROUTER: 'openrouter', AI21: 'ai21', PALM: 'palm', + TOGETHERAI: 'togetherai', }; const UPLOADS_PATH = './uploads'; diff --git a/src/endpoints/secrets.js b/src/endpoints/secrets.js index 
54687cbeb..11b1bd340 100644 --- a/src/endpoints/secrets.js +++ b/src/endpoints/secrets.js @@ -25,6 +25,7 @@ const SECRET_KEYS = { DEEPLX_URL: 'deeplx_url', PALM: 'api_key_palm', SERPAPI: 'api_key_serpapi', + TOGETHERAI: 'api_key_togetherai', }; /** From 80e82171c1e635ad035fab097c3d515afd267322 Mon Sep 17 00:00:00 2001 From: Fayiron <59962292+Fayiron@users.noreply.github.com> Date: Sat, 16 Dec 2023 17:06:52 +0100 Subject: [PATCH 16/82] Add support for multiple OpenAI compatible endpoints --- src/endpoints/togetherai.js | 4 ++++ src/endpoints/vectors.js | 3 ++- src/openai-vectors.js | 43 +++++++++++++++++++++++++++---------- 3 files changed, 38 insertions(+), 12 deletions(-) create mode 100644 src/endpoints/togetherai.js diff --git a/src/endpoints/togetherai.js b/src/endpoints/togetherai.js new file mode 100644 index 000000000..309eff1f3 --- /dev/null +++ b/src/endpoints/togetherai.js @@ -0,0 +1,4 @@ +/** + * sends a request to the together AI api + * + */ \ No newline at end of file diff --git a/src/endpoints/vectors.js b/src/endpoints/vectors.js index 387803ccb..eb3efc88b 100644 --- a/src/endpoints/vectors.js +++ b/src/endpoints/vectors.js @@ -12,8 +12,9 @@ const { jsonParser } = require('../express-common'); */ async function getVector(source, text) { switch (source) { + case 'togetherai': case 'openai': - return require('../openai-vectors').getOpenAIVector(text); + return require('../openai-vectors').getOpenAIVector(text, source); case 'transformers': return require('../embedding').getTransformersVector(text); case 'palm': diff --git a/src/openai-vectors.js b/src/openai-vectors.js index ecb245065..08bd15bf0 100644 --- a/src/openai-vectors.js +++ b/src/openai-vectors.js @@ -2,19 +2,40 @@ const fetch = require('node-fetch').default; const { SECRET_KEYS, readSecret } = require('./endpoints/secrets'); /** - * Gets the vector for the given text from OpenAI ada model + * Gets the vector for the given text from an OpenAI compatible endpoint. 
* @param {string} text - The text to get the vector for + * @param {string} source - The source of the vector * @returns {Promise} - The vector for the text */ -async function getOpenAIVector(text) { - const key = readSecret(SECRET_KEYS.OPENAI); +async function getOpenAIVector(text, source) { + + // dictionary of sources to endpoints with source as key and endpoint, model and secret key as value + const endpoints = { + 'togetherai': { + endpoint: 'https://api.togetherai.xyz/v1/embeddings', // is this correct? + model: 'togethercomputer/GPT-NeoXT-Chat-Base-20B', + secret: SECRET_KEYS.TOGETHERAI, + }, + 'openai': { + endpoint: 'https://api.openai.com/v1/embeddings', + model: 'text-embedding-ada-002', + secret: SECRET_KEYS.OPENAI, + }, + 'mistral': { + endpoint: 'https://api.mistral.ai/v1/embeddings', + model: 'mistral-embed', + secret: SECRET_KEYS.MISTRAL, + }, + }; + + const key = readSecret(endpoints[source].secret); if (!key) { - console.log('No OpenAI key found'); - throw new Error('No OpenAI key found'); + console.log('No %s key found.', source); + throw new Error('No ${source} key found.'); } - const response = await fetch('https://api.openai.com/v1/embeddings', { + const response = await fetch(endpoints[source].endpoint, { method: 'POST', headers: { 'Content-Type': 'application/json', @@ -22,22 +43,22 @@ async function getOpenAIVector(text) { }, body: JSON.stringify({ input: text, - model: 'text-embedding-ada-002', + model: endpoints[source].model, }), }); if (!response.ok) { const text = await response.text(); - console.log('OpenAI request failed', response.statusText, text); - throw new Error('OpenAI request failed'); + console.log('${source} request failed', response.statusText, text); + throw new Error('${source} request failed'); } const data = await response.json(); const vector = data?.data[0]?.embedding; if (!Array.isArray(vector)) { - console.log('OpenAI response was not an array'); - throw new Error('OpenAI response was not an array'); + 
console.log('${source} response was not an array'); + throw new Error('${source} response was not an array'); } return vector; From 0a9f6783998e2eb0fbb5d5944f4d30f0b9e63dcf Mon Sep 17 00:00:00 2001 From: Fayiron <59962292+Fayiron@users.noreply.github.com> Date: Sat, 16 Dec 2023 17:09:09 +0100 Subject: [PATCH 17/82] Remove togetherai.js --- src/endpoints/togetherai.js | 4 ---- 1 file changed, 4 deletions(-) delete mode 100644 src/endpoints/togetherai.js diff --git a/src/endpoints/togetherai.js b/src/endpoints/togetherai.js deleted file mode 100644 index 309eff1f3..000000000 --- a/src/endpoints/togetherai.js +++ /dev/null @@ -1,4 +0,0 @@ -/** - * sends a request to the together AI api - * - */ \ No newline at end of file From c7c1513e91c2e111ea18938fd8d321dbc1026333 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Sun, 17 Dec 2023 19:41:20 +0200 Subject: [PATCH 18/82] Add proxy support for multimodal captions. Add caption pre-prompt --- default/config.yaml | 4 ++++ public/scripts/extensions/caption/index.js | 11 ++++++++++- public/scripts/extensions/shared.js | 14 ++++++++++++- src/endpoints/openai.js | 23 +++++++++++++++++++--- 4 files changed, 47 insertions(+), 5 deletions(-) diff --git a/default/config.yaml b/default/config.yaml index fd0be655c..edee81009 100644 --- a/default/config.yaml +++ b/default/config.yaml @@ -54,6 +54,10 @@ extras: openai: # Will send a random user ID to OpenAI completion API randomizeUserId: false + # If not empty, will add this as a system message to the start of every caption completion prompt + # Example: "Perform the instructions to the best of your ability.\n\n" (for LLaVA) + # Not used in image inlining mode + captionSystemPrompt: "" # -- DEEPL TRANSLATION CONFIGURATION -- deepl: # Available options: default, more, less, prefer_more, prefer_less diff --git a/public/scripts/extensions/caption/index.js b/public/scripts/extensions/caption/index.js index aa666a232..97cc4cf37 100644 --- 
a/public/scripts/extensions/caption/index.js +++ b/public/scripts/extensions/caption/index.js @@ -300,7 +300,7 @@ jQuery(function () { $('#caption_prompt_block').toggle(isMultimodal); $('#caption_multimodal_api').val(extension_settings.caption.multimodal_api); $('#caption_multimodal_model').val(extension_settings.caption.multimodal_model); - $('#caption_multimodal_model option').each(function () { + $('#caption_multimodal_block [data-type]').each(function () { const type = $(this).data('type'); $(this).toggle(type === extension_settings.caption.multimodal_api); }); @@ -351,6 +351,10 @@ jQuery(function () {
+
@@ -377,6 +381,7 @@ jQuery(function () { switchMultimodalBlocks(); $('#caption_refine_mode').prop('checked', !!(extension_settings.caption.refine_mode)); + $('#caption_allow_reverse_proxy').prop('checked', !!(extension_settings.caption.allow_reverse_proxy)); $('#caption_source').val(extension_settings.caption.source); $('#caption_prompt').val(extension_settings.caption.prompt); $('#caption_template').val(extension_settings.caption.template); @@ -394,4 +399,8 @@ jQuery(function () { extension_settings.caption.template = String($('#caption_template').val()); saveSettingsDebounced(); }); + $('#caption_allow_reverse_proxy').on('input', () => { + extension_settings.caption.allow_reverse_proxy = $('#caption_allow_reverse_proxy').prop('checked'); + saveSettingsDebounced(); + }); }); diff --git a/public/scripts/extensions/shared.js b/public/scripts/extensions/shared.js index 9058204ec..7d4e16720 100644 --- a/public/scripts/extensions/shared.js +++ b/public/scripts/extensions/shared.js @@ -1,7 +1,8 @@ import { getRequestHeaders } from '../../script.js'; import { extension_settings } from '../extensions.js'; +import { oai_settings } from '../openai.js'; import { SECRET_KEYS, secret_state } from '../secrets.js'; -import { createThumbnail } from '../utils.js'; +import { createThumbnail, isValidUrl } from '../utils.js'; /** * Generates a caption for an image using a multimodal model. @@ -35,6 +36,15 @@ export async function getMultimodalCaption(base64Img, prompt) { } } + const useReverseProxy = + extension_settings.caption.multimodal_api === 'openai' + && extension_settings.caption.allow_reverse_proxy + && oai_settings.reverse_proxy + && isValidUrl(oai_settings.reverse_proxy); + + const proxyUrl = useReverseProxy ? oai_settings.reverse_proxy : ''; + const proxyPassword = useReverseProxy ? oai_settings.proxy_password : ''; + const apiResult = await fetch(`/api/${isGoogle ? 
'google' : 'openai'}/caption-image`, { method: 'POST', headers: getRequestHeaders(), @@ -46,6 +56,8 @@ export async function getMultimodalCaption(base64Img, prompt) { : { api: extension_settings.caption.multimodal_api || 'openai', model: extension_settings.caption.multimodal_model || 'gpt-4-vision-preview', + reverse_proxy: proxyUrl, + proxy_password: proxyPassword, }), }), }); diff --git a/src/endpoints/openai.js b/src/endpoints/openai.js index 23a19f943..cb98cf274 100644 --- a/src/endpoints/openai.js +++ b/src/endpoints/openai.js @@ -4,6 +4,7 @@ const express = require('express'); const FormData = require('form-data'); const fs = require('fs'); const { jsonParser, urlencodedParser } = require('../express-common'); +const { getConfigValue } = require('../util'); const router = express.Router(); @@ -11,15 +12,19 @@ router.post('/caption-image', jsonParser, async (request, response) => { try { let key = ''; - if (request.body.api === 'openai') { + if (request.body.api === 'openai' && !request.body.reverse_proxy) { key = readSecret(SECRET_KEYS.OPENAI); } - if (request.body.api === 'openrouter') { + if (request.body.api === 'openrouter' && !request.body.reverse_proxy) { key = readSecret(SECRET_KEYS.OPENROUTER); } - if (!key) { + if (request.body.reverse_proxy && request.body.proxy_password) { + key = request.body.proxy_password; + } + + if (!key && !request.body.reverse_proxy) { console.log('No key found for API', request.body.api); return response.sendStatus(400); } @@ -38,6 +43,14 @@ router.post('/caption-image', jsonParser, async (request, response) => { max_tokens: 500, }; + const captionSystemPrompt = getConfigValue('openai.captionSystemPrompt'); + if (captionSystemPrompt) { + body.messages.unshift({ + role: 'system', + content: captionSystemPrompt, + }); + } + console.log('Multimodal captioning request', body); let apiUrl = ''; @@ -52,6 +65,10 @@ router.post('/caption-image', jsonParser, async (request, response) => { apiUrl = 
'https://api.openai.com/v1/chat/completions'; } + if (request.body.reverse_proxy) { + apiUrl = `${request.body.reverse_proxy}/chat/completions`; + } + const result = await fetch(apiUrl, { method: 'POST', headers: { From fb25a905323aad20fc17ea24d0ade6b378b46986 Mon Sep 17 00:00:00 2001 From: LenAnderson Date: Sun, 17 Dec 2023 17:45:23 +0000 Subject: [PATCH 19/82] add GENERATION_STARTED event --- public/script.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/public/script.js b/public/script.js index da11e20dd..21e8208c0 100644 --- a/public/script.js +++ b/public/script.js @@ -291,6 +291,7 @@ export const event_types = { MESSAGE_DELETED: 'message_deleted', IMPERSONATE_READY: 'impersonate_ready', CHAT_CHANGED: 'chat_id_changed', + GENERATION_STARTED: 'generation_started', GENERATION_STOPPED: 'generation_stopped', EXTENSIONS_FIRST_LOAD: 'extensions_first_load', SETTINGS_LOADED: 'settings_loaded', @@ -2925,6 +2926,7 @@ export async function generateRaw(prompt, api, instructOverride) { // Returns a promise that resolves when the text is done generating. 
async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, quietToLoud, skipWIAN, force_chid, signal, quietImage, maxLoops } = {}, dryRun = false) { console.log('Generate entered'); + eventSource.emit(event_types.GENERATION_STARTED, type, { automatic_trigger, force_name2, quiet_prompt, quietToLoud, skipWIAN, force_chid, signal, quietImage, maxLoops }, dryRun); setGenerationProgress(0); generation_started = new Date(); From a91f0f7ee562e3bf412be874a4aa73f31aeda950 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Sun, 17 Dec 2023 21:02:32 +0200 Subject: [PATCH 20/82] Fix saving presets after deletion of the same name --- public/scripts/preset-manager.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/public/scripts/preset-manager.js b/public/scripts/preset-manager.js index 8b88f97a1..3a7a48907 100644 --- a/public/scripts/preset-manager.js +++ b/public/scripts/preset-manager.js @@ -323,7 +323,7 @@ class PresetManager { } async deleteCurrentPreset() { - const { preset_names } = this.getPresetList(); + const { preset_names, presets } = this.getPresetList(); const value = this.getSelectedPreset(); const nameToDelete = this.getSelectedPresetName(); @@ -335,7 +335,9 @@ class PresetManager { $(this.select).find(`option[value="${value}"]`).remove(); if (this.isKeyedApi()) { - preset_names.splice(preset_names.indexOf(value), 1); + const index = preset_names.indexOf(nameToDelete); + preset_names.splice(index, 1); + presets.splice(index, 1); } else { delete preset_names[nameToDelete]; } From 960c91c1476569cff8532a3356d300664afde130 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Sun, 17 Dec 2023 21:14:16 +0200 Subject: [PATCH 21/82] Add confirmation for convert to group --- public/scripts/bookmarks.js | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/public/scripts/bookmarks.js b/public/scripts/bookmarks.js index 131f95961..7b59fcab4 100644 --- 
a/public/scripts/bookmarks.js +++ b/public/scripts/bookmarks.js @@ -237,6 +237,12 @@ async function convertSoloToGroupChat() { return; } + const confirm = await callPopup('Are you sure you want to convert this chat to a group chat?', 'confirm'); + + if (!confirm) { + return; + } + const character = characters[this_chid]; // Populate group required fields From 61e87d562f4a20e846b81f8722b20d8a6d6b0ed1 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Sun, 17 Dec 2023 21:27:33 +0200 Subject: [PATCH 22/82] Add confirmation for regen with keystroke --- public/scripts/RossAscends-mods.js | 29 ++++++++++++++++++++++++++--- 1 file changed, 26 insertions(+), 3 deletions(-) diff --git a/public/scripts/RossAscends-mods.js b/public/scripts/RossAscends-mods.js index 9c5d9aa1a..a096ba8ba 100644 --- a/public/scripts/RossAscends-mods.js +++ b/public/scripts/RossAscends-mods.js @@ -18,6 +18,7 @@ import { eventSource, menu_type, substituteParams, + callPopup, } from '../script.js'; import { @@ -995,9 +996,31 @@ export function initRossMods() { console.debug('Accepting edits with Ctrl+Enter'); editMesDone.trigger('click'); } else if (is_send_press == false) { - console.debug('Regenerating with Ctrl+Enter'); - $('#option_regenerate').click(); - $('#options').hide(); + const skipConfirmKey = 'RegenerateWithCtrlEnter'; + const skipConfirm = LoadLocalBool(skipConfirmKey); + function doRegenerate() { + console.debug('Regenerating with Ctrl+Enter'); + $('#option_regenerate').trigger('click'); + $('#options').hide(); + } + if (skipConfirm) { + doRegenerate(); + } else { + const popupText = ` +
Are you sure you want to regenerate the latest message?
+ `; + callPopup(popupText, 'confirm').then(result =>{ + if (!result) { + return; + } + const regenerateWithCtrlEnter = $('#regenerateWithCtrlEnter').prop('checked'); + SaveLocal(skipConfirmKey, regenerateWithCtrlEnter); + doRegenerate(); + }); + } } else { console.debug('Ctrl+Enter ignored'); } From b0d9f14534be0e6f9f70980c166206b77b69448a Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Sun, 17 Dec 2023 23:38:03 +0200 Subject: [PATCH 23/82] Re-add Together as a text completion source --- default/settings.json | 1 - public/img/togetherai.svg | 55 ++++++++ public/index.html | 47 +++---- public/script.js | 35 +++-- public/scripts/RossAscends-mods.js | 5 +- public/scripts/extensions/vectors/index.js | 3 +- .../scripts/extensions/vectors/settings.html | 1 - public/scripts/mancer-settings.js | 64 ---------- public/scripts/openai.js | 44 +------ public/scripts/textgen-models.js | 120 ++++++++++++++++++ public/scripts/textgen-settings.js | 33 +++-- public/scripts/tokenizers.js | 5 +- src/additional-headers.js | 11 ++ src/constants.js | 15 ++- src/endpoints/backends/text-completions.js | 23 +++- src/endpoints/vectors.js | 1 - src/openai-vectors.js | 6 - 17 files changed, 300 insertions(+), 169 deletions(-) create mode 100644 public/img/togetherai.svg delete mode 100644 public/scripts/mancer-settings.js create mode 100644 public/scripts/textgen-models.js diff --git a/default/settings.json b/default/settings.json index 1f3a3260d..3dd13670f 100644 --- a/default/settings.json +++ b/default/settings.json @@ -3,7 +3,6 @@ "username": "User", "api_server": "http://127.0.0.1:5000/api", "api_server_textgenerationwebui": "http://127.0.0.1:5000/api", - "api_use_mancer_webui": false, "preset_settings": "RecoveredRuins", "user_avatar": "user-default.png", "amount_gen": 250, diff --git a/public/img/togetherai.svg b/public/img/togetherai.svg new file mode 100644 index 000000000..e1e4c0407 --- /dev/null +++ b/public/img/togetherai.svg @@ -0,0 +1,55 
@@ + + + + + + + + + + + + + diff --git a/public/index.html b/public/index.html index ea673f7c0..0bfa60876 100644 --- a/public/index.html +++ b/public/index.html @@ -69,7 +69,7 @@ - + @@ -509,7 +509,7 @@
-
+
Top P
@@ -749,7 +749,7 @@
-
+
Seed
@@ -1592,8 +1592,8 @@ - - + +
@@ -1723,8 +1723,27 @@ +
+
+

TogetherAI API Key

+
+ + +
+
+ For privacy reasons, your API key will be hidden after you reload the page. +
+
+

TogetherAI Model

+ +
+
@@ -1843,7 +1862,6 @@ -

OpenAI API key

@@ -2148,23 +2166,6 @@
-
-

TogetherAI API Key

-
- - -
-
- For privacy reasons, your API key will be hidden after you reload the page. -
-
-

TogetherAI Model

- -
diff --git a/public/script.js b/public/script.js index 3182f7f71..0c04d8b73 100644 --- a/public/script.js +++ b/public/script.js @@ -19,10 +19,10 @@ import { getTextGenUrlSourceId, textgen_types, textgenerationwebui_banned_in_macros, - MANCER_SERVER, + getTextGenServer, } from './scripts/textgen-settings.js'; -const { MANCER } = textgen_types; +const { MANCER, TOGETHERAI } = textgen_types; import { world_info, @@ -189,7 +189,7 @@ import { createPersona, initPersonas, selectCurrentPersona, setPersonaDescriptio import { getBackgrounds, initBackgrounds, loadBackgroundSettings, background_settings } from './scripts/backgrounds.js'; import { hideLoader, showLoader } from './scripts/loader.js'; import { BulkEditOverlay, CharacterContextMenu } from './scripts/BulkEditOverlay.js'; -import { loadMancerModels } from './scripts/mancer-settings.js'; +import { loadMancerModels, loadTogetherAIModels } from './scripts/textgen-models.js'; import { appendFileContent, hasPendingFileAttachment, populateFileAttachment } from './scripts/chats.js'; import { replaceVariableMacros } from './scripts/variables.js'; import { initPresetManager } from './scripts/preset-manager.js'; @@ -931,9 +931,7 @@ async function getStatusKobold() { async function getStatusTextgen() { const url = '/api/backends/text-completions/status'; - let endpoint = textgen_settings.type === MANCER ? 
- MANCER_SERVER : - api_server_textgenerationwebui; + let endpoint = getTextGenServer(); if (!endpoint) { console.warn('No endpoint for status check'); @@ -949,7 +947,8 @@ async function getStatusTextgen() { api_type: textgen_settings.type, legacy_api: textgen_settings.legacy_api && - textgen_settings.type !== MANCER, + textgen_settings.type !== MANCER && + textgen_settings.type !== TOGETHERAI, }), signal: abortStatusCheck.signal, }); @@ -959,6 +958,9 @@ async function getStatusTextgen() { if (textgen_settings.type === MANCER) { online_status = textgen_settings.mancer_model; loadMancerModels(data?.data); + } else if (textgen_settings.type === TOGETHERAI) { + online_status = textgen_settings.togetherai_model; + loadTogetherAIModels(data?.data); } else { online_status = data?.result; } @@ -2958,7 +2960,8 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu if (main_api === 'textgenerationwebui' && textgen_settings.streaming && textgen_settings.legacy_api && - textgen_settings.type !== MANCER) { + textgen_settings.type !== MANCER && + textgen_settings.type !== TOGETHERAI) { toastr.error('Streaming is not supported for the Legacy API. 
Update Ooba and use --extensions openai to enable streaming.', undefined, { timeOut: 10000, preventDuplicates: true }); unblockGeneration(); return Promise.resolve(); @@ -5399,7 +5402,6 @@ function changeMainAPI() { case chat_completion_sources.AI21: case chat_completion_sources.MAKERSUITE: case chat_completion_sources.MISTRALAI: - case chat_completion_sources.TOGETHERAI: default: setupChatCompletionPromptManager(oai_settings); break; @@ -7515,6 +7517,11 @@ async function connectAPISlash(_, text) { button: '#api_button_textgenerationwebui', type: textgen_types.KOBOLDCPP, }, + 'togetherai': { + selected: 'textgenerationwebui', + button: '#api_button_textgenerationwebui', + type: textgen_types.TOGETHERAI, + }, 'oai': { selected: 'openai', source: 'openai', @@ -7555,11 +7562,6 @@ async function connectAPISlash(_, text) { source: 'mistralai', button: '#api_button_openai', }, - 'togetherai': { - selected: 'openai', - source: 'togetherai', - button: '#api_button_openai', - }, }; const apiConfig = apiMap[text.toLowerCase()]; @@ -8400,6 +8402,11 @@ jQuery(async function () { await writeSecret(SECRET_KEYS.TABBY, tabbyKey); } + const togetherKey = String($('#api_key_togetherai').val()).trim(); + if (togetherKey.length) { + await writeSecret(SECRET_KEYS.TOGETHERAI, togetherKey); + } + const urlSourceId = getTextGenUrlSourceId(); if (urlSourceId && $(urlSourceId).val() !== '') { diff --git a/public/scripts/RossAscends-mods.js b/public/scripts/RossAscends-mods.js index fc1d9eb26..4dd724060 100644 --- a/public/scripts/RossAscends-mods.js +++ b/public/scripts/RossAscends-mods.js @@ -382,7 +382,9 @@ function RA_autoconnect(PrevApi) { } break; case 'textgenerationwebui': - if (textgen_settings.type === textgen_types.MANCER && secret_state[SECRET_KEYS.MANCER]) { + if ((textgen_settings.type === textgen_types.MANCER && secret_state[SECRET_KEYS.MANCER]) || + (textgen_settings.type === textgen_types.TOGETHERAI && secret_state[SECRET_KEYS.TOGETHERAI]) + ) { 
$('#api_button_textgenerationwebui').trigger('click'); } else if (api_server_textgenerationwebui && isValidUrl(api_server_textgenerationwebui)) { @@ -398,7 +400,6 @@ function RA_autoconnect(PrevApi) { || (secret_state[SECRET_KEYS.AI21] && oai_settings.chat_completion_source == chat_completion_sources.AI21) || (secret_state[SECRET_KEYS.MAKERSUITE] && oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE) || (secret_state[SECRET_KEYS.MISTRALAI] && oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI) - || (secret_state[SECRET_KEYS.TOGETHERAI] && oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI) ) { $('#api_button_openai').trigger('click'); } diff --git a/public/scripts/extensions/vectors/index.js b/public/scripts/extensions/vectors/index.js index e11fa48f5..9e8777333 100644 --- a/public/scripts/extensions/vectors/index.js +++ b/public/scripts/extensions/vectors/index.js @@ -395,8 +395,7 @@ async function getSavedHashes(collectionId) { async function insertVectorItems(collectionId, items) { if (settings.source === 'openai' && !secret_state[SECRET_KEYS.OPENAI] || settings.source === 'palm' && !secret_state[SECRET_KEYS.MAKERSUITE] || - settings.source === 'mistral' && !secret_state[SECRET_KEYS.MISTRALAI] || - settings.source === 'togetherai' && !secret_state[SECRET_KEYS.TOGETHERAI]) { + settings.source === 'mistral' && !secret_state[SECRET_KEYS.MISTRALAI]) { throw new Error('Vectors: API key missing', { cause: 'api_key_missing' }); } diff --git a/public/scripts/extensions/vectors/settings.html b/public/scripts/extensions/vectors/settings.html index 2ab1d1897..b1d74c83d 100644 --- a/public/scripts/extensions/vectors/settings.html +++ b/public/scripts/extensions/vectors/settings.html @@ -14,7 +14,6 @@ -
diff --git a/public/scripts/mancer-settings.js b/public/scripts/mancer-settings.js deleted file mode 100644 index 0a08fede8..000000000 --- a/public/scripts/mancer-settings.js +++ /dev/null @@ -1,64 +0,0 @@ -import { setGenerationParamsFromPreset } from '../script.js'; -import { isMobile } from './RossAscends-mods.js'; -import { textgenerationwebui_settings as textgen_settings } from './textgen-settings.js'; - -let models = []; - -export async function loadMancerModels(data) { - if (!Array.isArray(data)) { - console.error('Invalid Mancer models data', data); - return; - } - - models = data; - - $('#mancer_model').empty(); - for (const model of data) { - const option = document.createElement('option'); - option.value = model.id; - option.text = model.name; - option.selected = model.id === textgen_settings.mancer_model; - $('#mancer_model').append(option); - } -} - -function onMancerModelSelect() { - const modelId = String($('#mancer_model').val()); - textgen_settings.mancer_model = modelId; - $('#api_button_textgenerationwebui').trigger('click'); - - const limits = models.find(x => x.id === modelId)?.limits; - setGenerationParamsFromPreset({ max_length: limits.context, genamt: limits.completion }); -} - -function getMancerModelTemplate(option) { - const model = models.find(x => x.id === option?.element?.value); - - if (!option.id || !model) { - return option.text; - } - - const creditsPerPrompt = (model.limits?.context - model.limits?.completion) * model.pricing?.prompt; - const creditsPerCompletion = model.limits?.completion * model.pricing?.completion; - const creditsTotal = Math.round(creditsPerPrompt + creditsPerCompletion).toFixed(0); - - return $((` -
-
${DOMPurify.sanitize(model.name)} | ${model.limits?.context} ctx / ${model.limits?.completion} res | Credits per request (max): ${creditsTotal}
-
- `)); -} - -jQuery(function () { - $('#mancer_model').on('change', onMancerModelSelect); - - if (!isMobile()) { - $('#mancer_model').select2({ - placeholder: 'Select a model', - searchInputPlaceholder: 'Search models...', - searchInputCssClass: 'text_pole', - width: '100%', - templateResult: getMancerModelTemplate, - }); - } -}); diff --git a/public/scripts/openai.js b/public/scripts/openai.js index a7770b90e..f3c17f373 100644 --- a/public/scripts/openai.js +++ b/public/scripts/openai.js @@ -165,7 +165,6 @@ export const chat_completion_sources = { AI21: 'ai21', MAKERSUITE: 'makersuite', MISTRALAI: 'mistralai', - TOGETHERAI: 'togetherai', }; const prefixMap = selected_group ? { @@ -210,7 +209,6 @@ const default_settings = { claude_model: 'claude-instant-v1', google_model: 'gemini-pro', ai21_model: 'j2-ultra', - togetherai_model: 'togethercomputer/GPT-NeoXT-Chat-Base-20B', // unsure here mistralai_model: 'mistral-medium', windowai_model: '', openrouter_model: openrouter_website_model, @@ -267,7 +265,6 @@ const oai_settings = { claude_model: 'claude-instant-v1', google_model: 'gemini-pro', ai21_model: 'j2-ultra', - togetherai_model: 'togethercomputer/GPT-NeoXT-Chat-Base-20B', // unsure here mistralai_model: 'mistral-medium', windowai_model: '', openrouter_model: openrouter_website_model, @@ -1267,8 +1264,6 @@ function getChatCompletionModel() { return oai_settings.openrouter_model !== openrouter_website_model ? 
oai_settings.openrouter_model : null; case chat_completion_sources.AI21: return oai_settings.ai21_model; - case chat_completion_sources.TOGETHERAI: - return oai_settings.togetherai_model; case chat_completion_sources.MISTRALAI: return oai_settings.mistralai_model; default: @@ -1458,7 +1453,6 @@ async function sendOpenAIRequest(type, messages, signal) { const isAI21 = oai_settings.chat_completion_source == chat_completion_sources.AI21; const isGoogle = oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE; const isOAI = oai_settings.chat_completion_source == chat_completion_sources.OPENAI; - const isTogetherAI = oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI; const isMistral = oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI; const isTextCompletion = (isOAI && textCompletionModels.includes(oai_settings.openai_model)) || (isOpenRouter && oai_settings.openrouter_force_instruct && power_user.instruct.enabled); const isQuiet = type === 'quiet'; @@ -1571,7 +1565,7 @@ async function sendOpenAIRequest(type, messages, signal) { generate_data['safe_mode'] = false; // already defaults to false, but just incase they change that in the future. } - if ((isOAI || isOpenRouter || isMistral || isTogetherAI) && oai_settings.seed >= 0) { + if ((isOAI || isOpenRouter || isMistral) && oai_settings.seed >= 0) { generate_data['seed'] = oai_settings.seed; } @@ -2325,7 +2319,6 @@ function loadOpenAISettings(data, settings) { oai_settings.assistant_prefill = settings.assistant_prefill ?? default_settings.assistant_prefill; oai_settings.image_inlining = settings.image_inlining ?? default_settings.image_inlining; oai_settings.bypass_status_check = settings.bypass_status_check ?? default_settings.bypass_status_check; - oai_settings.togetherai_model = settings.togetherai_model ?? default_settings.togetherai_model; oai_settings.prompts = settings.prompts ?? 
default_settings.prompts; oai_settings.prompt_order = settings.prompt_order ?? default_settings.prompt_order; @@ -2360,8 +2353,6 @@ function loadOpenAISettings(data, settings) { $(`#model_google_select option[value="${oai_settings.google_model}"`).attr('selected', true); $('#model_ai21_select').val(oai_settings.ai21_model); $(`#model_ai21_select option[value="${oai_settings.ai21_model}"`).attr('selected', true); - $('#model_togetherai_select').val(oai_settings.togetherai_model); - $(`#model_togetherai_select option[value="${oai_settings.togetherai_model}"`).attr('selected', true); $('#model_mistralai_select').val(oai_settings.mistralai_model); $(`#model_mistralai_select option[value="${oai_settings.mistralai_model}"`).attr('selected', true); $('#openai_max_context').val(oai_settings.openai_max_context); @@ -2541,7 +2532,6 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) { openrouter_group_models: settings.openrouter_group_models, openrouter_sort_models: settings.openrouter_sort_models, ai21_model: settings.ai21_model, - togetherai_model: settings.togetherai_model, mistralai_model: settings.mistralai_model, google_model: settings.google_model, temperature: settings.temp_openai, @@ -2914,7 +2904,6 @@ function onSettingsPresetChange() { openrouter_group_models: ['#openrouter_group_models', 'openrouter_group_models', false], openrouter_sort_models: ['#openrouter_sort_models', 'openrouter_sort_models', false], ai21_model: ['#model_ai21_select', 'ai21_model', false], - togetherai_model: ['#model_togetherai_select', 'togetherai_model', false], mistralai_model: ['#model_mistralai_select', 'mistralai_model', false], google_model: ['#model_google_select', 'google_model', false], openai_max_context: ['#openai_max_context', 'openai_max_context', false], @@ -3094,12 +3083,7 @@ async function onModelChange() { console.log('AI21 model changed to', value); oai_settings.ai21_model = value; } - - if ($(this).is('#model_togetherai_select')) { - 
console.log('TogetherAI model changed to', value); - oai_settings.togetherai_model = value; - } - + if ($(this).is('#model_google_select')) { console.log('Google model changed to', value); oai_settings.google_model = value; @@ -3204,14 +3188,6 @@ async function onModelChange() { } } - // not sure if this is enough - if (oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI) { - $('#openai_max_context').attr('max', max_2k); // assuming togethercomputer/GPT-NeoXT-Chat-Base-20B - oai_settings.openai_max_context = Math.min(oai_settings.openai_max_context, Number($('#openai_max_context').attr('max'))); - $('#openai_max_context').val(oai_settings.openai_max_context).trigger('input'); - - } - if (oai_settings.chat_completion_source == chat_completion_sources.OPENAI) { $('#openai_max_context').attr('max', getMaxContextOpenAI(value)); oai_settings.openai_max_context = Math.min(oai_settings.openai_max_context, Number($('#openai_max_context').attr('max'))); @@ -3394,18 +3370,6 @@ async function onConnectButtonClick(e) { } } - if (oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI) { - const api_key_togetherai = String($('#api_key_togetherai').val()).trim(); - - if (api_key_togetherai.length) { - await writeSecret(SECRET_KEYS.TOGETHERAI, api_key_togetherai); - } - - if (!secret_state[SECRET_KEYS.TOGETHERAI]) { - console.log('No secret key saved for TogetherAI'); - } - } - if (oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI) { const api_key_mistralai = String($('#api_key_mistralai').val()).trim(); @@ -3451,9 +3415,6 @@ function toggleChatCompletionForms() { else if (oai_settings.chat_completion_source == chat_completion_sources.AI21) { $('#model_ai21_select').trigger('change'); } - else if (oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI) { - $('#model_togetherai_select').trigger('change'); - } else if (oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI) { 
$('#model_mistralai_select').trigger('change'); } @@ -3834,7 +3795,6 @@ $(document).ready(async function () { $('#openrouter_group_models').on('change', onOpenrouterModelSortChange); $('#openrouter_sort_models').on('change', onOpenrouterModelSortChange); $('#model_ai21_select').on('change', onModelChange); - $('#model_togetherai_select').on('change', onModelChange); $('#model_mistralai_select').on('change', onModelChange); $('#settings_preset_openai').on('change', onSettingsPresetChange); $('#new_oai_preset').on('click', onNewPresetClick); diff --git a/public/scripts/textgen-models.js b/public/scripts/textgen-models.js new file mode 100644 index 000000000..03d908a7a --- /dev/null +++ b/public/scripts/textgen-models.js @@ -0,0 +1,120 @@ +import { setGenerationParamsFromPreset } from '../script.js'; +import { isMobile } from './RossAscends-mods.js'; +import { textgenerationwebui_settings as textgen_settings } from './textgen-settings.js'; + +let mancerModels = []; +let togetherModels = []; + +export async function loadTogetherAIModels(data) { + if (!Array.isArray(data)) { + console.error('Invalid Together AI models data', data); + return; + } + + togetherModels = data; + + $('#model_togetherai_select').empty(); + for (const model of data) { + // Hey buddy, I think you've got the wrong door. 
+ if (model.display_type === 'image') { + continue; + } + + const option = document.createElement('option'); + option.value = model.name; + option.text = model.display_name; + option.selected = model.name === textgen_settings.togetherai_model; + $('#model_togetherai_select').append(option); + } +} + +export async function loadMancerModels(data) { + if (!Array.isArray(data)) { + console.error('Invalid Mancer models data', data); + return; + } + + mancerModels = data; + + $('#mancer_model').empty(); + for (const model of data) { + const option = document.createElement('option'); + option.value = model.id; + option.text = model.name; + option.selected = model.id === textgen_settings.mancer_model; + $('#mancer_model').append(option); + } +} + +function onMancerModelSelect() { + const modelId = String($('#mancer_model').val()); + textgen_settings.mancer_model = modelId; + $('#api_button_textgenerationwebui').trigger('click'); + + const limits = mancerModels.find(x => x.id === modelId)?.limits; + setGenerationParamsFromPreset({ max_length: limits.context, genamt: limits.completion }); +} + + +function onTogetherModelSelect() { + const modelName = String($('#model_togetherai_select').val()); + textgen_settings.togetherai_model = modelName; + $('#api_button_textgenerationwebui').trigger('click'); + const model = togetherModels.find(x => x.name === modelName); + setGenerationParamsFromPreset({ max_length: model.context_length }); +} + +function getMancerModelTemplate(option) { + const model = mancerModels.find(x => x.id === option?.element?.value); + + if (!option.id || !model) { + return option.text; + } + + const creditsPerPrompt = (model.limits?.context - model.limits?.completion) * model.pricing?.prompt; + const creditsPerCompletion = model.limits?.completion * model.pricing?.completion; + const creditsTotal = Math.round(creditsPerPrompt + creditsPerCompletion).toFixed(0); + + return $((` +
+
${DOMPurify.sanitize(model.name)} | ${model.limits?.context} ctx / ${model.limits?.completion} res | Credits per request (max): ${creditsTotal}
+
+ `)); +} + +function getTogetherModelTemplate(option) { + const model = togetherModels.find(x => x.name === option?.element?.value); + + if (!option.id || !model) { + return option.text; + } + + return $((` +
+
${DOMPurify.sanitize(model.name)} | ${model.context_length || '???'} tokens
+
${DOMPurify.sanitize(model.description)}
+
+ `)); +} + +jQuery(function () { + $('#mancer_model').on('change', onMancerModelSelect); + $('#model_togetherai_select').on('change', onTogetherModelSelect); + + if (!isMobile()) { + $('#mancer_model').select2({ + placeholder: 'Select a model', + searchInputPlaceholder: 'Search models...', + searchInputCssClass: 'text_pole', + width: '100%', + templateResult: getMancerModelTemplate, + }); + $('#model_togetherai_select').select2({ + placeholder: 'Select a model', + searchInputPlaceholder: 'Search models...', + searchInputCssClass: 'text_pole', + width: '100%', + templateResult: getTogetherModelTemplate, + }); + } +}); diff --git a/public/scripts/textgen-settings.js b/public/scripts/textgen-settings.js index 58a3aba62..756cdd4a2 100644 --- a/public/scripts/textgen-settings.js +++ b/public/scripts/textgen-settings.js @@ -31,15 +31,17 @@ export const textgen_types = { APHRODITE: 'aphrodite', TABBY: 'tabby', KOBOLDCPP: 'koboldcpp', + TOGETHERAI: 'togetherai', }; -const { MANCER, APHRODITE } = textgen_types; +const { MANCER, APHRODITE, TOGETHERAI } = textgen_types; // Maybe let it be configurable in the future? // (7 days later) The future has come. const MANCER_SERVER_KEY = 'mancer_server'; const MANCER_SERVER_DEFAULT = 'https://neuro.mancer.tech'; -export let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEFAULT; +let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? 
MANCER_SERVER_DEFAULT; +let TOGETHERAI_SERVER = 'https://api.together.xyz'; const KOBOLDCPP_ORDER = [6, 0, 1, 3, 4, 2, 5]; const settings = { @@ -89,6 +91,7 @@ const settings = { //prompt_log_probs_aphrodite: 0, type: textgen_types.OOBA, mancer_model: 'mytholite', + togetherai_model: 'Gryphe/MythoMax-L2-13b', legacy_api: false, sampler_order: KOBOLDCPP_ORDER, n: 1, @@ -164,8 +167,8 @@ async function selectPreset(name) { function formatTextGenURL(value) { try { - // Mancer doesn't need any formatting (it's hardcoded) - if (settings.type === MANCER) { + // Mancer/Together doesn't need any formatting (it's hardcoded) + if (settings.type === MANCER || settings.type === TOGETHERAI) { return value; } @@ -546,6 +549,10 @@ function getModel() { return settings.mancer_model; } + if (settings.type === TOGETHERAI) { + return settings.togetherai_model; + } + if (settings.type === APHRODITE) { return online_status; } @@ -553,6 +560,18 @@ function getModel() { return undefined; } +export function getTextGenServer() { + if (settings.type === MANCER) { + return MANCER_SERVER; + } + + if (settings.type === TOGETHERAI) { + return TOGETHERAI_SERVER; + } + + return api_server_textgenerationwebui; +} + export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, isContinue, cfgValues, type) { const canMultiSwipe = !isContinue && !isImpersonate && type !== 'quiet'; let APIflags = { @@ -590,10 +609,8 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, toIntArray(getCustomTokenBans()) : getCustomTokenBans(), 'api_type': settings.type, - 'api_server': settings.type === MANCER ? - MANCER_SERVER : - api_server_textgenerationwebui, - 'legacy_api': settings.legacy_api && settings.type !== MANCER, + 'api_server': getTextGenServer(), + 'legacy_api': settings.legacy_api && settings.type !== MANCER && settings.type !== TOGETHERAI, 'sampler_order': settings.type === textgen_types.KOBOLDCPP ? 
settings.sampler_order : undefined, diff --git a/public/scripts/tokenizers.js b/public/scripts/tokenizers.js index 32cd44b5e..2c9152187 100644 --- a/public/scripts/tokenizers.js +++ b/public/scripts/tokenizers.js @@ -6,7 +6,7 @@ import { getStringHash } from './utils.js'; import { kai_flags } from './kai-settings.js'; import { textgen_types, textgenerationwebui_settings as textgen_settings } from './textgen-settings.js'; -const { OOBA, TABBY, KOBOLDCPP, MANCER } = textgen_types; +const { OOBA, TABBY, KOBOLDCPP, MANCER, TOGETHERAI } = textgen_types; export const CHARACTERS_PER_TOKEN_RATIO = 3.35; const TOKENIZER_WARNING_KEY = 'tokenizationWarningShown'; @@ -540,7 +540,8 @@ function getTextgenAPITokenizationParams(str) { url: api_server_textgenerationwebui, legacy_api: textgen_settings.legacy_api && - textgen_settings.type !== MANCER, + textgen_settings.type !== MANCER && + textgen_settings.type !== TOGETHERAI, }; } diff --git a/src/additional-headers.js b/src/additional-headers.js index 61ea1790d..de8b0d91a 100644 --- a/src/additional-headers.js +++ b/src/additional-headers.js @@ -11,6 +11,14 @@ function getMancerHeaders() { }) : {}; } +function getTogetherAIHeaders() { + const apiKey = readSecret(SECRET_KEYS.TOGETHERAI); + + return apiKey ? ({ + 'Authorization': `Bearer ${apiKey}`, + }) : {}; +} + function getAphroditeHeaders() { const apiKey = readSecret(SECRET_KEYS.APHRODITE); @@ -58,6 +66,9 @@ function setAdditionalHeaders(request, args, server) { case TEXTGEN_TYPES.TABBY: headers = getTabbyHeaders(); break; + case TEXTGEN_TYPES.TOGETHERAI: + headers = getTogetherAIHeaders(); + break; default: headers = server ? 
getOverrideHeaders((new URL(server))?.host) : {}; break; diff --git a/src/constants.js b/src/constants.js index 2f04b4c99..509326bc4 100644 --- a/src/constants.js +++ b/src/constants.js @@ -158,7 +158,6 @@ const CHAT_COMPLETION_SOURCES = { SCALE: 'scale', OPENROUTER: 'openrouter', AI21: 'ai21', - TOGETHERAI: 'togetherai', MAKERSUITE: 'makersuite', MISTRALAI: 'mistralai', }; @@ -172,8 +171,21 @@ const TEXTGEN_TYPES = { APHRODITE: 'aphrodite', TABBY: 'tabby', KOBOLDCPP: 'koboldcpp', + TOGETHERAI: 'togetherai', }; +// https://docs.together.ai/reference/completions +const TOGETHERAI_KEYS = [ + 'model', + 'prompt', + 'max_tokens', + 'temperature', + 'top_p', + 'top_k', + 'repetition_penalty', + 'stream', +]; + const AVATAR_WIDTH = 400; const AVATAR_HEIGHT = 600; @@ -187,4 +199,5 @@ module.exports = { CHAT_COMPLETION_SOURCES, AVATAR_WIDTH, AVATAR_HEIGHT, + TOGETHERAI_KEYS, }; diff --git a/src/endpoints/backends/text-completions.js b/src/endpoints/backends/text-completions.js index 71387eefd..7e9a94f74 100644 --- a/src/endpoints/backends/text-completions.js +++ b/src/endpoints/backends/text-completions.js @@ -1,8 +1,9 @@ const express = require('express'); const fetch = require('node-fetch').default; +const _ = require('lodash'); const { jsonParser } = require('../../express-common'); -const { TEXTGEN_TYPES } = require('../../constants'); +const { TEXTGEN_TYPES, TOGETHERAI_KEYS } = require('../../constants'); const { forwardFetchResponse } = require('../../util'); const { setAdditionalHeaders } = require('../../additional-headers'); @@ -46,6 +47,9 @@ router.post('/status', jsonParser, async function (request, response) { case TEXTGEN_TYPES.TABBY: url += '/v1/model/list'; break; + case TEXTGEN_TYPES.TOGETHERAI: + url += '/api/models?&info'; + break; } } @@ -56,13 +60,18 @@ router.post('/status', jsonParser, async function (request, response) { return response.status(400); } - const data = await modelsReply.json(); + let data = await modelsReply.json(); if 
(request.body.legacy_api) { console.log('Legacy API response:', data); return response.send({ result: data?.result }); } + // Rewrap to OAI-like response + if (request.body.api_type === TEXTGEN_TYPES.TOGETHERAI && Array.isArray(data)) { + data = { data: data.map(x => ({ id: x.name, ...x })) }; + } + if (!Array.isArray(data.data)) { console.log('Models response is not an array.'); return response.status(400); @@ -145,6 +154,7 @@ router.post('/generate', jsonParser, async function (request, response_generate) case TEXTGEN_TYPES.OOBA: case TEXTGEN_TYPES.TABBY: case TEXTGEN_TYPES.KOBOLDCPP: + case TEXTGEN_TYPES.TOGETHERAI: url += '/v1/completions'; break; case TEXTGEN_TYPES.MANCER: @@ -163,6 +173,15 @@ router.post('/generate', jsonParser, async function (request, response_generate) setAdditionalHeaders(request, args, baseUrl); + if (request.body.api_type === TEXTGEN_TYPES.TOGETHERAI) { + const stop = Array.isArray(request.body.stop) ? request.body.stop[0] : ''; + request.body = _.pickBy(request.body, (_, key) => TOGETHERAI_KEYS.includes(key)); + if (typeof stop === 'string' && stop.length > 0) { + request.body.stop = stop; + } + args.body = JSON.stringify(request.body); + } + if (request.body.stream) { const completionsStream = await fetch(url, args); // Pipe remote SSE stream to Express response diff --git a/src/endpoints/vectors.js b/src/endpoints/vectors.js index 03ab4af9a..45d4d55a6 100644 --- a/src/endpoints/vectors.js +++ b/src/endpoints/vectors.js @@ -12,7 +12,6 @@ const { jsonParser } = require('../express-common'); */ async function getVector(source, text) { switch (source) { - case 'togetherai': case 'mistral': case 'openai': return require('../openai-vectors').getOpenAIVector(text, source); diff --git a/src/openai-vectors.js b/src/openai-vectors.js index c5ee1a7e9..3b19a4c96 100644 --- a/src/openai-vectors.js +++ b/src/openai-vectors.js @@ -12,11 +12,6 @@ const SOURCES = { url: 'api.openai.com', model: 'text-embedding-ada-002', }, - 'togetherai': { - 
secretKey: SECRET_KEYS.TOGETHERAI, - url: 'api.togetherai.xyz', - model: 'togethercomputer/GPT-NeoXT-Chat-Base-20B', - }, }; /** @@ -26,7 +21,6 @@ const SOURCES = { * @returns {Promise} - The vector for the text */ async function getOpenAIVector(text, source) { - const config = SOURCES[source]; if (!config) { From 55b9ebad3f1db58cfea623d755fe2e861c7ebe6c Mon Sep 17 00:00:00 2001 From: LenAnderson Date: Sun, 17 Dec 2023 22:17:08 +0000 Subject: [PATCH 24/82] add custom placeholders for comfy workflows --- .../stable-diffusion/comfyWorkflowEditor.html | 6 +++ .../extensions/stable-diffusion/index.js | 48 +++++++++++++++++++ .../extensions/stable-diffusion/style.css | 14 ++++++ 3 files changed, 68 insertions(+) diff --git a/public/scripts/extensions/stable-diffusion/comfyWorkflowEditor.html b/public/scripts/extensions/stable-diffusion/comfyWorkflowEditor.html index 29ebb9197..f5001dba4 100644 --- a/public/scripts/extensions/stable-diffusion/comfyWorkflowEditor.html +++ b/public/scripts/extensions/stable-diffusion/comfyWorkflowEditor.html @@ -25,6 +25,12 @@ ? +
Custom
+
+ + +
+
    +
diff --git a/public/scripts/extensions/stable-diffusion/index.js b/public/scripts/extensions/stable-diffusion/index.js index e648809ac..546ad12bf 100644 --- a/public/scripts/extensions/stable-diffusion/index.js +++ b/public/scripts/extensions/stable-diffusion/index.js @@ -16,6 +16,7 @@ import { user_avatar, getCharacterAvatar, formatCharacterAvatar, + substituteParams, } from '../../../script.js'; import { getApiUrl, getContext, extension_settings, doExtrasFetch, modules, renderExtensionTemplate } from '../../extensions.js'; import { selected_group } from '../../group-chats.js'; @@ -2180,6 +2181,9 @@ async function generateComfyImage(prompt) { placeholders.forEach(ph => { workflow = workflow.replace(`"%${ph}%"`, JSON.stringify(extension_settings.sd[ph])); }); + (extension_settings.sd.comfy_placeholders ?? []).forEach(ph => { + workflow = workflow.replace(`"%${ph.find}%"`, JSON.stringify(substituteParams(ph.replace))); + }); console.log(`{ "prompt": ${workflow} }`); @@ -2216,6 +2220,50 @@ async function onComfyOpenWorkflowEditorClick() { }; $('#sd_comfy_workflow_editor_name').text(extension_settings.sd.comfy_workflow); $('#sd_comfy_workflow_editor_workflow').val(workflow); + const addPlaceholderDom = (placeholder) => { + const el = $(` +
  • + ⊘ + "%${placeholder.find}%"
    +
    + +
  • + `); + $('#sd_comfy_workflow_editor_placeholder_list_custom').append(el); + el.find('.sd_comfy_workflow_editor_custom_find').val(placeholder.find); + el.find('.sd_comfy_workflow_editor_custom_find').on('input', function() { + placeholder.find = this.value; + el.find('.sd_comfy_workflow_editor_custom_final').text(`"%${this.value}%"`); + el.attr('data-placeholder', `${this.value}`); + checkPlaceholders(); + saveSettingsDebounced(); + }); + el.find('.sd_comfy_workflow_editor_custom_replace').val(placeholder.replace); + el.find('.sd_comfy_workflow_editor_custom_replace').on('input', function() { + placeholder.replace = this.value; + saveSettingsDebounced(); + }); + el.find('.sd_comfy_workflow_editor_custom_remove').on('click', () => { + el.remove(); + extension_settings.sd.comfy_placeholders.splice(extension_settings.sd.comfy_placeholders.indexOf(placeholder)); + saveSettingsDebounced(); + }); + }; + $('#sd_comfy_workflow_editor_placeholder_add').on('click', () => { + if (!extension_settings.sd.comfy_placeholders) { + extension_settings.sd.comfy_placeholders = []; + } + const placeholder = { + find: '', + replace: '', + }; + extension_settings.sd.comfy_placeholders.push(placeholder); + addPlaceholderDom(placeholder); + saveSettingsDebounced(); + }); + (extension_settings.sd.comfy_placeholders ?? 
[]).forEach(placeholder=>{ + addPlaceholderDom(placeholder); + }); checkPlaceholders(); $('#sd_comfy_workflow_editor_workflow').on('input', checkPlaceholders); if (await popupResult) { diff --git a/public/scripts/extensions/stable-diffusion/style.css b/public/scripts/extensions/stable-diffusion/style.css index 7b9fdd551..139cf3f09 100644 --- a/public/scripts/extensions/stable-diffusion/style.css +++ b/public/scripts/extensions/stable-diffusion/style.css @@ -82,3 +82,17 @@ .sd_comfy_workflow_editor_placeholder_list>li>.notes-link { cursor: help; } + +.sd_comfy_workflow_editor_placeholder_list input { + font-size: inherit; + margin: 0; +} +.sd_comfy_workflow_editor_custom_remove, #sd_comfy_workflow_editor_placeholder_add { + cursor: pointer; + font-weight: bold; + width: 1em; + opacity: 0.5; + &:hover { + opacity: 1; + } +} From 56b9398353b861500dc376b75890e09dc41c31d4 Mon Sep 17 00:00:00 2001 From: LenAnderson Date: Sun, 17 Dec 2023 22:24:22 +0000 Subject: [PATCH 25/82] add slash command to change comfy workflows --- public/scripts/extensions/stable-diffusion/index.js | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/public/scripts/extensions/stable-diffusion/index.js b/public/scripts/extensions/stable-diffusion/index.js index 546ad12bf..5328cd1f5 100644 --- a/public/scripts/extensions/stable-diffusion/index.js +++ b/public/scripts/extensions/stable-diffusion/index.js @@ -25,6 +25,7 @@ import { getMessageTimeStamp, humanizedDateTime } from '../../RossAscends-mods.j import { SECRET_KEYS, secret_state } from '../../secrets.js'; import { getNovelUnlimitedImageGeneration, getNovelAnlas, loadNovelSubscriptionData } from '../../nai-settings.js'; import { getMultimodalCaption } from '../shared.js'; +import { registerSlashCommand } from '../../slash-commands.js'; export { MODULE_NAME }; // Wraps a string into monospace font-face span @@ -831,6 +832,11 @@ function onComfyWorkflowChange() { extension_settings.sd.comfy_workflow = 
$('#sd_comfy_workflow').find(':selected').val(); saveSettingsDebounced(); } +function changeComfyWorkflow(_, name) { + extension_settings.sd.comfy_workflow = name.replace(/(\.json)?$/i, '.json'); + $('#sd_comfy_workflow').val(extension_settings.sd.comfy_workflow); + saveSettingsDebounced(); +} async function validateAutoUrl() { try { @@ -2530,6 +2536,7 @@ $('#sd_dropdown [id]').on('click', function () { jQuery(async () => { getContext().registerSlashCommand('imagine', generatePicture, ['sd', 'img', 'image'], helpString, true, true); + registerSlashCommand('imagine-comfy-workflow', changeComfyWorkflow, ['icw'], '(workflowName) - change the workflow to be used for image generation with ComfyUI, e.g. /imagine-comfy-workflow MyWorkflow') $('#extensions_settings').append(renderExtensionTemplate('stable-diffusion', 'settings', defaultSettings)); $('#sd_source').on('change', onSourceChange); From 796ab7eff8a06cb2c0c5b952c59ef6a4b70f8b76 Mon Sep 17 00:00:00 2001 From: LenAnderson Date: Sun, 17 Dec 2023 22:57:10 +0000 Subject: [PATCH 26/82] validate /icw --- public/scripts/extensions/stable-diffusion/index.js | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/public/scripts/extensions/stable-diffusion/index.js b/public/scripts/extensions/stable-diffusion/index.js index 5328cd1f5..344a3a23c 100644 --- a/public/scripts/extensions/stable-diffusion/index.js +++ b/public/scripts/extensions/stable-diffusion/index.js @@ -832,10 +832,15 @@ function onComfyWorkflowChange() { extension_settings.sd.comfy_workflow = $('#sd_comfy_workflow').find(':selected').val(); saveSettingsDebounced(); } -function changeComfyWorkflow(_, name) { - extension_settings.sd.comfy_workflow = name.replace(/(\.json)?$/i, '.json'); - $('#sd_comfy_workflow').val(extension_settings.sd.comfy_workflow); - saveSettingsDebounced(); +async function changeComfyWorkflow(_, name) { + name = name.replace(/(\.json)?$/i, '.json'); + if ($(`#sd_comfy_workflow > [value="${name}"]`).length > 0) { 
+ extension_settings.sd.comfy_workflow = name; + $('#sd_comfy_workflow').val(extension_settings.sd.comfy_workflow); + saveSettingsDebounced(); + } else { + toastr.error(`ComfyUI Workflow "${name}" does not exist.`); + } } async function validateAutoUrl() { From f249ff8b2064ae1324391c6bb701e96c7eee4109 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Mon, 18 Dec 2023 01:39:37 +0200 Subject: [PATCH 27/82] Fix slash commands for offscreen group members --- public/scripts/group-chats.js | 4 +-- public/scripts/slash-commands.js | 42 +++++++++++++++++++++++++++----- 2 files changed, 38 insertions(+), 8 deletions(-) diff --git a/public/scripts/group-chats.js b/public/scripts/group-chats.js index 7108a9914..bfd49f7c8 100644 --- a/public/scripts/group-chats.js +++ b/public/scripts/group-chats.js @@ -1108,7 +1108,7 @@ function printGroupCandidates() { showNavigator: true, showSizeChanger: true, pageSize: Number(localStorage.getItem(storageKey)) || 5, - sizeChangerOptions: [5, 10, 25, 50, 100, 200], + sizeChangerOptions: [5, 10, 25, 50, 100, 200, 500, 1000], afterSizeSelectorChange: function (e) { localStorage.setItem(storageKey, e.target.value); }, @@ -1135,7 +1135,7 @@ function printGroupMembers() { showNavigator: true, showSizeChanger: true, pageSize: Number(localStorage.getItem(storageKey)) || 5, - sizeChangerOptions: [5, 10, 25, 50, 100, 200], + sizeChangerOptions: [5, 10, 25, 50, 100, 200, 500, 1000], afterSizeSelectorChange: function (e) { localStorage.setItem(storageKey, e.target.value); }, diff --git a/public/scripts/slash-commands.js b/public/scripts/slash-commands.js index 1768762db..23ca73a56 100644 --- a/public/scripts/slash-commands.js +++ b/public/scripts/slash-commands.js @@ -842,6 +842,36 @@ async function unhideMessageCallback(_, arg) { return ''; } +/** + * Copium for running group actions when the member is offscreen. 
+ * @param {number} chid - character ID + * @param {string} action - one of 'enable', 'disable', 'up', 'down', 'peek', 'remove' + * @returns {void} + */ +function performGroupMemberAction(chid, action) { + const memberSelector = `.group_member[chid="${chid}"]`; + // Do not optimize. Paginator gets recreated on every action + const paginationSelector = '#rm_group_members_pagination'; + const pageSizeSelector = '#rm_group_members_pagination select'; + let wasOffscreen = false; + let paginationValue = null; + let pageValue = null; + + if ($(memberSelector).length === 0) { + wasOffscreen = true; + paginationValue = Number($(pageSizeSelector).val()); + pageValue = $(paginationSelector).pagination('getCurrentPageNum'); + $(pageSizeSelector).val($(pageSizeSelector).find('option').last().val()).trigger('change'); + } + + $(memberSelector).find(`[data-action="${action}"]`).trigger('click'); + + if (wasOffscreen) { + $(pageSizeSelector).val(paginationValue).trigger('change'); + $(paginationSelector).pagination('go', pageValue); + } +} + async function disableGroupMemberCallback(_, arg) { if (!selected_group) { toastr.warning('Cannot run /disable command outside of a group chat.'); @@ -855,7 +885,7 @@ async function disableGroupMemberCallback(_, arg) { return ''; } - $(`.group_member[chid="${chid}"] [data-action="disable"]`).trigger('click'); + performGroupMemberAction(chid, 'disable'); return ''; } @@ -872,7 +902,7 @@ async function enableGroupMemberCallback(_, arg) { return ''; } - $(`.group_member[chid="${chid}"] [data-action="enable"]`).trigger('click'); + performGroupMemberAction(chid, 'enable'); return ''; } @@ -889,7 +919,7 @@ async function moveGroupMemberUpCallback(_, arg) { return ''; } - $(`.group_member[chid="${chid}"] [data-action="up"]`).trigger('click'); + performGroupMemberAction(chid, 'up'); return ''; } @@ -906,7 +936,7 @@ async function moveGroupMemberDownCallback(_, arg) { return ''; } - $(`.group_member[chid="${chid}"] 
[data-action="down"]`).trigger('click'); + performGroupMemberAction(chid, 'down'); return ''; } @@ -928,7 +958,7 @@ async function peekCallback(_, arg) { return ''; } - $(`.group_member[chid="${chid}"] [data-action="view"]`).trigger('click'); + performGroupMemberAction(chid, 'peek'); return ''; } @@ -950,7 +980,7 @@ async function removeGroupMemberCallback(_, arg) { return ''; } - $(`.group_member[chid="${chid}"] [data-action="remove"]`).trigger('click'); + performGroupMemberAction(chid, 'remove'); return ''; } From 50ece13752a2abe0f697717c3960303eac74d21b Mon Sep 17 00:00:00 2001 From: DonMoralez Date: Mon, 18 Dec 2023 02:25:17 +0200 Subject: [PATCH 28/82] Add restore button, def hum message, claude check --- public/index.html | 9 +++++++-- public/scripts/openai.js | 19 +++++++++++++------ src/endpoints/backends/chat-completions.js | 13 ++++++++++--- src/endpoints/prompt-converters.js | 21 ++++++--------------- 4 files changed, 36 insertions(+), 26 deletions(-) diff --git a/public/index.html b/public/index.html index 2b4951700..a504cc961 100644 --- a/public/index.html +++ b/public/index.html @@ -1538,8 +1538,13 @@ Exclude the 'Human: ' prefix from being added to the beginning of the prompt. Instead, place it between the sysprompt and the first message with the role 'assistant'(right before 'Chat History', by default).
    - Human: first message - +
    + Human: first message +
    +
    +
    +
    +
    diff --git a/public/scripts/openai.js b/public/scripts/openai.js index 82f5865d3..9e574c7d0 100644 --- a/public/scripts/openai.js +++ b/public/scripts/openai.js @@ -90,6 +90,7 @@ const default_wi_format = '[Details of the fictional world the RP is set in:\n{0 const default_new_chat_prompt = '[Start a new Chat]'; const default_new_group_chat_prompt = '[Start a new group chat. Group members: {{group}}]'; const default_new_example_chat_prompt = '[Start a new Chat]'; +const default_claude_human_sysprompt_message = 'Let\'s get started. Please generate your response based on the information and instructions provided above.'; const default_continue_nudge_prompt = '[Continue the following message. Do not include ANY parts of the original message. Use capitalization and punctuation as if your reply is a part of the original message: {{lastChatMessage}}]'; const default_bias = 'Default (none)'; const default_personality_format = '[{{char}}\'s personality: {{personality}}]'; @@ -224,7 +225,7 @@ const default_settings = { show_external_models: false, proxy_password: '', assistant_prefill: '', - human_sysprompt_message: '', + human_sysprompt_message: default_claude_human_sysprompt_message, use_ai21_tokenizer: false, use_google_tokenizer: false, exclude_assistant: false, @@ -282,7 +283,7 @@ const oai_settings = { show_external_models: false, proxy_password: '', assistant_prefill: '', - human_sysprompt_message: '', + human_sysprompt_message: default_claude_human_sysprompt_message, use_ai21_tokenizer: false, use_google_tokenizer: false, exclude_assistant: false, @@ -2348,7 +2349,7 @@ function loadOpenAISettings(data, settings) { $('#api_url_scale').val(oai_settings.api_url_scale); $('#openai_proxy_password').val(oai_settings.proxy_password); $('#claude_assistant_prefill').val(oai_settings.assistant_prefill); - $('#claude_human_sysprompt_message').val(oai_settings.human_sysprompt_message); + $('#claude_human_sysprompt_textarea').val(oai_settings.human_sysprompt_message); 
$('#openai_image_inlining').prop('checked', oai_settings.image_inlining); $('#openai_bypass_status_check').prop('checked', oai_settings.bypass_status_check); @@ -2941,7 +2942,7 @@ function onSettingsPresetChange() { show_external_models: ['#openai_show_external_models', 'show_external_models', true], proxy_password: ['#openai_proxy_password', 'proxy_password', false], assistant_prefill: ['#claude_assistant_prefill', 'assistant_prefill', false], - human_sysprompt_message: ['#claude_human_sysprompt_message', 'human_sysprompt_message', false], + human_sysprompt_message: ['#claude_human_sysprompt_textarea', 'human_sysprompt_message', false], use_ai21_tokenizer: ['#use_ai21_tokenizer', 'use_ai21_tokenizer', true], use_google_tokenizer: ['#use_google_tokenizer', 'use_google_tokenizer', true], exclude_assistant: ['#exclude_assistant', 'exclude_assistant', true], @@ -3679,6 +3680,12 @@ $(document).ready(async function () { saveSettingsDebounced(); }); + $('#claude_human_sysprompt_message_restore').on('click', function () { + oai_settings.human_sysprompt_message = default_claude_human_sysprompt_message; + $('#claude_human_sysprompt_textarea').val(oai_settings.human_sysprompt_message); + saveSettingsDebounced(); + }); + $('#newgroupchat_prompt_restore').on('click', function () { oai_settings.new_group_chat_prompt = default_new_group_chat_prompt; $('#newgroupchat_prompt_textarea').val(oai_settings.new_group_chat_prompt); @@ -3766,8 +3773,8 @@ $(document).ready(async function () { saveSettingsDebounced(); }); - $('#claude_human_sysprompt_message').on('input', function () { - oai_settings.human_sysprompt_message = String($(this).val()); + $('#claude_human_sysprompt_textarea').on('input', function () { + oai_settings.human_sysprompt_message = String($('#claude_human_sysprompt_textarea').val()); saveSettingsDebounced(); }); diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js index 0c765a900..4a0b2ece2 100644 --- 
a/src/endpoints/backends/chat-completions.js +++ b/src/endpoints/backends/chat-completions.js @@ -60,13 +60,20 @@ async function sendClaudeRequest(request, response) { } if (!humanFound) { - console.log(chalk.red(`${divider}\nWarning: No Human prefix found in the prompt.\n${divider}`)); + console.log(chalk.red(`${divider}\nWarning: No 'Human:' prefix found in the prompt.\n${divider}`)); } if (!assistantFound) { - console.log(chalk.red(`${divider}\nWarning: No Assistant prefix found in the prompt.\n${divider}`)); + console.log(chalk.red(`${divider}\nWarning: No 'Assistant: ' prefix found in the prompt.\n${divider}`)); + } + if (!sequence[0].startsWith('Human:')) { + console.log(chalk.red(`${divider}\nWarning: The messages sequence should start with 'Human:' prefix.\nMake sure you have 'Human:' prefix at the very beginning of the prompt, or after the system prompt.\n${divider}`)); } if (humanErrorCount > 0 || assistantErrorCount > 0) { - console.log(chalk.red(`${divider}\nWarning: Detected incorrect Prefix sequence(s).\nIncorrect 'Human:' prefix(es): ${humanErrorCount}.\nIncorrect 'Assistant:' prefix(es): ${assistantErrorCount}.\nCheck the prompt above and fix it in the sillytavern.\nThe correct sequence should look like this:\nSystem prompt message <--(for new sysprompt format only)\n <------------------(Every message start with Assistant:/Human:prefix should have one empty line above)\nHuman:\n\nAssistant:\n\...\n\nHuman:\n\nAssistant:\n${divider}`)); + console.log(chalk.red(`${divider}\nWarning: Detected incorrect Prefix sequence(s).`)); + console.log(chalk.red(`Incorrect "Human:" prefix(es): ${humanErrorCount}.\nIncorrect "Assistant: " prefix(es): ${assistantErrorCount}.`)); + console.log(chalk.red('Check the prompt above and fix it in the sillytavern.')); + console.log(chalk.red('\nThe correct sequence should look like this:\nSystem prompt <-(for the sysprompt format only, else have 2 empty lines above the first human\'s message.)')); + console.log(chalk.red(` 
<-----(Each message beginning with the "Assistant:/Human:" prefix must have one empty line above.)\nHuman:\n\nAssistant:\n...\n\nHuman:\n\nAssistant:\n${divider}`)); } const stop_sequences = ['\n\nHuman:', '\n\nSystem:', '\n\nAssistant:']; diff --git a/src/endpoints/prompt-converters.js b/src/endpoints/prompt-converters.js index ce5be3a53..f5145c324 100644 --- a/src/endpoints/prompt-converters.js +++ b/src/endpoints/prompt-converters.js @@ -24,12 +24,8 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, } // Find the index of the first message with an assistant role and check for a "'user' role/Human:" before it. let hasUser = false; - let hasAssist = false; const firstAssistantIndex = messages.findIndex((message, i) => { if (i >= 0 && (message.role === 'user' || message.content.includes('\n\nHuman: '))) { - if (message.content.includes('\n\nAssistant: ')) { - hasAssist = true; - } hasUser = true; } return message.role === 'assistant' && i > 0; @@ -38,10 +34,10 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, // Inserts the human's message before the first the assistant one, if there are no such message or prefix found. if (withSyspromptSupport && useSystemPrompt) { messages[0].role = 'system'; - if (firstAssistantIndex > 0 && (!hasUser || (hasUser && hasAssist))) { //addSysHumanMsg for test + if (firstAssistantIndex > 0 && addSysHumanMsg && !hasUser) { messages.splice(firstAssistantIndex, 0, { role: 'user', - content: addSysHumanMsg || 'Let\'s get started.', + content: addSysHumanMsg, }); } } else { @@ -58,20 +54,15 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, // Convert messages to requestPrompt. let requestPrompt = messages.map((v, i) => { - // Claude doesn't support message names, so we'll just add them to the message content. 
- if (v.name && v.role !== 'system') { - v.content = `${v.name}: ${v.content}`; - delete v.name; - } - //let prefix = ''; // Set prefix according to the role. + // Set prefix according to the role. let prefix = { 'assistant': '\n\nAssistant: ', 'user': '\n\nHuman: ', 'system': i === 0 ? '' : v.name === 'example_assistant' ? '\n\nA: ' : v.name === 'example_user' ? '\n\nH: ' : '\n\n', 'FixHumMsg': '\n\nFirst message: ', - }[v.role] ?? '\n\n'; - - return prefix + v.content; + }[v.role] ?? ''; + // Claude doesn't support message names, so we'll just add them to the message content. + return `${prefix}${v.name && v.role !== 'system' ? `${v.name}: ` : ''}${v.content}`; }).join(''); return requestPrompt; From 58227b82fc18b6e27952fe9382e5dbdbc61852b2 Mon Sep 17 00:00:00 2001 From: DonMoralez Date: Mon, 18 Dec 2023 02:32:25 +0200 Subject: [PATCH 29/82] Update prompt-converters.js --- src/endpoints/prompt-converters.js | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/src/endpoints/prompt-converters.js b/src/endpoints/prompt-converters.js index f5145c324..3d864e1b8 100644 --- a/src/endpoints/prompt-converters.js +++ b/src/endpoints/prompt-converters.js @@ -11,7 +11,6 @@ */ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, withSyspromptSupport, useSystemPrompt, addSysHumanMsg) { - console.log(JSON.stringify(messages, null, 2)); //Prepare messages for claude. if (messages.length > 0) { messages[0].role = 'system'; @@ -30,7 +29,7 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, } return message.role === 'assistant' && i > 0; }); - // When 2.1 and 'Use system prompt" checked, switches to the system prompt format by setting the first message's role to the 'system'. + // When 2.1+ and 'Use system prompt" checked, switches to the system prompt format by setting the first message's role to the 'system'. 
// Inserts the human's message before the first the assistant one, if there are no such message or prefix found. if (withSyspromptSupport && useSystemPrompt) { messages[0].role = 'system'; @@ -50,9 +49,7 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, } } - console.log(JSON.stringify(messages, null, 2)); - - // Convert messages to requestPrompt. + // Convert messages to the prompt. let requestPrompt = messages.map((v, i) => { // Set prefix according to the role. let prefix = { From f6b59d0d3a7fe5c250857e1d088d5a177f2ebd0a Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Mon, 18 Dec 2023 02:35:02 +0200 Subject: [PATCH 30/82] Uniform command registration --- public/scripts/extensions/stable-diffusion/index.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/scripts/extensions/stable-diffusion/index.js b/public/scripts/extensions/stable-diffusion/index.js index 344a3a23c..8bb40d059 100644 --- a/public/scripts/extensions/stable-diffusion/index.js +++ b/public/scripts/extensions/stable-diffusion/index.js @@ -2540,7 +2540,7 @@ $('#sd_dropdown [id]').on('click', function () { }); jQuery(async () => { - getContext().registerSlashCommand('imagine', generatePicture, ['sd', 'img', 'image'], helpString, true, true); + registerSlashCommand('imagine', generatePicture, ['sd', 'img', 'image'], helpString, true, true); registerSlashCommand('imagine-comfy-workflow', changeComfyWorkflow, ['icw'], '(workflowName) - change the workflow to be used for image generation with ComfyUI, e.g. 
/imagine-comfy-workflow MyWorkflow') $('#extensions_settings').append(renderExtensionTemplate('stable-diffusion', 'settings', defaultSettings)); From 79ed01f4b0823e12a4064cc0e2b5477c1493c342 Mon Sep 17 00:00:00 2001 From: DonMoralez Date: Mon, 18 Dec 2023 02:44:11 +0200 Subject: [PATCH 31/82] Update chat-completions.js --- src/endpoints/backends/chat-completions.js | 1 - 1 file changed, 1 deletion(-) diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js index 4a0b2ece2..d7f5769d2 100644 --- a/src/endpoints/backends/chat-completions.js +++ b/src/endpoints/backends/chat-completions.js @@ -49,7 +49,6 @@ async function sendClaudeRequest(request, response) { let assistantFound = sequence.some(line => line.startsWith('Assistant:')); for (let i = 0; i < sequence.length - 1; i++) { - if (sequence[i].startsWith(sequence[i + 1].split(':')[0])) { if (sequence[i].startsWith('Human:')) { humanErrorCount++; From ac70a0a592395f91200411c79ef95e6a5fc51edb Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Mon, 18 Dec 2023 03:33:05 +0200 Subject: [PATCH 32/82] Add TogetherAI for image generation --- .../extensions/stable-diffusion/index.js | 78 +++++++++++++-- .../extensions/stable-diffusion/settings.html | 1 + src/endpoints/stable-diffusion.js | 96 ++++++++++++++++++- src/util.js | 16 ++++ 4 files changed, 184 insertions(+), 7 deletions(-) diff --git a/public/scripts/extensions/stable-diffusion/index.js b/public/scripts/extensions/stable-diffusion/index.js index 8bb40d059..3e8072711 100644 --- a/public/scripts/extensions/stable-diffusion/index.js +++ b/public/scripts/extensions/stable-diffusion/index.js @@ -46,6 +46,7 @@ const sources = { vlad: 'vlad', openai: 'openai', comfy: 'comfy', + togetherai: 'togetherai', }; const generationMode = { @@ -917,7 +918,7 @@ async function onModelChange() { extension_settings.sd.model = $('#sd_model').find(':selected').val(); saveSettingsDebounced(); - const 
cloudSources = [sources.horde, sources.novel, sources.openai]; + const cloudSources = [sources.horde, sources.novel, sources.openai, sources.togetherai]; if (cloudSources.includes(extension_settings.sd.source)) { return; @@ -1050,11 +1051,14 @@ async function loadSamplers() { samplers = await loadVladSamplers(); break; case sources.openai: - samplers = await loadOpenAiSamplers(); + samplers = ['N/A']; break; case sources.comfy: samplers = await loadComfySamplers(); break; + case sources.togetherai: + samplers = ['N/A']; + break; } for (const sampler of samplers) { @@ -1064,6 +1068,11 @@ async function loadSamplers() { option.selected = sampler === extension_settings.sd.sampler; $('#sd_sampler').append(option); } + + if (!extension_settings.sd.sampler && samplers.length > 0) { + extension_settings.sd.sampler = samplers[0]; + $('#sd_sampler').val(extension_settings.sd.sampler).trigger('change'); + } } async function loadHordeSamplers() { @@ -1120,10 +1129,6 @@ async function loadAutoSamplers() { } } -async function loadOpenAiSamplers() { - return ['N/A']; -} - async function loadVladSamplers() { if (!extension_settings.sd.vlad_url) { return []; @@ -1212,6 +1217,9 @@ async function loadModels() { case sources.comfy: models = await loadComfyModels(); break; + case sources.togetherai: + models = await loadTogetherAIModels(); + break; } for (const model of models) { @@ -1221,6 +1229,30 @@ async function loadModels() { option.selected = model.value === extension_settings.sd.model; $('#sd_model').append(option); } + + if (!extension_settings.sd.model && models.length > 0) { + extension_settings.sd.model = models[0].value; + $('#sd_model').val(extension_settings.sd.model).trigger('change'); + } +} + +async function loadTogetherAIModels() { + if (!secret_state[SECRET_KEYS.TOGETHERAI]) { + console.debug('TogetherAI API key is not set.'); + return []; + } + + const result = await fetch('/api/sd/together/models', { + method: 'POST', + headers: getRequestHeaders(), + }); + + if 
(result.ok) { + const data = await result.json(); + return data; + } + + return []; } async function loadHordeModels() { @@ -1434,6 +1466,9 @@ async function loadSchedulers() { case sources.openai: schedulers = ['N/A']; break; + case sources.togetherai: + schedulers = ['N/A']; + break; case sources.comfy: schedulers = await loadComfySchedulers(); break; @@ -1493,6 +1528,9 @@ async function loadVaes() { case sources.openai: vaes = ['N/A']; break; + case sources.togetherai: + vaes = ['N/A']; + break; case sources.comfy: vaes = await loadComfyVaes(); break; @@ -1873,6 +1911,9 @@ async function sendGenerationRequest(generationType, prompt, characterName = nul case sources.comfy: result = await generateComfyImage(prefixedPrompt); break; + case sources.togetherai: + result = await generateTogetherAIImage(prefixedPrompt); + break; } if (!result.data) { @@ -1895,6 +1936,29 @@ async function sendGenerationRequest(generationType, prompt, characterName = nul callback ? callback(prompt, base64Image, generationType) : sendMessage(prompt, base64Image, generationType); } +async function generateTogetherAIImage(prompt) { + const result = await fetch('/api/sd/together/generate', { + method: 'POST', + headers: getRequestHeaders(), + body: JSON.stringify({ + prompt: prompt, + negative_prompt: extension_settings.sd.negative_prompt, + model: extension_settings.sd.model, + steps: extension_settings.sd.steps, + width: extension_settings.sd.width, + height: extension_settings.sd.height, + }), + }); + + if (result.ok) { + const data = await result.json(); + return { format: 'jpg', data: data?.output?.choices?.[0]?.image_base64 }; + } else { + const text = await result.text(); + throw new Error(text); + } +} + /** * Generates an "extras" image using a provided prompt and other settings. 
* @@ -2435,6 +2499,8 @@ function isValidState() { return secret_state[SECRET_KEYS.OPENAI]; case sources.comfy: return true; + case sources.togetherai: + return secret_state[SECRET_KEYS.TOGETHERAI]; } } diff --git a/public/scripts/extensions/stable-diffusion/settings.html b/public/scripts/extensions/stable-diffusion/settings.html index 7c84c28ae..e42a24348 100644 --- a/public/scripts/extensions/stable-diffusion/settings.html +++ b/public/scripts/extensions/stable-diffusion/settings.html @@ -35,6 +35,7 @@ +
    diff --git a/src/endpoints/stable-diffusion.js b/src/endpoints/stable-diffusion.js index 5de6d9726..1054d2d6b 100644 --- a/src/endpoints/stable-diffusion.js +++ b/src/endpoints/stable-diffusion.js @@ -1,11 +1,12 @@ const express = require('express'); const fetch = require('node-fetch').default; const sanitize = require('sanitize-filename'); -const { getBasicAuthHeader, delay } = require('../util.js'); +const { getBasicAuthHeader, delay, getHexString } = require('../util.js'); const fs = require('fs'); const { DIRECTORIES } = require('../constants.js'); const writeFileAtomicSync = require('write-file-atomic').sync; const { jsonParser } = require('../express-common'); +const { readSecret, SECRET_KEYS } = require('./secrets.js'); /** * Sanitizes a string. @@ -545,6 +546,99 @@ comfy.post('/generate', jsonParser, async (request, response) => { } }); +const together = express.Router(); + +together.post('/models', jsonParser, async (_, response) => { + try { + const key = readSecret(SECRET_KEYS.TOGETHERAI); + + if (!key) { + console.log('TogetherAI key not found.'); + return response.sendStatus(400); + } + + const modelsResponse = await fetch('https://api.together.xyz/api/models', { + method: 'GET', + headers: { + 'Authorization': `Bearer ${key}`, + }, + }); + + if (!modelsResponse.ok) { + console.log('TogetherAI returned an error.'); + return response.sendStatus(500); + } + + const data = await modelsResponse.json(); + + if (!Array.isArray(data)) { + console.log('TogetherAI returned invalid data.'); + return response.sendStatus(500); + } + + const models = data + .filter(x => x.display_type === 'image') + .map(x => ({ value: x.name, text: x.display_name })); + + return response.send(models); + } catch (error) { + console.log(error); + return response.sendStatus(500); + } +}); + +together.post('/generate', jsonParser, async (request, response) => { + try { + const key = readSecret(SECRET_KEYS.TOGETHERAI); + + if (!key) { + console.log('TogetherAI key not found.'); + 
return response.sendStatus(400); + } + + console.log('TogetherAI request:', request.body); + + const result = await fetch('https://api.together.xyz/api/inference', { + method: 'POST', + body: JSON.stringify({ + request_type: 'image-model-inference', + prompt: request.body.prompt, + negative_prompt: request.body.negative_prompt, + height: request.body.height, + width: request.body.width, + model: request.body.model, + steps: request.body.steps, + n: 1, + seed: Math.floor(Math.random() * 10_000_000), // Limited to 10000 on playground, works fine with more. + sessionKey: getHexString(40), // Don't know if that's supposed to be random or not. It works either way. + }), + headers: { + 'Content-Type': 'application/json', + 'Authorization': `Bearer ${key}`, + }, + }); + + if (!result.ok) { + console.log('TogetherAI returned an error.'); + return response.sendStatus(500); + } + + const data = await result.json(); + console.log('TogetherAI response:', data); + + if (data.status !== 'finished') { + console.log('TogetherAI job failed.'); + return response.sendStatus(500); + } + + return response.send(data); + } catch (error) { + console.log(error); + return response.sendStatus(500); + } +}); + router.use('/comfy', comfy); +router.use('/together', together); module.exports = { router }; diff --git a/src/util.js b/src/util.js index be8d5135f..45c96fef1 100644 --- a/src/util.js +++ b/src/util.js @@ -105,6 +105,21 @@ function delay(ms) { return new Promise(resolve => setTimeout(resolve, ms)); } +/** + * Generates a random hex string of the given length. + * @param {number} length String length + * @returns {string} Random hex string + * @example getHexString(8) // 'a1b2c3d4' + */ +function getHexString(length) { + const chars = '0123456789abcdef'; + let result = ''; + for (let i = 0; i < length; i++) { + result += chars[Math.floor(Math.random() * chars.length)]; + } + return result; +} + /** * Extracts a file with given extension from an ArrayBuffer containing a ZIP archive. 
* @param {ArrayBuffer} archiveBuffer Buffer containing a ZIP archive @@ -404,4 +419,5 @@ module.exports = { removeOldBackups, getImages, forwardFetchResponse, + getHexString, }; From c2ad90eb2a15e9765a2fd133cda307b26c5de0c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carsten=20Kragelund=20J=C3=B8rgensen?= Date: Mon, 18 Dec 2023 13:29:27 +0100 Subject: [PATCH 33/82] fix: verify QR exists when deleting through /qr-delete --- public/scripts/extensions/quick-reply/index.js | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/public/scripts/extensions/quick-reply/index.js b/public/scripts/extensions/quick-reply/index.js index 947cc3727..6b184f192 100644 --- a/public/scripts/extensions/quick-reply/index.js +++ b/public/scripts/extensions/quick-reply/index.js @@ -798,6 +798,10 @@ async function qrDeleteCallback(args, label) { } const idx = preset.quickReplySlots.findIndex(x => x.label == label); + if (idx === -1) { + toastr.warning('Confirm you are using proper case sensitivity!', `QR with label '${label}' not found`); + return ''; + }; preset.quickReplySlots.splice(idx, 1); preset.numberOfSlots--; await fetch('/savequickreply', { From cc27bcb076bb2a90cc9d71da17b25e412048eb64 Mon Sep 17 00:00:00 2001 From: based Date: Mon, 18 Dec 2023 03:17:47 +1000 Subject: [PATCH 34/82] UNRELATED: this never happened. --- src/endpoints/backends/chat-completions.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js index b147f317c..2d68a1239 100644 --- a/src/endpoints/backends/chat-completions.js +++ b/src/endpoints/backends/chat-completions.js @@ -410,12 +410,12 @@ async function sendMistralAIRequest(request, response) { const messages = Array.isArray(request.body.messages) ? 
request.body.messages : []; const lastMsg = messages[messages.length - 1]; if (messages.length > 0 && lastMsg && (lastMsg.role === 'system' || lastMsg.role === 'assistant')) { - lastMsg.role = 'user'; if (lastMsg.role === 'assistant') { lastMsg.content = lastMsg.name + ': ' + lastMsg.content; } else if (lastMsg.role === 'system') { lastMsg.content = '[INST] ' + lastMsg.content + ' [/INST]'; } + lastMsg.role = 'user'; } //system prompts can be stacked at the start, but any futher sys prompts after the first user/assistant message will break the model From 08ea2095f84b4c675e5b25564d8d0bc704c7ebc4 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Mon, 18 Dec 2023 17:32:10 +0200 Subject: [PATCH 35/82] Refactor Novel logit bias --- public/index.html | 12 ++-- public/scripts/logit-bias.js | 126 +++++++++++++++++++++++++++++++++ public/scripts/nai-settings.js | 111 +++-------------------------- public/style.css | 12 ++-- 4 files changed, 148 insertions(+), 113 deletions(-) create mode 100644 public/scripts/logit-bias.js diff --git a/public/index.html b/public/index.html index 0bfa60876..ac25897de 100644 --- a/public/index.html +++ b/public/index.html @@ -978,7 +978,7 @@ Helps to ban or reinforce the usage of certain tokens.
    -
    +
    @@ -4272,11 +4272,11 @@
    -
    -
    - - - +
    +
    + + +
    +
    +
    + Logit Bias +
    + + Add +
    +
    +
    + Helps to ban or reinforce the usage of certain tokens. +
    +
    +
    +
    +

    CFG diff --git a/public/scripts/textgen-settings.js b/public/scripts/textgen-settings.js index 756cdd4a2..a912df259 100644 --- a/public/scripts/textgen-settings.js +++ b/public/scripts/textgen-settings.js @@ -9,6 +9,7 @@ import { setOnlineStatus, substituteParams, } from '../script.js'; +import { BIAS_CACHE, createNewLogitBiasEntry, displayLogitBias, getLogitBiasListResult } from './logit-bias.js'; import { power_user, @@ -35,6 +36,7 @@ export const textgen_types = { }; const { MANCER, APHRODITE, TOGETHERAI } = textgen_types; +const BIAS_KEY = '#textgenerationwebui_api-settings'; // Maybe let it be configurable in the future? // (7 days later) The future has come. @@ -94,6 +96,7 @@ const settings = { togetherai_model: 'Gryphe/MythoMax-L2-13b', legacy_api: false, sampler_order: KOBOLDCPP_ORDER, + logit_bias: [], n: 1, }; @@ -147,6 +150,7 @@ const setting_names = [ //'prompt_log_probs_aphrodite' 'sampler_order', 'n', + 'logit_bias', ]; async function selectPreset(name) { @@ -162,6 +166,7 @@ async function selectPreset(name) { setSettingByName(name, value, true); } setGenerationParamsFromPreset(preset); + displayLogitBias(preset.logit_bias, BIAS_KEY); saveSettingsDebounced(); } @@ -243,6 +248,42 @@ function getCustomTokenBans() { return result.filter(onlyUnique).map(x => String(x)).join(','); } +/** + * Calculates logit bias object from the logit bias list. + * @returns {object} Logit bias object + */ +function calculateLogitBias() { + if (!Array.isArray(settings.logit_bias) || settings.logit_bias.length === 0) { + return {}; + } + + const tokenizer = SENTENCEPIECE_TOKENIZERS.includes(power_user.tokenizer) ? power_user.tokenizer : tokenizers.LLAMA; + const result = {}; + + /** + * Adds bias to the logit bias object. 
+ * @param {number} bias + * @param {number[]} sequence + * @returns {object} Accumulated logit bias object + */ + function addBias(bias, sequence) { + if (sequence.length === 0) { + return; + } + + for (const logit of sequence) { + const key = String(logit); + result[key] = bias; + } + + return result; + } + + getLogitBiasListResult(settings.logit_bias, tokenizer, addBias); + + return result; +} + function loadTextGenSettings(data, loadedSettings) { textgenerationwebui_presets = convertPresets(data.textgenerationwebui_presets); textgenerationwebui_preset_names = data.textgenerationwebui_preset_names ?? []; @@ -270,6 +311,7 @@ function loadTextGenSettings(data, loadedSettings) { $('#textgen_type').val(settings.type); showTypeSpecificControls(settings.type); + displayLogitBias(settings.logit_bias, BIAS_KEY); //this is needed because showTypeSpecificControls() does not handle NOT declarations if (settings.type === textgen_types.APHRODITE) { $('[data-forAphro=False]').each(function () { @@ -415,6 +457,8 @@ jQuery(function () { saveSettingsDebounced(); }); } + + $('#textgen_logit_bias_new_entry').on('click', () => createNewLogitBiasEntry(settings.logit_bias, BIAS_KEY)); }); function showTypeSpecificControls(type) { @@ -440,6 +484,11 @@ function setSettingByName(setting, value, trigger) { return; } + if ('logit_bias' === setting) { + settings.logit_bias = Array.isArray(value) ? 
value : []; + return; + } + const isCheckbox = $(`#${setting}_textgenerationwebui`).attr('type') == 'checkbox'; const isText = $(`#${setting}_textgenerationwebui`).attr('type') == 'text' || $(`#${setting}_textgenerationwebui`).is('textarea'); if (isCheckbox) { @@ -642,6 +691,12 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, APIflags = Object.assign(APIflags, aphroditeExclusionFlags); } + if (Array.isArray(settings.logit_bias) && settings.logit_bias.length) { + const logitBias = BIAS_CACHE.get(BIAS_KEY) || calculateLogitBias(); + BIAS_CACHE.set(BIAS_KEY, logitBias); + APIflags.logit_bias = logitBias; + } + return APIflags; } diff --git a/public/style.css b/public/style.css index 2b7e5ede9..cf2f1f75c 100644 --- a/public/style.css +++ b/public/style.css @@ -3480,7 +3480,7 @@ a { width: 100%; height: 100%; opacity: 0.8; - min-height: 2.5rem; + min-height: 2.5em; } .openai_logit_bias_preset_form { From edd737e8bd6f5bec5cc8094193be9f83b25f6abc Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Mon, 18 Dec 2023 22:38:28 +0200 Subject: [PATCH 37/82] #371 Add llama.cpp inference server support --- public/img/llamacpp.svg | 39 ++++++++++++ public/index.html | 15 ++++- public/script.js | 27 +++++---- public/scripts/power-user.js | 2 + public/scripts/textgen-settings.js | 70 ++++++++++++++++------ public/scripts/tokenizers.js | 9 +-- src/constants.js | 1 + src/endpoints/backends/text-completions.js | 4 ++ src/endpoints/tokenizers.js | 6 +- 9 files changed, 136 insertions(+), 37 deletions(-) create mode 100644 public/img/llamacpp.svg diff --git a/public/img/llamacpp.svg b/public/img/llamacpp.svg new file mode 100644 index 000000000..d3237d247 --- /dev/null +++ b/public/img/llamacpp.svg @@ -0,0 +1,39 @@ + + + + + + diff --git a/public/index.html b/public/index.html index 0a9db31a9..115efc007 100644 --- a/public/index.html +++ b/public/index.html @@ -1738,6 +1738,7 @@ +

    @@ -1817,6 +1818,18 @@
    +
    + +
    +

    API URL

    + Example: http://127.0.0.1:8080 + +
    +
    -
    @@ -1779,7 +1780,11 @@

    Mancer Model

    - +
    @@ -1830,6 +1835,32 @@
    +
    + +
    +

    API URL

    + Example: http://127.0.0.1:11434 + +
    +
    +

    + Ollama Model

    + + + +
    +
    diff --git a/public/script.js b/public/script.js index 9c24d107a..dbbb50c6d 100644 --- a/public/script.js +++ b/public/script.js @@ -15,14 +15,13 @@ import { loadTextGenSettings, generateTextGenWithStreaming, getTextGenGenerationData, - formatTextGenURL, - getTextGenUrlSourceId, textgen_types, textgenerationwebui_banned_in_macros, getTextGenServer, + validateTextGenUrl, } from './scripts/textgen-settings.js'; -const { MANCER, TOGETHERAI, OOBA, APHRODITE } = textgen_types; +const { MANCER, TOGETHERAI, OOBA, APHRODITE, OLLAMA } = textgen_types; import { world_info, @@ -189,7 +188,7 @@ import { createPersona, initPersonas, selectCurrentPersona, setPersonaDescriptio import { getBackgrounds, initBackgrounds, loadBackgroundSettings, background_settings } from './scripts/backgrounds.js'; import { hideLoader, showLoader } from './scripts/loader.js'; import { BulkEditOverlay, CharacterContextMenu } from './scripts/BulkEditOverlay.js'; -import { loadMancerModels, loadTogetherAIModels } from './scripts/textgen-models.js'; +import { loadMancerModels, loadOllamaModels, loadTogetherAIModels } from './scripts/textgen-models.js'; import { appendFileContent, hasPendingFileAttachment, populateFileAttachment } from './scripts/chats.js'; import { replaceVariableMacros } from './scripts/variables.js'; import { initPresetManager } from './scripts/preset-manager.js'; @@ -250,7 +249,6 @@ export { name1, name2, is_send_press, - api_server_textgenerationwebui, max_context, chat_metadata, streamingProcessor, @@ -662,7 +660,6 @@ let chat_file_for_del = ''; let online_status = 'no_connection'; let api_server = ''; -let api_server_textgenerationwebui = ''; let is_send_press = false; //Send generation @@ -888,7 +885,8 @@ async function getStatusKobold() { if (!endpoint) { console.warn('No endpoint for status check'); - return; + online_status = 'no_connection'; + return resultCheckStatus(); } try { @@ -931,11 +929,12 @@ async function getStatusKobold() { async function getStatusTextgen() { 
const url = '/api/backends/text-completions/status'; - let endpoint = getTextGenServer(); + const endpoint = getTextGenServer(); if (!endpoint) { console.warn('No endpoint for status check'); - return; + online_status = 'no_connection'; + return resultCheckStatus(); } try { @@ -953,11 +952,14 @@ async function getStatusTextgen() { const data = await response.json(); if (textgen_settings.type === MANCER) { - online_status = textgen_settings.mancer_model; loadMancerModels(data?.data); + online_status = textgen_settings.mancer_model; } else if (textgen_settings.type === TOGETHERAI) { - online_status = textgen_settings.togetherai_model; loadTogetherAIModels(data?.data); + online_status = textgen_settings.togetherai_model; + } else if (textgen_settings.type === OLLAMA) { + loadOllamaModels(data?.data); + online_status = textgen_settings.ollama_model || 'Connected'; } else { online_status = data?.result; } @@ -4468,7 +4470,7 @@ function extractMessageFromData(data) { case 'koboldhorde': return data.text; case 'textgenerationwebui': - return data.choices?.[0]?.text ?? data.content; + return data.choices?.[0]?.text ?? data.content ?? data.response; case 'novel': return data.output; case 'openai': @@ -5735,13 +5737,6 @@ async function getSettings() { setWorldInfoSettings(settings.world_info_settings ?? 
settings, data); - api_server_textgenerationwebui = settings.api_server_textgenerationwebui; - $('#textgenerationwebui_api_url_text').val(api_server_textgenerationwebui); - $('#aphrodite_api_url_text').val(api_server_textgenerationwebui); - $('#tabby_api_url_text').val(api_server_textgenerationwebui); - $('#llamacpp_api_url_text').val(api_server_textgenerationwebui); - $('#koboldcpp_api_url_text').val(api_server_textgenerationwebui); - selected_button = settings.selected_button; if (data.enable_extensions) { @@ -5781,7 +5776,6 @@ async function saveSettings(type) { active_character: active_character, active_group: active_group, api_server: api_server, - api_server_textgenerationwebui: api_server_textgenerationwebui, preset_settings: preset_settings, user_avatar: user_avatar, amount_gen: amount_gen, @@ -7478,100 +7472,105 @@ const swipe_right = () => { } }; +const CONNECT_API_MAP = { + 'kobold': { + button: '#api_button', + }, + 'horde': { + selected: 'koboldhorde', + }, + 'novel': { + button: '#api_button_novel', + }, + 'ooba': { + selected: 'textgenerationwebui', + button: '#api_button_textgenerationwebui', + type: textgen_types.OOBA, + }, + 'tabby': { + selected: 'textgenerationwebui', + button: '#api_button_textgenerationwebui', + type: textgen_types.TABBY, + }, + 'llamacpp': { + selected: 'textgenerationwebui', + button: '#api_button_textgenerationwebui', + type: textgen_types.LLAMACPP, + }, + 'ollama': { + selected: 'textgenerationwebui', + button: '#api_button_textgenerationwebui', + type: textgen_types.OLLAMA, + }, + 'mancer': { + selected: 'textgenerationwebui', + button: '#api_button_textgenerationwebui', + type: textgen_types.MANCER, + }, + 'aphrodite': { + selected: 'textgenerationwebui', + button: '#api_button_textgenerationwebui', + type: textgen_types.APHRODITE, + }, + 'kcpp': { + selected: 'textgenerationwebui', + button: '#api_button_textgenerationwebui', + type: textgen_types.KOBOLDCPP, + }, + 'togetherai': { + selected: 'textgenerationwebui', + 
button: '#api_button_textgenerationwebui', + type: textgen_types.TOGETHERAI, + }, + 'oai': { + selected: 'openai', + source: 'openai', + button: '#api_button_openai', + }, + 'claude': { + selected: 'openai', + source: 'claude', + button: '#api_button_openai', + }, + 'windowai': { + selected: 'openai', + source: 'windowai', + button: '#api_button_openai', + }, + 'openrouter': { + selected: 'openai', + source: 'openrouter', + button: '#api_button_openai', + }, + 'scale': { + selected: 'openai', + source: 'scale', + button: '#api_button_openai', + }, + 'ai21': { + selected: 'openai', + source: 'ai21', + button: '#api_button_openai', + }, + 'makersuite': { + selected: 'openai', + source: 'makersuite', + button: '#api_button_openai', + }, + 'mistralai': { + selected: 'openai', + source: 'mistralai', + button: '#api_button_openai', + }, +}; + /** * @param {string} text API name */ async function connectAPISlash(_, text) { if (!text) return; - const apiMap = { - 'kobold': { - button: '#api_button', - }, - 'horde': { - selected: 'koboldhorde', - }, - 'novel': { - button: '#api_button_novel', - }, - 'ooba': { - selected: 'textgenerationwebui', - button: '#api_button_textgenerationwebui', - type: textgen_types.OOBA, - }, - 'tabby': { - selected: 'textgenerationwebui', - button: '#api_button_textgenerationwebui', - type: textgen_types.TABBY, - }, - 'llamacpp': { - selected: 'textgenerationwebui', - button: '#api_button_textgenerationwebui', - type: textgen_types.LLAMACPP, - }, - 'mancer': { - selected: 'textgenerationwebui', - button: '#api_button_textgenerationwebui', - type: textgen_types.MANCER, - }, - 'aphrodite': { - selected: 'textgenerationwebui', - button: '#api_button_textgenerationwebui', - type: textgen_types.APHRODITE, - }, - 'kcpp': { - selected: 'textgenerationwebui', - button: '#api_button_textgenerationwebui', - type: textgen_types.KOBOLDCPP, - }, - 'togetherai': { - selected: 'textgenerationwebui', - button: '#api_button_textgenerationwebui', - type: 
textgen_types.TOGETHERAI, - }, - 'oai': { - selected: 'openai', - source: 'openai', - button: '#api_button_openai', - }, - 'claude': { - selected: 'openai', - source: 'claude', - button: '#api_button_openai', - }, - 'windowai': { - selected: 'openai', - source: 'windowai', - button: '#api_button_openai', - }, - 'openrouter': { - selected: 'openai', - source: 'openrouter', - button: '#api_button_openai', - }, - 'scale': { - selected: 'openai', - source: 'scale', - button: '#api_button_openai', - }, - 'ai21': { - selected: 'openai', - source: 'ai21', - button: '#api_button_openai', - }, - 'makersuite': { - selected: 'openai', - source: 'makersuite', - button: '#api_button_openai', - }, - 'mistralai': { - selected: 'openai', - source: 'mistralai', - button: '#api_button_openai', - }, - }; - - const apiConfig = apiMap[text.toLowerCase()]; + const apiConfig = CONNECT_API_MAP[text.toLowerCase()]; if (!apiConfig) { toastr.error(`Error: ${text} is not a valid API`); return; @@ -7855,7 +7854,7 @@ jQuery(async function () { } registerSlashCommand('dupe', DupeChar, [], '– duplicates the currently selected character', true, true); - registerSlashCommand('api', connectAPISlash, [], '(kobold, horde, novel, ooba, tabby, mancer, aphrodite, kcpp, oai, claude, windowai, openrouter, scale, ai21, makersuite, mistralai, togetherai, llamacpp) – connect to an API', true, true); + registerSlashCommand('api', connectAPISlash, [], `(${Object.keys(CONNECT_API_MAP)}) – connect to an API`, true, true); registerSlashCommand('impersonate', doImpersonate, ['imp'], '– calls an impersonation response', true, true); registerSlashCommand('delchat', doDeleteChat, [], '– deletes the current chat', true, true); registerSlashCommand('closechat', doCloseChat, [], '– closes the current chat', true, true); @@ -8414,19 +8413,7 @@ jQuery(async function () { await writeSecret(SECRET_KEYS.TOGETHERAI, togetherKey); } - const urlSourceId = getTextGenUrlSourceId(); - - if (urlSourceId && $(urlSourceId).val() !== 
'') { - let value = formatTextGenURL(String($(urlSourceId).val()).trim()); - if (!value) { - callPopup('Please enter a valid URL.', 'text'); - return; - } - - $(urlSourceId).val(value); - api_server_textgenerationwebui = value; - } - + validateTextGenUrl(); startStatusLoading(); main_api = 'textgenerationwebui'; saveSettingsDebounced(); diff --git a/public/scripts/RossAscends-mods.js b/public/scripts/RossAscends-mods.js index 4dd724060..a5abd41fd 100644 --- a/public/scripts/RossAscends-mods.js +++ b/public/scripts/RossAscends-mods.js @@ -4,7 +4,6 @@ import { online_status, main_api, api_server, - api_server_textgenerationwebui, is_send_press, max_context, saveSettingsDebounced, @@ -35,7 +34,7 @@ import { import { debounce, delay, getStringHash, isValidUrl } from './utils.js'; import { chat_completion_sources, oai_settings } from './openai.js'; import { getTokenCount } from './tokenizers.js'; -import { textgen_types, textgenerationwebui_settings as textgen_settings } from './textgen-settings.js'; +import { textgen_types, textgenerationwebui_settings as textgen_settings, getTextGenServer } from './textgen-settings.js'; import Bowser from '../lib/bowser.min.js'; @@ -387,7 +386,7 @@ function RA_autoconnect(PrevApi) { ) { $('#api_button_textgenerationwebui').trigger('click'); } - else if (api_server_textgenerationwebui && isValidUrl(api_server_textgenerationwebui)) { + else if (isValidUrl(getTextGenServer())) { $('#api_button_textgenerationwebui').trigger('click'); } break; diff --git a/public/scripts/textgen-models.js b/public/scripts/textgen-models.js index 03d908a7a..c828f6404 100644 --- a/public/scripts/textgen-models.js +++ b/public/scripts/textgen-models.js @@ -1,10 +1,30 @@ -import { setGenerationParamsFromPreset } from '../script.js'; +import { callPopup, getRequestHeaders, setGenerationParamsFromPreset } from '../script.js'; import { isMobile } from './RossAscends-mods.js'; -import { textgenerationwebui_settings as textgen_settings } from 
'./textgen-settings.js'; +import { textgenerationwebui_settings as textgen_settings, textgen_types } from './textgen-settings.js'; let mancerModels = []; let togetherModels = []; +export async function loadOllamaModels(data) { + if (!Array.isArray(data)) { + console.error('Invalid Ollama models data', data); + return; + } + + if (!data.find(x => x.id === textgen_settings.ollama_model)) { + textgen_settings.ollama_model = data[0]?.id || ''; + } + + $('#ollama_model').empty(); + for (const model of data) { + const option = document.createElement('option'); + option.value = model.id; + option.text = model.name; + option.selected = model.id === textgen_settings.ollama_model; + $('#ollama_model').append(option); + } +} + export async function loadTogetherAIModels(data) { if (!Array.isArray(data)) { console.error('Invalid Together AI models data', data); @@ -13,6 +33,10 @@ export async function loadTogetherAIModels(data) { togetherModels = data; + if (!data.find(x => x.name === textgen_settings.togetherai_model)) { + textgen_settings.togetherai_model = data[0]?.name || ''; + } + $('#model_togetherai_select').empty(); for (const model of data) { // Hey buddy, I think you've got the wrong door. 
@@ -36,6 +60,10 @@ export async function loadMancerModels(data) { mancerModels = data; + if (!data.find(x => x.id === textgen_settings.mancer_model)) { + textgen_settings.mancer_model = data[0]?.id || ''; + } + $('#mancer_model').empty(); for (const model of data) { const option = document.createElement('option'); @@ -55,7 +83,6 @@ function onMancerModelSelect() { setGenerationParamsFromPreset({ max_length: limits.context, genamt: limits.completion }); } - function onTogetherModelSelect() { const modelName = String($('#model_togetherai_select').val()); textgen_settings.togetherai_model = modelName; @@ -64,6 +91,12 @@ function onTogetherModelSelect() { setGenerationParamsFromPreset({ max_length: model.context_length }); } +function onOllamaModelSelect() { + const modelId = String($('#ollama_model').val()); + textgen_settings.ollama_model = modelId; + $('#api_button_textgenerationwebui').trigger('click'); +} + function getMancerModelTemplate(option) { const model = mancerModels.find(x => x.id === option?.element?.value); @@ -97,9 +130,52 @@ function getTogetherModelTemplate(option) { `)); } +async function downloadOllamaModel() { + try { + const serverUrl = textgen_settings.server_urls[textgen_types.OLLAMA]; + + if (!serverUrl) { + toastr.info('Please connect to an Ollama server first.'); + return; + } + + const html = `Enter a model tag, for example llama2:latest.
    + See
    Library for available models.`; + const name = await callPopup(html, 'input', '', { okButton: 'Download' }); + + if (!name) { + return; + } + + toastr.info('Download may take a while, please wait...', 'Working on it'); + + const response = await fetch('/api/backends/text-completions/ollama/download', { + method: 'POST', + headers: getRequestHeaders(), + body: JSON.stringify({ + name: name, + api_server: serverUrl, + }), + }); + + if (!response.ok) { + throw new Error(response.statusText); + } + + // Force refresh the model list + toastr.success('Download complete. Please select the model from the dropdown.'); + $('#api_button_textgenerationwebui').trigger('click'); + } catch (err) { + console.error(err); + toastr.error('Failed to download Ollama model. Please try again.'); + } +} + jQuery(function () { $('#mancer_model').on('change', onMancerModelSelect); $('#model_togetherai_select').on('change', onTogetherModelSelect); + $('#ollama_model').on('change', onOllamaModelSelect); + $('#ollama_download_model').on('click', downloadOllamaModel); if (!isMobile()) { $('#mancer_model').select2({ @@ -116,5 +192,11 @@ jQuery(function () { width: '100%', templateResult: getTogetherModelTemplate, }); + $('#ollama_model').select2({ + placeholder: 'Select a model', + searchInputPlaceholder: 'Search models...', + searchInputCssClass: 'text_pole', + width: '100%', + }); } }); diff --git a/public/scripts/textgen-settings.js b/public/scripts/textgen-settings.js index 2a0c50c31..475aec70f 100644 --- a/public/scripts/textgen-settings.js +++ b/public/scripts/textgen-settings.js @@ -1,5 +1,4 @@ import { - api_server_textgenerationwebui, getRequestHeaders, getStoppingStrings, max_context, @@ -34,9 +33,10 @@ export const textgen_types = { KOBOLDCPP: 'koboldcpp', TOGETHERAI: 'togetherai', LLAMACPP: 'llamacpp', + OLLAMA: 'ollama', }; -const { MANCER, APHRODITE, TOGETHERAI, OOBA } = textgen_types; +const { MANCER, APHRODITE, TOGETHERAI, OOBA, OLLAMA, LLAMACPP } = textgen_types; const 
BIAS_KEY = '#textgenerationwebui_api-settings'; // Maybe let it be configurable in the future? @@ -46,6 +46,15 @@ const MANCER_SERVER_DEFAULT = 'https://neuro.mancer.tech'; let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEFAULT; let TOGETHERAI_SERVER = 'https://api.together.xyz'; +const SERVER_INPUTS = { + [textgen_types.OOBA]: '#textgenerationwebui_api_url_text', + [textgen_types.APHRODITE]: '#aphrodite_api_url_text', + [textgen_types.TABBY]: '#tabby_api_url_text', + [textgen_types.KOBOLDCPP]: '#koboldcpp_api_url_text', + [textgen_types.LLAMACPP]: '#llamacpp_api_url_text', + [textgen_types.OLLAMA]: '#ollama_api_url_text', +}; + const KOBOLDCPP_ORDER = [6, 0, 1, 3, 4, 2, 5]; const settings = { temp: 0.7, @@ -95,10 +104,12 @@ const settings = { type: textgen_types.OOBA, mancer_model: 'mytholite', togetherai_model: 'Gryphe/MythoMax-L2-13b', + ollama_model: '', legacy_api: false, sampler_order: KOBOLDCPP_ORDER, logit_bias: [], n: 1, + server_urls: {}, }; export let textgenerationwebui_banned_in_macros = []; @@ -154,6 +165,37 @@ const setting_names = [ 'logit_bias', ]; +export function validateTextGenUrl() { + const selector = SERVER_INPUTS[settings.type]; + + if (!selector) { + return; + } + + const control = $(selector); + const url = String(control.val()).trim(); + const formattedUrl = formatTextGenURL(url); + + if (!formattedUrl) { + toastr.error('Enter a valid API URL', 'Text Completion API'); + return; + } + + control.val(formattedUrl); +} + +export function getTextGenServer() { + if (settings.type === MANCER) { + return MANCER_SERVER; + } + + if (settings.type === TOGETHERAI) { + return TOGETHERAI_SERVER; + } + + return settings.server_urls[settings.type] ?? 
''; +} + async function selectPreset(name) { const preset = textgenerationwebui_presets[textgenerationwebui_preset_names.indexOf(name)]; @@ -291,6 +333,21 @@ function loadTextGenSettings(data, loadedSettings) { textgenerationwebui_preset_names = data.textgenerationwebui_preset_names ?? []; Object.assign(settings, loadedSettings.textgenerationwebui_settings ?? {}); + if (loadedSettings.api_server_textgenerationwebui) { + for (const type of Object.keys(SERVER_INPUTS)) { + settings.server_urls[type] = loadedSettings.api_server_textgenerationwebui; + } + delete loadedSettings.api_server_textgenerationwebui; + } + + for (const [type, selector] of Object.entries(SERVER_INPUTS)) { + const control = $(selector); + control.val(settings.server_urls[type] ?? '').on('input', function () { + settings.server_urls[type] = String($(this).val()); + saveSettingsDebounced(); + }); + } + if (loadedSettings.api_use_mancer_webui) { settings.type = MANCER; } @@ -336,21 +393,6 @@ function loadTextGenSettings(data, loadedSettings) { }); } -export function getTextGenUrlSourceId() { - switch (settings.type) { - case textgen_types.OOBA: - return '#textgenerationwebui_api_url_text'; - case textgen_types.APHRODITE: - return '#aphrodite_api_url_text'; - case textgen_types.TABBY: - return '#tabby_api_url_text'; - case textgen_types.KOBOLDCPP: - return '#koboldcpp_api_url_text'; - case textgen_types.LLAMACPP: - return '#llamacpp_api_url_text'; - } -} - /** * Sorts the sampler items by the given order. * @param {any[]} orderArray Sampler order array. 
@@ -423,7 +465,10 @@ jQuery(function () { BIAS_CACHE.delete(BIAS_KEY); $('#main_api').trigger('change'); - $('#api_button_textgenerationwebui').trigger('click'); + + if (!SERVER_INPUTS[type] || settings.server_urls[type]) { + $('#api_button_textgenerationwebui').trigger('click'); + } saveSettingsDebounced(); }); @@ -620,21 +665,18 @@ function getModel() { return online_status; } + if (settings.type === OLLAMA) { + if (!settings.ollama_model) { + toastr.error('No Ollama model selected.', 'Text Completion API'); + throw new Error('No Ollama model selected'); + } + + return settings.ollama_model; + } + return undefined; } -export function getTextGenServer() { - if (settings.type === MANCER) { - return MANCER_SERVER; - } - - if (settings.type === TOGETHERAI) { - return TOGETHERAI_SERVER; - } - - return api_server_textgenerationwebui; -} - export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, isContinue, cfgValues, type) { const canMultiSwipe = !isContinue && !isImpersonate && type !== 'quiet'; let params = { @@ -687,6 +729,13 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, 'guidance_scale': cfgValues?.guidanceScale?.value ?? settings.guidance_scale ?? 1, 'negative_prompt': cfgValues?.negativePrompt ?? substituteParams(settings.negative_prompt) ?? '', 'grammar_string': settings.grammar_string, + // llama.cpp aliases. In case someone wants to use LM Studio as Text Completion API + 'repeat_penalty': settings.rep_pen, + 'tfs_z': settings.tfs, + 'repeat_last_n': settings.rep_pen_range, + 'n_predict': settings.maxTokens, + 'mirostat': settings.mirostat_mode, + 'ignore_eos': settings.ban_eos_token, }; const aphroditeParams = { 'n': canMultiSwipe ? 
settings.n : 1, @@ -697,7 +746,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, //'logprobs': settings.log_probs_aphrodite, //'prompt_logprobs': settings.prompt_log_probs_aphrodite, }; - if (settings.type === textgen_types.APHRODITE) { + if (settings.type === APHRODITE) { params = Object.assign(params, aphroditeParams); } else { params = Object.assign(params, nonAphroditeParams); @@ -709,7 +758,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, params.logit_bias = logitBias; } - if (settings.type === textgen_types.LLAMACPP) { + if (settings.type === LLAMACPP || settings.type === OLLAMA) { // Convert bias and token bans to array of arrays const logitBiasArray = (params.logit_bias && typeof params.logit_bias === 'object' && Object.keys(params.logit_bias).length > 0) ? Object.entries(params.logit_bias).map(([key, value]) => [Number(key), value]) @@ -717,14 +766,9 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, const tokenBans = toIntArray(getCustomTokenBans()); logitBiasArray.push(...tokenBans.map(x => [Number(x), false])); const llamaCppParams = { - 'repeat_penalty': settings.rep_pen, - 'tfs_z': settings.tfs, - 'repeat_last_n': settings.rep_pen_range, - 'n_predict': settings.maxTokens, - 'mirostat': settings.mirostat_mode, - 'ignore_eos': settings.ban_eos_token, - 'grammar': settings.grammar_string, 'logit_bias': logitBiasArray, + // Conflicts with ooba's grammar_string + 'grammar': settings.grammar_string, }; params = Object.assign(params, llamaCppParams); } diff --git a/public/scripts/tokenizers.js b/public/scripts/tokenizers.js index acc7021fb..3c7bc3f17 100644 --- a/public/scripts/tokenizers.js +++ b/public/scripts/tokenizers.js @@ -1,10 +1,10 @@ -import { characters, main_api, api_server, api_server_textgenerationwebui, nai_settings, online_status, this_chid } from '../script.js'; +import { characters, main_api, api_server, nai_settings, online_status, 
this_chid } from '../script.js'; import { power_user, registerDebugFunction } from './power-user.js'; import { chat_completion_sources, model_list, oai_settings } from './openai.js'; import { groups, selected_group } from './group-chats.js'; import { getStringHash } from './utils.js'; import { kai_flags } from './kai-settings.js'; -import { textgen_types, textgenerationwebui_settings as textgen_settings } from './textgen-settings.js'; +import { textgen_types, textgenerationwebui_settings as textgen_settings, getTextGenServer } from './textgen-settings.js'; const { OOBA, TABBY, KOBOLDCPP, APHRODITE, LLAMACPP } = textgen_types; @@ -537,7 +537,7 @@ function getTextgenAPITokenizationParams(str) { return { text: str, api_type: textgen_settings.type, - url: api_server_textgenerationwebui, + url: getTextGenServer(), legacy_api: textgen_settings.legacy_api && (textgen_settings.type === OOBA || textgen_settings.type === APHRODITE), }; } diff --git a/src/constants.js b/src/constants.js index bcf3f4005..03c6ccb82 100644 --- a/src/constants.js +++ b/src/constants.js @@ -173,6 +173,7 @@ const TEXTGEN_TYPES = { KOBOLDCPP: 'koboldcpp', TOGETHERAI: 'togetherai', LLAMACPP: 'llamacpp', + OLLAMA: 'ollama', }; // https://docs.together.ai/reference/completions @@ -187,6 +188,25 @@ const TOGETHERAI_KEYS = [ 'stream', ]; +// https://github.com/jmorganca/ollama/blob/main/docs/api.md#request-with-options +const OLLAMA_KEYS = [ + 'num_predict', + 'stop', + 'temperature', + 'repeat_penalty', + 'presence_penalty', + 'frequency_penalty', + 'top_k', + 'top_p', + 'tfs_z', + 'typical_p', + 'seed', + 'repeat_last_n', + 'mirostat', + 'mirostat_tau', + 'mirostat_eta', +]; + const AVATAR_WIDTH = 400; const AVATAR_HEIGHT = 600; @@ -201,4 +221,5 @@ module.exports = { AVATAR_WIDTH, AVATAR_HEIGHT, TOGETHERAI_KEYS, + OLLAMA_KEYS, }; diff --git a/src/endpoints/backends/text-completions.js b/src/endpoints/backends/text-completions.js index 7c2f2da95..a683d1a50 100644 --- 
a/src/endpoints/backends/text-completions.js +++ b/src/endpoints/backends/text-completions.js @@ -1,14 +1,61 @@ const express = require('express'); const fetch = require('node-fetch').default; const _ = require('lodash'); +const Readable = require('stream').Readable; const { jsonParser } = require('../../express-common'); -const { TEXTGEN_TYPES, TOGETHERAI_KEYS } = require('../../constants'); +const { TEXTGEN_TYPES, TOGETHERAI_KEYS, OLLAMA_KEYS } = require('../../constants'); const { forwardFetchResponse } = require('../../util'); const { setAdditionalHeaders } = require('../../additional-headers'); const router = express.Router(); +/** + * Special boy's steaming routine. Wrap this abomination into proper SSE stream. + * @param {import('node-fetch').Response} jsonStream JSON stream + * @param {import('express').Request} request Express request + * @param {import('express').Response} response Express response + * @returns {Promise} Nothing valuable + */ +async function parseOllamaStream(jsonStream, request, response) { + try { + let partialData = ''; + jsonStream.body.on('data', (data) => { + const chunk = data.toString(); + partialData += chunk; + while (true) { + let json; + try { + json = JSON.parse(partialData); + } catch (e) { + break; + } + const text = json.response || ''; + response.write(`data: {"choices": [{"text": "${text}"}]}\n\n`); + partialData = ''; + } + }); + + request.socket.on('close', function () { + if (jsonStream.body instanceof Readable) jsonStream.body.destroy(); + response.end(); + }); + + jsonStream.body.on('end', () => { + console.log('Streaming request finished'); + response.write('data: [DONE]\n\n'); + response.end(); + }); + } catch (error) { + console.log('Error forwarding streaming response:', error); + if (!response.headersSent) { + return response.status(500).send({ error: true }); + } else { + return response.end(); + } + } +} + //************** Ooba/OpenAI text completions API router.post('/status', jsonParser, async function 
(request, response) { if (!request.body) return response.sendStatus(400); @@ -51,6 +98,9 @@ router.post('/status', jsonParser, async function (request, response) { case TEXTGEN_TYPES.TOGETHERAI: url += '/api/models?&info'; break; + case TEXTGEN_TYPES.OLLAMA: + url += '/api/tags'; + break; } } @@ -73,6 +123,10 @@ router.post('/status', jsonParser, async function (request, response) { data = { data: data.map(x => ({ id: x.name, ...x })) }; } + if (request.body.api_type === TEXTGEN_TYPES.OLLAMA && Array.isArray(data.models)) { + data = { data: data.models.map(x => ({ id: x.name, ...x })) }; + } + if (!Array.isArray(data.data)) { console.log('Models response is not an array.'); return response.status(400); @@ -127,8 +181,8 @@ router.post('/status', jsonParser, async function (request, response) { } }); -router.post('/generate', jsonParser, async function (request, response_generate) { - if (!request.body) return response_generate.sendStatus(400); +router.post('/generate', jsonParser, async function (request, response) { + if (!request.body) return response.sendStatus(400); try { if (request.body.api_server.indexOf('localhost') !== -1) { @@ -164,6 +218,9 @@ router.post('/generate', jsonParser, async function (request, response_generate) case TEXTGEN_TYPES.LLAMACPP: url += '/completion'; break; + case TEXTGEN_TYPES.OLLAMA: + url += '/api/generate'; + break; } } @@ -186,16 +243,31 @@ router.post('/generate', jsonParser, async function (request, response_generate) args.body = JSON.stringify(request.body); } - if (request.body.stream) { + if (request.body.api_type === TEXTGEN_TYPES.OLLAMA) { + args.body = JSON.stringify({ + model: request.body.model, + prompt: request.body.prompt, + stream: request.body.stream ?? 
false, + raw: true, + options: _.pickBy(request.body, (_, key) => OLLAMA_KEYS.includes(key)), + }); + } + + if (request.body.api_type === TEXTGEN_TYPES.OLLAMA && request.body.stream) { + const stream = await fetch(url, args); + parseOllamaStream(stream, request, response); + } else if (request.body.stream) { const completionsStream = await fetch(url, args); // Pipe remote SSE stream to Express response - forwardFetchResponse(completionsStream, response_generate); + forwardFetchResponse(completionsStream, response); } else { const completionsReply = await fetch(url, args); if (completionsReply.ok) { - const data = await completionsReply.json(); + const text = await completionsReply.text(); + console.log('Endpoint response:', text); + const data = JSON.parse(text); console.log('Endpoint response:', data); // Wrap legacy response to OAI completions format @@ -204,28 +276,60 @@ router.post('/generate', jsonParser, async function (request, response_generate) data['choices'] = [{ text }]; } - return response_generate.send(data); + return response.send(data); } else { const text = await completionsReply.text(); const errorBody = { error: true, status: completionsReply.status, response: text }; - if (!response_generate.headersSent) { - return response_generate.send(errorBody); + if (!response.headersSent) { + return response.send(errorBody); } - return response_generate.end(); + return response.end(); } } } catch (error) { let value = { error: true, status: error?.status, response: error?.statusText }; console.log('Endpoint error:', error); - if (!response_generate.headersSent) { - return response_generate.send(value); + if (!response.headersSent) { + return response.send(value); } - return response_generate.end(); + return response.end(); } }); +const ollama = express.Router(); + +ollama.post('/download', jsonParser, async function (request, response) { + try { + if (!request.body.name || !request.body.api_server) return response.sendStatus(400); + + const name = 
request.body.name; + const url = String(request.body.api_server).replace(/\/$/, ''); + + const fetchResponse = await fetch(`${url}/api/pull`, { + method: 'POST', + body: JSON.stringify({ + name: name, + stream: false, + }), + headers: { 'Content-Type': 'application/json' }, + }); + + if (!fetchResponse.ok) { + console.log('Download error:', fetchResponse.status, fetchResponse.statusText); + return response.status(fetchResponse.status).send({ error: true }); + } + + return response.send({ ok: true }); + } catch (error) { + console.error(error); + return response.status(500); + } +}); + +router.use('/ollama', ollama); + module.exports = { router }; From 44318fef2218866da3dfbf1223cd49f23942cfe7 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Tue, 19 Dec 2023 16:49:21 +0200 Subject: [PATCH 41/82] Fix double logging of non-streamed replies --- src/endpoints/backends/text-completions.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/endpoints/backends/text-completions.js b/src/endpoints/backends/text-completions.js index a683d1a50..d531fbfdc 100644 --- a/src/endpoints/backends/text-completions.js +++ b/src/endpoints/backends/text-completions.js @@ -265,9 +265,7 @@ router.post('/generate', jsonParser, async function (request, response) { const completionsReply = await fetch(url, args); if (completionsReply.ok) { - const text = await completionsReply.text(); - console.log('Endpoint response:', text); - const data = JSON.parse(text); + const data = await completionsReply.json(); console.log('Endpoint response:', data); // Wrap legacy response to OAI completions format From 6859e4443e30be0500d00982ae6fe9785fbe44df Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Tue, 19 Dec 2023 19:17:19 +0200 Subject: [PATCH 42/82] Fix ollama chunk wrapper --- src/endpoints/backends/text-completions.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git 
a/src/endpoints/backends/text-completions.js b/src/endpoints/backends/text-completions.js index d531fbfdc..0924e43ad 100644 --- a/src/endpoints/backends/text-completions.js +++ b/src/endpoints/backends/text-completions.js @@ -31,7 +31,8 @@ async function parseOllamaStream(jsonStream, request, response) { break; } const text = json.response || ''; - response.write(`data: {"choices": [{"text": "${text}"}]}\n\n`); + const chunk = { choices: [{ text }] }; + response.write(`data: ${JSON.stringify(chunk)}\n\n`); partialData = ''; } }); From 423c2b70dc2f9d789f6e54a56383d72f4652b647 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Tue, 19 Dec 2023 19:44:52 +0200 Subject: [PATCH 43/82] Camel case variable name --- src/endpoints/prompt-converters.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/endpoints/prompt-converters.js b/src/endpoints/prompt-converters.js index 3d864e1b8..12efd1cdc 100644 --- a/src/endpoints/prompt-converters.js +++ b/src/endpoints/prompt-converters.js @@ -3,13 +3,13 @@ * @param {object[]} messages Array of messages * @param {boolean} addAssistantPostfix Add Assistant postfix. * @param {string} addAssistantPrefill Add Assistant prefill after the assistant postfix. - * @param {boolean} withSyspromptSupport Indicates if the Claude model supports the system prompt format. + * @param {boolean} withSysPromptSupport Indicates if the Claude model supports the system prompt format. * @param {boolean} useSystemPrompt Indicates if the system prompt format should be used. * @param {string} addSysHumanMsg Add Human message between system prompt and assistant. * @returns {string} Prompt for Claude * @copyright Prompt Conversion script taken from RisuAI by kwaroran (GPLv3). 
*/ -function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, withSyspromptSupport, useSystemPrompt, addSysHumanMsg) { +function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, withSysPromptSupport, useSystemPrompt, addSysHumanMsg) { //Prepare messages for claude. if (messages.length > 0) { @@ -31,7 +31,7 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, }); // When 2.1+ and 'Use system prompt" checked, switches to the system prompt format by setting the first message's role to the 'system'. // Inserts the human's message before the first the assistant one, if there are no such message or prefix found. - if (withSyspromptSupport && useSystemPrompt) { + if (withSysPromptSupport && useSystemPrompt) { messages[0].role = 'system'; if (firstAssistantIndex > 0 && addSysHumanMsg && !hasUser) { messages.splice(firstAssistantIndex, 0, { From 3b22159f53ad3ab16f0e52e4f38bf022db01f806 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Tue, 19 Dec 2023 19:45:28 +0200 Subject: [PATCH 44/82] Fix spelling --- public/index.html | 27 ++++++++++++++++++--------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/public/index.html b/public/index.html index 6901b5c6b..86bb0623a 100644 --- a/public/index.html +++ b/public/index.html @@ -1537,29 +1537,38 @@
    - Exclude the assistant suffix from being added to the end of prompt (Requires jailbreak with 'Assistant:' in it). + + Exclude the assistant suffix from being added to the end of prompt (Requires jailbreak with 'Assistant:' in it). +
    -
    +
    Assistant Prefill
    -
    From a78875ca081274444c4d5318e7c16896c2a63850 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Tue, 19 Dec 2023 19:47:23 +0200 Subject: [PATCH 45/82] Use native color util --- src/endpoints/backends/chat-completions.js | 33 +++++++++++----------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js index 436d7d9c3..0a8208a9c 100644 --- a/src/endpoints/backends/chat-completions.js +++ b/src/endpoints/backends/chat-completions.js @@ -4,7 +4,7 @@ const { Readable } = require('stream'); const { jsonParser } = require('../../express-common'); const { CHAT_COMPLETION_SOURCES, GEMINI_SAFETY, BISON_SAFETY } = require('../../constants'); -const { forwardFetchResponse, getConfigValue, tryParse, uuidv4 } = require('../../util'); +const { forwardFetchResponse, getConfigValue, tryParse, uuidv4, color } = require('../../util'); const { convertClaudePrompt, convertGooglePrompt, convertTextCompletionPrompt } = require('../prompt-converters'); const { readSecret, SECRET_KEYS } = require('../secrets'); @@ -21,11 +21,10 @@ const API_CLAUDE = 'https://api.anthropic.com/v1'; async function sendClaudeRequest(request, response) { const apiUrl = new URL(request.body.reverse_proxy || API_CLAUDE).toString(); const apiKey = request.body.reverse_proxy ? 
request.body.proxy_password : readSecret(SECRET_KEYS.CLAUDE); - const chalk = require('chalk'); const divider = '-'.repeat(process.stdout.columns); if (!apiKey) { - console.log(chalk.red(`Claude API key is missing.\n${divider}`)); + console.log(color.red(`Claude API key is missing.\n${divider}`)); return response.status(400).send({ error: true }); } @@ -39,14 +38,14 @@ async function sendClaudeRequest(request, response) { let isSyspromptSupported = request.body.model === 'claude-2' || request.body.model === 'claude-2.1'; let requestPrompt = convertClaudePrompt(request.body.messages, !request.body.exclude_assistant, request.body.assistant_prefill, isSyspromptSupported, request.body.claude_use_sysprompt, request.body.human_sysprompt_message); - console.log(chalk.green(`${divider}\nClaude request\n`) + chalk.cyan(`PROMPT\n${divider}\n${requestPrompt}\n${divider}`)); + console.log(color.green(`${divider}\nClaude request\n`) + color.cyan(`PROMPT\n${divider}\n${requestPrompt}\n${divider}`)); // Check Claude messages sequence and prefixes presence. 
const sequence = requestPrompt.split('\n').filter(x => x.startsWith('Human:') || x.startsWith('Assistant:')); let humanErrorCount = 0; let assistantErrorCount = 0; - let humanFound = sequence.some(line => line.startsWith('Human:')); - let assistantFound = sequence.some(line => line.startsWith('Assistant:')); + const humanFound = sequence.some(line => line.startsWith('Human:')); + const assistantFound = sequence.some(line => line.startsWith('Assistant:')); for (let i = 0; i < sequence.length - 1; i++) { if (sequence[i].startsWith(sequence[i + 1].split(':')[0])) { @@ -59,20 +58,20 @@ async function sendClaudeRequest(request, response) { } if (!humanFound) { - console.log(chalk.red(`${divider}\nWarning: No 'Human:' prefix found in the prompt.\n${divider}`)); + console.log(color.red(`${divider}\nWarning: No 'Human:' prefix found in the prompt.\n${divider}`)); } if (!assistantFound) { - console.log(chalk.red(`${divider}\nWarning: No 'Assistant: ' prefix found in the prompt.\n${divider}`)); + console.log(color.red(`${divider}\nWarning: No 'Assistant: ' prefix found in the prompt.\n${divider}`)); } if (!sequence[0].startsWith('Human:')) { - console.log(chalk.red(`${divider}\nWarning: The messages sequence should start with 'Human:' prefix.\nMake sure you have 'Human:' prefix at the very beggining of the prompt, or after the system prompt.\n${divider}`)); + console.log(color.red(`${divider}\nWarning: The messages sequence should start with 'Human:' prefix.\nMake sure you have 'Human:' prefix at the very beggining of the prompt, or after the system prompt.\n${divider}`)); } if (humanErrorCount > 0 || assistantErrorCount > 0) { - console.log(chalk.red(`${divider}\nWarning: Detected incorrect Prefix sequence(s).`)); - console.log(chalk.red(`Incorrect "Human:" prefix(es): ${humanErrorCount}.\nIncorrect "Assistant: " prefix(es): ${assistantErrorCount}.`)); - console.log(chalk.red('Check the prompt above and fix it in the sillytavern.')); - console.log(chalk.red('\nThe correct 
sequence should look like this:\nSystem prompt <-(for the sysprompt format only, else have 2 empty lines above the first human\'s message.)')); - console.log(chalk.red(` <-----(Each message beginning with the "Assistant:/Human:" prefix must have one empty line above.)\nHuman:\n\nAssistant:\n...\n\nHuman:\n\nAssistant:\n${divider}`)); + console.log(color.red(`${divider}\nWarning: Detected incorrect Prefix sequence(s).`)); + console.log(color.red(`Incorrect "Human:" prefix(es): ${humanErrorCount}.\nIncorrect "Assistant: " prefix(es): ${assistantErrorCount}.`)); + console.log(color.red('Check the prompt above and fix it in the sillytavern.')); + console.log(color.red('\nThe correct sequence should look like this:\nSystem prompt <-(for the sysprompt format only, else have 2 empty lines above the first human\'s message.)')); + console.log(color.red(` <-----(Each message beginning with the "Assistant:/Human:" prefix must have one empty line above.)\nHuman:\n\nAssistant:\n...\n\nHuman:\n\nAssistant:\n${divider}`)); } const stop_sequences = ['\n\nHuman:', '\n\nSystem:', '\n\nAssistant:']; @@ -107,20 +106,20 @@ async function sendClaudeRequest(request, response) { forwardFetchResponse(generateResponse, response); } else { if (!generateResponse.ok) { - console.log(chalk.red(`Claude API returned error: ${generateResponse.status} ${generateResponse.statusText}\n${await generateResponse.text()}\n${divider}`)); + console.log(color.red(`Claude API returned error: ${generateResponse.status} ${generateResponse.statusText}\n${await generateResponse.text()}\n${divider}`)); return response.status(generateResponse.status).send({ error: true }); } const generateResponseJson = await generateResponse.json(); const responseText = generateResponseJson.completion; - console.log(chalk.green(`Claude response\n${divider}\n${responseText}\n${divider}`)); + console.log(color.green(`Claude response\n${divider}\n${responseText}\n${divider}`)); // Wrap it back to OAI format const reply = { choices: 
[{ 'message': { 'content': responseText } }] }; return response.send(reply); } } catch (error) { - console.log(chalk.red(`Error communicating with Claude: ${error}\n${divider}`)); + console.log(color.red(`Error communicating with Claude: ${error}\n${divider}`)); if (!response.headersSent) { return response.status(500).send({ error: true }); } From da1e9cb3b2c54bce7cbafd1277344968e4f17de5 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Tue, 19 Dec 2023 19:48:42 +0200 Subject: [PATCH 46/82] Use const where possible --- src/endpoints/backends/chat-completions.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js index 0a8208a9c..42ec0c065 100644 --- a/src/endpoints/backends/chat-completions.js +++ b/src/endpoints/backends/chat-completions.js @@ -35,17 +35,17 @@ async function sendClaudeRequest(request, response) { controller.abort(); }); - let isSyspromptSupported = request.body.model === 'claude-2' || request.body.model === 'claude-2.1'; - let requestPrompt = convertClaudePrompt(request.body.messages, !request.body.exclude_assistant, request.body.assistant_prefill, isSyspromptSupported, request.body.claude_use_sysprompt, request.body.human_sysprompt_message); + const isSysPromptSupported = request.body.model === 'claude-2' || request.body.model === 'claude-2.1'; + const requestPrompt = convertClaudePrompt(request.body.messages, !request.body.exclude_assistant, request.body.assistant_prefill, isSysPromptSupported, request.body.claude_use_sysprompt, request.body.human_sysprompt_message); console.log(color.green(`${divider}\nClaude request\n`) + color.cyan(`PROMPT\n${divider}\n${requestPrompt}\n${divider}`)); // Check Claude messages sequence and prefixes presence. 
const sequence = requestPrompt.split('\n').filter(x => x.startsWith('Human:') || x.startsWith('Assistant:')); - let humanErrorCount = 0; - let assistantErrorCount = 0; const humanFound = sequence.some(line => line.startsWith('Human:')); const assistantFound = sequence.some(line => line.startsWith('Assistant:')); + let humanErrorCount = 0; + let assistantErrorCount = 0; for (let i = 0; i < sequence.length - 1; i++) { if (sequence[i].startsWith(sequence[i + 1].split(':')[0])) { From f30f75b3103fac9ca9960c46e9f8b973ee96a33f Mon Sep 17 00:00:00 2001 From: maver Date: Mon, 18 Dec 2023 21:55:12 +0100 Subject: [PATCH 47/82] Add GENERATE_BEFORE_COMBINE_PROMPTS event Allows for context to be ordered by extensions --- public/script.js | 64 ++++++++++++++++++++++++++++++++++-------------- 1 file changed, 46 insertions(+), 18 deletions(-) diff --git a/public/script.js b/public/script.js index 0c04d8b73..fa9664511 100644 --- a/public/script.js +++ b/public/script.js @@ -315,6 +315,7 @@ export const event_types = { FORCE_SET_BACKGROUND: 'force_set_background', CHAT_DELETED: 'chat_deleted', GROUP_CHAT_DELETED: 'group_chat_deleted', + GENERATE_BEFORE_COMBINE_PROMPTS: 'generate_before_combine_prompts', }; export const eventSource = new EventEmitter(); @@ -3623,30 +3624,57 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu generatedPromptCache = cleanupPromptCache(generatedPromptCache); } - // Right now, everything is suffixed with a newline - mesSendString = finalMesSend.map((e) => `${e.extensionPrompts.join('')}${e.message}`).join(''); + // Flattens the multiple prompt objects to a string. 
+ const combine = () => { + // Right now, everything is suffixed with a newline + mesSendString = finalMesSend.map((e) => `${e.extensionPrompts.join('')}${e.message}`).join(''); - // add chat preamble - mesSendString = addChatsPreamble(mesSendString); + // add a custom dingus (if defined) + mesSendString = addChatsSeparator(mesSendString); - // add a custom dingus (if defined) - mesSendString = addChatsSeparator(mesSendString); + // add chat preamble + mesSendString = addChatsPreamble(mesSendString); - let combinedPrompt = - beforeScenarioAnchor + - storyString + - afterScenarioAnchor + - mesExmString + - mesSendString + - generatedPromptCache; + let combinedPrompt = beforeScenarioAnchor + + storyString + + afterScenarioAnchor + + mesExmString + + mesSendString + + generatedPromptCache; - combinedPrompt = combinedPrompt.replace(/\r/gm, ''); + combinedPrompt = combinedPrompt.replace(/\r/gm, ''); - if (power_user.collapse_newlines) { - combinedPrompt = collapseNewlines(combinedPrompt); - } + if (power_user.collapse_newlines) { + combinedPrompt = collapseNewlines(combinedPrompt); + } - return combinedPrompt; + return combinedPrompt; + }; + + let data = { + api: main_api, + combinedPrompt: null, + description, + personality, + persona, + scenario, + char: name2, + user: name1, + beforeScenarioAnchor, + afterScenarioAnchor, + mesExmString, + finalMesSend, + generatedPromptCache, + main: system, + jailbreak, + naiPreamble: nai_settings.preamble, + }; + + // Before returning the combined prompt, give available context related information to all subscribers. + eventSource.emitAndWait(event_types.GENERATE_BEFORE_COMBINE_PROMPTS, data); + + // If one or multiple subscribers return a value, forfeit the responsibillity of flattening the context. + return !data.combinedPrompt ? 
combine() : data.combinedPrompt; } // Get the negative prompt first since it has the unmodified mesSend array From 8d63ce5559e31d08d7b3ba3fd3597a9cd6b7d84d Mon Sep 17 00:00:00 2001 From: maver Date: Tue, 19 Dec 2023 19:27:24 +0100 Subject: [PATCH 48/82] Log Novel Ai prompt to console When prompt logging is enabled. --- public/scripts/nai-settings.js | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/public/scripts/nai-settings.js b/public/scripts/nai-settings.js index e0feaaaf4..4b81eedd7 100644 --- a/public/scripts/nai-settings.js +++ b/public/scripts/nai-settings.js @@ -8,7 +8,7 @@ import { substituteParams, } from '../script.js'; import { getCfgPrompt } from './cfg-scale.js'; -import { MAX_CONTEXT_DEFAULT, MAX_RESPONSE_DEFAULT } from './power-user.js'; +import { MAX_CONTEXT_DEFAULT, MAX_RESPONSE_DEFAULT, power_user } from './power-user.js'; import { getTextTokens, tokenizers } from './tokenizers.js'; import EventSourceStream from './sse-stream.js'; import { @@ -437,6 +437,10 @@ export function getNovelGenerationData(finalPrompt, settings, maxLength, isImper biasCache = logitBias; } + if (power_user.console_log_prompts) { + console.log(finalPrompt); + } + return { 'input': finalPrompt, 'model': nai_settings.model_novel, From 029cf598ce0934b8c16a5abab7fd324a77fbc518 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Tue, 19 Dec 2023 23:12:14 +0200 Subject: [PATCH 49/82] Fix /peek command --- public/scripts/slash-commands.js | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/public/scripts/slash-commands.js b/public/scripts/slash-commands.js index 23ca73a56..97b24b1c7 100644 --- a/public/scripts/slash-commands.js +++ b/public/scripts/slash-commands.js @@ -845,7 +845,7 @@ async function unhideMessageCallback(_, arg) { /** * Copium for running group actions when the member is offscreen. 
* @param {number} chid - character ID - * @param {string} action - one of 'enable', 'disable', 'up', 'down', 'peek', 'remove' + * @param {string} action - one of 'enable', 'disable', 'up', 'down', 'view', 'remove' * @returns {void} */ function performGroupMemberAction(chid, action) { @@ -868,7 +868,9 @@ function performGroupMemberAction(chid, action) { if (wasOffscreen) { $(pageSizeSelector).val(paginationValue).trigger('change'); - $(paginationSelector).pagination('go', pageValue); + if ($(paginationSelector).length) { + $(paginationSelector).pagination('go', pageValue); + } } } @@ -958,7 +960,7 @@ async function peekCallback(_, arg) { return ''; } - performGroupMemberAction(chid, 'peek'); + performGroupMemberAction(chid, 'view'); return ''; } From 4b131067e428e25b448f7bd655628f060d9ee19f Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Wed, 20 Dec 2023 00:45:45 +0200 Subject: [PATCH 50/82] Add local multimodal caption sources --- public/scripts/extensions/caption/index.js | 20 ++++- public/scripts/extensions/shared.js | 97 +++++++++++++++++----- src/endpoints/backends/text-completions.js | 96 ++++++++++++++++++++- 3 files changed, 185 insertions(+), 28 deletions(-) diff --git a/public/scripts/extensions/caption/index.js b/public/scripts/extensions/caption/index.js index 97cc4cf37..ec126fcda 100644 --- a/public/scripts/extensions/caption/index.js +++ b/public/scripts/extensions/caption/index.js @@ -4,6 +4,7 @@ import { callPopup, getRequestHeaders, saveSettingsDebounced, substituteParams } import { getMessageTimeStamp } from '../../RossAscends-mods.js'; import { SECRET_KEYS, secret_state } from '../../secrets.js'; import { getMultimodalCaption } from '../shared.js'; +import { textgen_types, textgenerationwebui_settings } from '../../textgen-settings.js'; export { MODULE_NAME }; const MODULE_NAME = 'caption'; @@ -134,7 +135,7 @@ async function doCaptionRequest(base64Img, fileData) { case 'horde': return await 
captionHorde(base64Img); case 'multimodal': - return await captionMultimodal(extension_settings.caption.multimodal_api === 'google' ? base64Img : fileData); + return await captionMultimodal(fileData); default: throw new Error('Unknown caption source.'); } @@ -271,9 +272,11 @@ jQuery(function () { $(sendButton).on('click', () => { const hasCaptionModule = (modules.includes('caption') && extension_settings.caption.source === 'extras') || - (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openai' && secret_state[SECRET_KEYS.OPENAI]) || + (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openai' && (secret_state[SECRET_KEYS.OPENAI] || extension_settings.caption.allow_reverse_proxy)) || (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openrouter' && secret_state[SECRET_KEYS.OPENROUTER]) || (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'google' && secret_state[SECRET_KEYS.MAKERSUITE]) || + (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'ollama' && textgenerationwebui_settings.server_urls[textgen_types.OLLAMA]) || + (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'llamacpp' && textgenerationwebui_settings.server_urls[textgen_types.LLAMACPP]) || extension_settings.caption.source === 'local' || extension_settings.caption.source === 'horde'; @@ -329,7 +332,7 @@ jQuery(function () { @@ -337,9 +340,11 @@ jQuery(function () {
    @@ -349,12 +354,19 @@ jQuery(function () { + + + +
    +
    + Hint: Set your API keys and endpoints in the 'API Connections' tab first. +
    diff --git a/public/scripts/extensions/shared.js b/public/scripts/extensions/shared.js index 7d4e16720..4947f9b32 100644 --- a/public/scripts/extensions/shared.js +++ b/public/scripts/extensions/shared.js @@ -2,6 +2,7 @@ import { getRequestHeaders } from '../../script.js'; import { extension_settings } from '../extensions.js'; import { oai_settings } from '../openai.js'; import { SECRET_KEYS, secret_state } from '../secrets.js'; +import { textgen_types, textgenerationwebui_settings } from '../textgen-settings.js'; import { createThumbnail, isValidUrl } from '../utils.js'; /** @@ -11,20 +12,18 @@ import { createThumbnail, isValidUrl } from '../utils.js'; * @returns {Promise} Generated caption */ export async function getMultimodalCaption(base64Img, prompt) { - if (extension_settings.caption.multimodal_api === 'openai' && !secret_state[SECRET_KEYS.OPENAI]) { - throw new Error('OpenAI API key is not set.'); - } + throwIfInvalidModel(); - if (extension_settings.caption.multimodal_api === 'openrouter' && !secret_state[SECRET_KEYS.OPENROUTER]) { - throw new Error('OpenRouter API key is not set.'); - } + const noPrefix = ['google', 'ollama', 'llamacpp'].includes(extension_settings.caption.multimodal_api); - if (extension_settings.caption.multimodal_api === 'google' && !secret_state[SECRET_KEYS.MAKERSUITE]) { - throw new Error('MakerSuite API key is not set.'); + if (noPrefix && base64Img.startsWith('data:image/')) { + base64Img = base64Img.split(',')[1]; } // OpenRouter has a payload limit of ~2MB. Google is 4MB, but we love democracy. 
const isGoogle = extension_settings.caption.multimodal_api === 'google'; + const isOllama = extension_settings.caption.multimodal_api === 'ollama'; + const isLlamaCpp = extension_settings.caption.multimodal_api === 'llamacpp'; const base64Bytes = base64Img.length * 0.75; const compressionLimit = 2 * 1024 * 1024; if (['google', 'openrouter'].includes(extension_settings.caption.multimodal_api) && base64Bytes > compressionLimit) { @@ -45,27 +44,79 @@ export async function getMultimodalCaption(base64Img, prompt) { const proxyUrl = useReverseProxy ? oai_settings.reverse_proxy : ''; const proxyPassword = useReverseProxy ? oai_settings.proxy_password : ''; - const apiResult = await fetch(`/api/${isGoogle ? 'google' : 'openai'}/caption-image`, { + const requestBody = { + image: base64Img, + prompt: prompt, + }; + + if (!isGoogle) { + requestBody.api = extension_settings.caption.multimodal_api || 'openai'; + requestBody.model = extension_settings.caption.multimodal_model || 'gpt-4-vision-preview'; + requestBody.reverse_proxy = proxyUrl; + requestBody.proxy_password = proxyPassword; + } + + if (isOllama) { + if (extension_settings.caption.multimodal_model === 'ollama_current') { + requestBody.model = textgenerationwebui_settings.ollama_model; + } + + requestBody.server_url = textgenerationwebui_settings.server_urls[textgen_types.OLLAMA]; + } + + if (isLlamaCpp) { + requestBody.server_url = textgenerationwebui_settings.server_urls[textgen_types.LLAMACPP]; + } + + function getEndpointUrl() { + switch (extension_settings.caption.multimodal_api) { + case 'google': + return '/api/google/caption-image'; + case 'llamacpp': + return '/api/backends/text-completions/llamacpp/caption-image'; + case 'ollama': + return '/api/backends/text-completions/ollama/caption-image'; + default: + return '/api/openai/caption-image'; + } + } + + const apiResult = await fetch(getEndpointUrl(), { method: 'POST', headers: getRequestHeaders(), - body: JSON.stringify({ - image: base64Img, - prompt: 
prompt, - ...(isGoogle - ? {} - : { - api: extension_settings.caption.multimodal_api || 'openai', - model: extension_settings.caption.multimodal_model || 'gpt-4-vision-preview', - reverse_proxy: proxyUrl, - proxy_password: proxyPassword, - }), - }), + body: JSON.stringify(requestBody), }); if (!apiResult.ok) { - throw new Error('Failed to caption image via OpenAI.'); + throw new Error('Failed to caption image via Multimodal API.'); } const { caption } = await apiResult.json(); - return caption; + return String(caption).trim(); +} + +function throwIfInvalidModel() { + if (extension_settings.caption.multimodal_api === 'openai' && !secret_state[SECRET_KEYS.OPENAI]) { + throw new Error('OpenAI API key is not set.'); + } + + if (extension_settings.caption.multimodal_api === 'openrouter' && !secret_state[SECRET_KEYS.OPENROUTER]) { + throw new Error('OpenRouter API key is not set.'); + } + + if (extension_settings.caption.multimodal_api === 'google' && !secret_state[SECRET_KEYS.MAKERSUITE]) { + throw new Error('MakerSuite API key is not set.'); + } + + if (extension_settings.caption.multimodal_api === 'ollama' && !textgenerationwebui_settings.server_urls[textgen_types.OLLAMA]) { + throw new Error('Ollama server URL is not set.'); + } + + if (extension_settings.caption.multimodal_api === 'ollama' && extension_settings.caption.multimodal_model === 'ollama_current' && !textgenerationwebui_settings.ollama_model) { + throw new Error('Ollama model is not set.'); + } + + if (extension_settings.caption.multimodal_api === 'llamacpp' && !textgenerationwebui_settings.server_urls[textgen_types.LLAMACPP]) { + throw new Error('LlamaCPP server URL is not set.'); + } } diff --git a/src/endpoints/backends/text-completions.js b/src/endpoints/backends/text-completions.js index 0924e43ad..614b41557 100644 --- a/src/endpoints/backends/text-completions.js +++ b/src/endpoints/backends/text-completions.js @@ -310,11 +310,12 @@ ollama.post('/download', jsonParser, async function (request, 
response) { const fetchResponse = await fetch(`${url}/api/pull`, { method: 'POST', + headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ name: name, stream: false, }), - headers: { 'Content-Type': 'application/json' }, + timeout: 0, }); if (!fetchResponse.ok) { @@ -329,6 +330,99 @@ ollama.post('/download', jsonParser, async function (request, response) { } }); +ollama.post('/caption-image', jsonParser, async function (request, response) { + try { + if (!request.body.server_url || !request.body.model) { + return response.sendStatus(400); + } + + console.log('Ollama caption request:', request.body); + // Convert to string + remove trailing slash + /v1 suffix + const baseUrl = String(request.body.server_url).replace(/\/$/, '').replace(/\/v1$/, ''); + + const fetchResponse = await fetch(`${baseUrl}/api/generate`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + model: request.body.model, + prompt: request.body.prompt, + images: [request.body.image], + stream: false, + }), + timeout: 0, + }); + + if (!fetchResponse.ok) { + console.log('Ollama caption error:', fetchResponse.status, fetchResponse.statusText); + return response.status(500).send({ error: true }); + } + + const data = await fetchResponse.json(); + console.log('Ollama caption response:', data); + + const caption = data?.response || ''; + + if (!caption) { + console.log('Ollama caption is empty.'); + return response.status(500).send({ error: true }); + } + + return response.send({ caption }); + } catch (error) { + console.error(error); + return response.status(500); + } +}); + +const llamacpp = express.Router(); + +llamacpp.post('/caption-image', jsonParser, async function (request, response) { + try { + if (!request.body.server_url) { + return response.sendStatus(400); + } + + console.log('LlamaCpp caption request:', request.body); + // Convert to string + remove trailing slash + /v1 suffix + const baseUrl = 
String(request.body.server_url).replace(/\/$/, '').replace(/\/v1$/, ''); + + const fetchResponse = await fetch(`${baseUrl}/completion`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + timeout: 0, + body: JSON.stringify({ + prompt: `USER:[img-1]${String(request.body.prompt).trim()}\nASSISTANT:`, + image_data: [{ data: request.body.image, id: 1 }], + temperature: 0.1, + stream: false, + stop: ['USER:', ''], + }), + }); + + if (!fetchResponse.ok) { + console.log('LlamaCpp caption error:', fetchResponse.status, fetchResponse.statusText); + return response.status(500).send({ error: true }); + } + + const data = await fetchResponse.json(); + console.log('LlamaCpp caption response:', data); + + const caption = data?.content || ''; + + if (!caption) { + console.log('LlamaCpp caption is empty.'); + return response.status(500).send({ error: true }); + } + + return response.send({ caption }); + + } catch (error) { + console.error(error); + return response.status(500); + } +}); + router.use('/ollama', ollama); +router.use('/llamacpp', llamacpp); module.exports = { router }; From 93db2bf9537f3311967731010826ac6e5d682500 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Wed, 20 Dec 2023 01:56:35 +0200 Subject: [PATCH 51/82] Simplify extras summary settings --- public/scripts/extensions/memory/index.js | 115 +++------------------- 1 file changed, 13 insertions(+), 102 deletions(-) diff --git a/public/scripts/extensions/memory/index.js b/public/scripts/extensions/memory/index.js index f3373ddad..a8934f5c4 100644 --- a/public/scripts/extensions/memory/index.js +++ b/public/scripts/extensions/memory/index.js @@ -5,6 +5,7 @@ import { is_group_generating, selected_group } from '../../group-chats.js'; import { registerSlashCommand } from '../../slash-commands.js'; import { loadMovingUIState } from '../../power-user.js'; import { dragElement } from '../../RossAscends-mods.js'; +import { getTextTokens, tokenizers } from 
'../../tokenizers.js'; export { MODULE_NAME }; const MODULE_NAME = '1_memory'; @@ -42,26 +43,6 @@ const defaultPrompt = '[Pause your roleplay. Summarize the most important facts const defaultTemplate = '[Summary: {{summary}}]'; const defaultSettings = { - minLongMemory: 16, - maxLongMemory: 1024, - longMemoryLength: 128, - shortMemoryLength: 512, - minShortMemory: 128, - maxShortMemory: 1024, - shortMemoryStep: 16, - longMemoryStep: 8, - repetitionPenaltyStep: 0.05, - repetitionPenalty: 1.2, - maxRepetitionPenalty: 2.0, - minRepetitionPenalty: 1.0, - temperature: 1.0, - minTemperature: 0.1, - maxTemperature: 2.0, - temperatureStep: 0.05, - lengthPenalty: 1, - minLengthPenalty: -4, - maxLengthPenalty: 4, - lengthPenaltyStep: 0.1, memoryFrozen: false, SkipWIAN: false, source: summary_sources.extras, @@ -95,11 +76,6 @@ function loadSettings() { } $('#summary_source').val(extension_settings.memory.source).trigger('change'); - $('#memory_long_length').val(extension_settings.memory.longMemoryLength).trigger('input'); - $('#memory_short_length').val(extension_settings.memory.shortMemoryLength).trigger('input'); - $('#memory_repetition_penalty').val(extension_settings.memory.repetitionPenalty).trigger('input'); - $('#memory_temperature').val(extension_settings.memory.temperature).trigger('input'); - $('#memory_length_penalty').val(extension_settings.memory.lengthPenalty).trigger('input'); $('#memory_frozen').prop('checked', extension_settings.memory.memoryFrozen).trigger('input'); $('#memory_skipWIAN').prop('checked', extension_settings.memory.SkipWIAN).trigger('input'); $('#memory_prompt').val(extension_settings.memory.prompt).trigger('input'); @@ -126,51 +102,6 @@ function switchSourceControls(value) { }); } -function onMemoryShortInput() { - const value = $(this).val(); - extension_settings.memory.shortMemoryLength = Number(value); - $('#memory_short_length_tokens').text(value); - saveSettingsDebounced(); - - // Don't let long buffer be bigger than short - if 
(extension_settings.memory.longMemoryLength > extension_settings.memory.shortMemoryLength) { - $('#memory_long_length').val(extension_settings.memory.shortMemoryLength).trigger('input'); - } -} - -function onMemoryLongInput() { - const value = $(this).val(); - extension_settings.memory.longMemoryLength = Number(value); - $('#memory_long_length_tokens').text(value); - saveSettingsDebounced(); - - // Don't let long buffer be bigger than short - if (extension_settings.memory.longMemoryLength > extension_settings.memory.shortMemoryLength) { - $('#memory_short_length').val(extension_settings.memory.longMemoryLength).trigger('input'); - } -} - -function onMemoryRepetitionPenaltyInput() { - const value = $(this).val(); - extension_settings.memory.repetitionPenalty = Number(value); - $('#memory_repetition_penalty_value').text(extension_settings.memory.repetitionPenalty.toFixed(2)); - saveSettingsDebounced(); -} - -function onMemoryTemperatureInput() { - const value = $(this).val(); - extension_settings.memory.temperature = Number(value); - $('#memory_temperature_value').text(extension_settings.memory.temperature.toFixed(2)); - saveSettingsDebounced(); -} - -function onMemoryLengthPenaltyInput() { - const value = $(this).val(); - extension_settings.memory.lengthPenalty = Number(value); - $('#memory_length_penalty_value').text(extension_settings.memory.lengthPenalty.toFixed(2)); - saveSettingsDebounced(); -} - function onMemoryFrozenInput() { const value = Boolean($(this).prop('checked')); extension_settings.memory.memoryFrozen = value; @@ -444,33 +375,36 @@ async function summarizeChatExtras(context) { const longMemory = getLatestMemoryFromChat(chat); const reversedChat = chat.slice().reverse(); reversedChat.shift(); - let memoryBuffer = []; + const memoryBuffer = []; + const CONTEXT_SIZE = 1024 - 64; - for (let mes of reversedChat) { + for (const message of reversedChat) { // we reached the point of latest memory - if (longMemory && mes.extra && mes.extra.memory == 
longMemory) { + if (longMemory && message.extra && message.extra.memory == longMemory) { break; } // don't care about system - if (mes.is_system) { + if (message.is_system) { continue; } // determine the sender's name - const name = mes.is_user ? (context.name1 ?? 'You') : (mes.force_avatar ? mes.name : context.name2); - const entry = `${name}:\n${mes['mes']}`; + const entry = `${message.name}:\n${message.mes}`; memoryBuffer.push(entry); // check if token limit was reached - if (context.getTokenCount(getMemoryString()) >= extension_settings.memory.shortMemoryLength) { + const tokens = getTextTokens(tokenizers.GPT2, getMemoryString()).length; + if (tokens >= CONTEXT_SIZE) { break; } } const resultingString = getMemoryString(); + const resultingTokens = getTextTokens(tokenizers.GPT2, resultingString).length; - if (context.getTokenCount(resultingString) < extension_settings.memory.shortMemoryLength) { + if (!resultingString || resultingTokens < CONTEXT_SIZE) { + console.debug('Not enough context to summarize'); return; } @@ -488,13 +422,7 @@ async function summarizeChatExtras(context) { }, body: JSON.stringify({ text: resultingString, - params: { - min_length: extension_settings.memory.longMemoryLength * 0, // testing how it behaves 0 min length - max_length: extension_settings.memory.longMemoryLength, - repetition_penalty: extension_settings.memory.repetitionPenalty, - temperature: extension_settings.memory.temperature, - length_penalty: extension_settings.memory.lengthPenalty, - }, + params: {}, }), }); @@ -623,11 +551,6 @@ function setupListeners() { //setup shared listeners for popout and regular ext menu $('#memory_restore').off('click').on('click', onMemoryRestoreClick); $('#memory_contents').off('click').on('input', onMemoryContentInput); - $('#memory_long_length').off('click').on('input', onMemoryLongInput); - $('#memory_short_length').off('click').on('input', onMemoryShortInput); - $('#memory_repetition_penalty').off('click').on('input', 
onMemoryRepetitionPenaltyInput); - $('#memory_temperature').off('click').on('input', onMemoryTemperatureInput); - $('#memory_length_penalty').off('click').on('input', onMemoryLengthPenaltyInput); $('#memory_frozen').off('click').on('input', onMemoryFrozenInput); $('#memory_skipWIAN').off('click').on('input', onMemorySkipWIANInput); $('#summary_source').off('click').on('change', onSummarySourceChange); @@ -720,18 +643,6 @@ jQuery(function () { If both sliders are non-zero, then both will trigger summary updates a their respective intervals.
    -
    - - - - - - - - - - -
    From c212a71425003dd88cc544840bc9d1cc95479274 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Wed, 20 Dec 2023 15:51:00 +0200 Subject: [PATCH 52/82] Fix ignore list of preset manager --- default/config.yaml | 2 +- public/scripts/preset-manager.js | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/default/config.yaml b/default/config.yaml index edee81009..48fa68cec 100644 --- a/default/config.yaml +++ b/default/config.yaml @@ -55,7 +55,7 @@ openai: # Will send a random user ID to OpenAI completion API randomizeUserId: false # If not empty, will add this as a system message to the start of every caption completion prompt - # Example: "Perform the instructions to the best of your ability.\n\n" (for LLaVA) + # Example: "Perform the instructions to the best of your ability.\n" (for LLaVA) # Not used in image inlining mode captionSystemPrompt: "" # -- DEEPL TRANSLATION CONFIGURATION -- diff --git a/public/scripts/preset-manager.js b/public/scripts/preset-manager.js index 3a7a48907..3f9334f00 100644 --- a/public/scripts/preset-manager.js +++ b/public/scripts/preset-manager.js @@ -304,7 +304,12 @@ class PresetManager { 'streaming_kobold', 'enabled', 'seed', + 'legacy_api', 'mancer_model', + 'togetherai_model', + 'ollama_model', + 'server_urls', + 'type', ]; const settings = Object.assign({}, getSettingsByApiId(this.apiId)); From 041b9d4b017aee0ef86430991e1df98101f2b02b Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Wed, 20 Dec 2023 17:03:37 +0200 Subject: [PATCH 53/82] Add style sanitizer to message renderer --- public/lib/css-parser.map | 1 + public/lib/css-parser.mjs | 765 ++++++++++++++++++++++++++++++++++++++ public/script.js | 34 +- public/scripts/chats.js | 56 +++ 4 files changed, 849 insertions(+), 7 deletions(-) create mode 100644 public/lib/css-parser.map create mode 100644 public/lib/css-parser.mjs diff --git a/public/lib/css-parser.map 
b/public/lib/css-parser.map new file mode 100644 index 000000000..bcaaee2ef --- /dev/null +++ b/public/lib/css-parser.map @@ -0,0 +1 @@ +{"mappings":";;;;;;;;;;;;;AEAe,uDAA4B;IAOzC,YACE,QAAgB,EAChB,GAAW,EACX,MAAc,EACd,MAAc,EACd,GAAW,CACX;QACA,KAAK,CAAC,WAAW,MAAM,SAAS,MAAM,SAAS,OAAO;QACtD,IAAI,CAAC,MAAM,GAAG;QACd,IAAI,CAAC,QAAQ,GAAG;QAChB,IAAI,CAAC,IAAI,GAAG;QACZ,IAAI,CAAC,MAAM,GAAG;QACd,IAAI,CAAC,MAAM,GAAG;IAChB;AACF;;;;;;;;ACrBA;;CAEC,GACc;IAKb,YACE,KAAqC,EACrC,GAAmC,EACnC,MAAc,CACd;QACA,IAAI,CAAC,KAAK,GAAG;QACb,IAAI,CAAC,GAAG,GAAG;QACX,IAAI,CAAC,MAAM,GAAG;IAChB;AACF;;;;;;;UCdY;;;;;;;;;;;;;;;;;;;GAAA,8CAAA;;;AHuBZ,0CAA0C;AAC1C,yEAAyE;AACzE,gEAAgE;AAChE,+BAA+B;AAC/B,MAAM,kCAAY;AAEX,MAAM,4CAAQ,CACnB,KACA;IAEA,UAAU,WAAW,CAAC;IAEtB;;GAEC,GACD,IAAI,SAAS;IACb,IAAI,SAAS;IAEb;;GAEC,GACD,SAAS,eAAe,GAAW;QACjC,MAAM,QAAQ,IAAI,KAAK,CAAC;QACxB,IAAI,OAAO,UAAU,MAAM,MAAM;QACjC,MAAM,IAAI,IAAI,WAAW,CAAC;QAC1B,SAAS,CAAC,IAAI,IAAI,MAAM,GAAG,IAAI,SAAS,IAAI,MAAM;IACpD;IAEA;;GAEC,GACD,SAAS;QACP,MAAM,QAAQ;YAAC,MAAM;YAAQ,QAAQ;QAAM;QAC3C,OAAO,SACL,IAA0B;YAEzB,KAAY,QAAQ,GAAG,IAAI,CAAA,GAAA,wCAAO,EACjC,OACA;gBAAC,MAAM;gBAAQ,QAAQ;YAAM,GAC7B,SAAS,UAAU;YAErB;YACA,OAAO;QACT;IACF;IAEA;;GAEC,GACD,MAAM,aAAmC,EAAE;IAE3C,SAAS,MAAM,GAAW;QACxB,MAAM,MAAM,IAAI,CAAA,GAAA,wCAAY,EAC1B,SAAS,UAAU,IACnB,KACA,QACA,QACA;QAGF,IAAI,SAAS,QACX,WAAW,IAAI,CAAC;aAEhB,MAAM;IAEV;IAEA;;GAEC,GACD,SAAS;QACP,MAAM,YAAY;QAElB,MAAM,SAA2B;YAC/B,MAAM,CAAA,GAAA,yCAAO,EAAE,UAAU;YACzB,YAAY;gBACV,QAAQ,SAAS;gBACjB,OAAO;gBACP,eAAe;YACjB;QACF;QAEA,OAAO;IACT;IAEA;;GAEC,GACD,SAAS;QACP,OAAO,MAAM;IACf;IAEA;;GAEC,GACD,SAAS;QACP,OAAO,MAAM;IACf;IAEA;;GAEC,GACD,SAAS;QACP,IAAI;QACJ,MAAM,QAA0C,EAAE;QAClD;QACA,SAAS;QACT,MAAO,IAAI,MAAM,IAAI,IAAI,MAAM,CAAC,OAAO,OAAQ,CAAA,OAAO,YAAY,MAAK,EACrE,IAAI,MAAM;YACR,MAAM,IAAI,CAAC;YACX,SAAS;QACX;QAEF,OAAO;IACT;IAEA;;GAEC,GACD,SAAS,MAAM,EAAU;QACvB,MAAM,IAAI,GAAG,IAAI,CAAC;QAClB,IAAI,CAAC,GACH;QAEF,MAAM,MAAM,CAAC,CAAC,EAAE;QAChB,eAAe;QACf,MAAM,IAAI,KAAK,CAAC,IAAI,MAAM;QAC1B,OAAO;IACT;IAEA;;GAEC,GACD,SAAS;QACP,MAA
M;IACR;IAEA;;GAEC,GACD,SAAS,SACP,KAAiC;QAEjC,IAAI;QACJ,QAAQ,SAAS,EAAE;QACnB,MAAQ,IAAI,UACV,IAAI,GACF,MAAM,IAAI,CAAC;QAGf,OAAO;IACT;IAEA;;GAEC,GACD,SAAS;QACP,MAAM,MAAM;QACZ,IAAI,QAAQ,IAAI,MAAM,CAAC,MAAM,QAAQ,IAAI,MAAM,CAAC,IAC9C;QAGF,MAAM,IAAI,MAAM;QAChB,IAAI,CAAC,GACH,OAAO,MAAM;QAGf,OAAO,IAAmB;YACxB,MAAM,CAAA,GAAA,yCAAO,EAAE,OAAO;YACtB,SAAS,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG;QACzB;IACF;IAEA,SAAS,sBACP,GAAW,EACX,KAAa,EACb,KAAa;QAEb,IAAI,MAAM,QAAQ;QAClB,IAAI,QAAQ;QACZ,IAAI,mBAAmB,IAAI,OAAO,CAAC,KAAK;QACxC,MAAO,CAAC,SAAS,qBAAqB,GAAI;YACxC,MAAM,kBAAkB,IAAI,OAAO,CAAC,KAAK;YACzC,IAAI,oBAAoB,MAAM,kBAAkB,kBAAkB;gBAChE,MAAM,aAAa,sBACjB,KACA,kBAAkB,GAClB,QAAQ;gBAEV,MAAM,aAAa;gBACnB,mBAAmB,IAAI,OAAO,CAAC,KAAK;YACtC,OACE,QAAQ;QAEZ;QACA,IAAI,SAAS,qBAAqB,IAChC,OAAO;aAEP,OAAO;IAEX;IAEA;;GAEC,GACD,SAAS;QACP,MAAM,IAAI,MAAM;QAChB,IAAI,CAAC,GACH;QAGF,8BAA8B;QAC9B,IAAI,MAAM,2BAAK,CAAC,CAAC,EAAE,EAAE,OAAO,CAAC,iCAAW;QAExC,0FAA0F;QAC1F,IAAI,IAAI,OAAO,CAAC,SAAS,IACvB,OAAO;YAAC;SAAI;QAGd,iDAAiD;QACjD,IAAI,MAAM;QACV,IAAI,mBAAmB,IAAI,OAAO,CAAC,KAAK;QACxC,MAAO,qBAAqB,GAAI;YAC9B,MAAM,mBAAmB,sBAAsB,KAAK,kBAAkB;YACtE,IAAI,qBAAqB,IACvB;YAEF,MAAM,mBAAmB;YACzB,MACE,IAAI,SAAS,CAAC,GAAG,oBACjB,IACG,SAAS,CAAC,kBAAkB,kBAC5B,OAAO,CAAC,MAAM,YACjB,IAAI,SAAS,CAAC;YAChB,mBAAmB,IAAI,OAAO,CAAC,KAAK;QACtC;QAEA,yCAAyC;QACzC,MAAM,GACJ;;;;;;;;;;;;;;;OAeC,IACA,OAAO,CAAC,wBAAwB,CAAA,IAAK,EAAE,OAAO,CAAC,MAAM;QAExD,uDAAuD;QACvD,OACE,GACE,4BAA4B;SAC3B,KAAK,CAAC,IACP,6BAA6B;SAC5B,GAAG,CAAC,CAAA;YACH,OAAO,2BAAK,EAAE,OAAO,CAAC,WAAW;QACnC;IAEN;IAEA;;GAEC,GACD,SAAS;QACP,MAAM,MAAM;QAEZ,OAAO;QACP,MAAM,YAAY,MAAM;QACxB,IAAI,CAAC,WACH;QAEF,MAAM,YAAY,2BAAK,SAAS,CAAC,EAAE;QAEnC,IAAI;QACJ,IAAI,CAAC,MAAM,UACT,OAAO,MAAM;QAGf,MAAM;QACN,MAAM,MAAM,MAAM;QAElB,MAAM,MAAM,IAAuB;YACjC,MAAM,CAAA,GAAA,yCAAO,EAAE,WAAW;YAC1B,UAAU,UAAU,OAAO,CAAC,iCAAW;YACvC,OAAO,MAAM,2BAAK,GAAG,CAAC,EAAE,EAAE,OAAO,CAAC,iCAAW,MAAM;QACrD;QAEA,IAAI;QACJ,MAAM;QAEN,OAAO;IACT;IAEA;;GAEC,GACD,SAAS;QACP,MAAM,QAAkD,EAAE;QAE1D,IAAI,CAAC,QACH,OAAO,MAAM;QAEf,SAAS;QAET,eAAe;
QACf,IAAI;QACJ,MAAQ,OAAO,cACb,IAAI,MAAM;YACR,MAAM,IAAI,CAAC;YACX,SAAS;QACX;QAGF,IAAI,CAAC,SACH,OAAO,MAAM;QAEf,OAAO;IACT;IAEA;;GAEC,GACD,SAAS;QACP,IAAI;QACJ,MAAM,OAAO,EAAE;QACf,MAAM,MAAM;QAEZ,MAAQ,IAAI,MAAM,uCAAyC;YACzD,KAAK,IAAI,CAAC,CAAC,CAAC,EAAE;YACd,MAAM;QACR;QAEA,IAAI,CAAC,KAAK,MAAM,EACd;QAGF,OAAO,IAAoB;YACzB,MAAM,CAAA,GAAA,yCAAO,EAAE,QAAQ;YACvB,QAAQ;YACR,cAAc,kBAAkB,EAAE;QACpC;IACF;IAEA;;GAEC,GACD,SAAS;QACP,MAAM,MAAM;QACZ,MAAM,KAAK,MAAM;QAEjB,IAAI,CAAC,IACH;QAEF,MAAM,SAAS,EAAE,CAAC,EAAE;QAEpB,aAAa;QACb,MAAM,KAAK,MAAM;QACjB,IAAI,CAAC,IACH,OAAO,MAAM;QAEf,MAAM,OAAO,EAAE,CAAC,EAAE;QAElB,IAAI,CAAC,QACH,OAAO,MAAM;QAGf,IAAI;QACJ,IAAI,SAAgD;QACpD,MAAQ,QAAQ,WAAa;YAC3B,OAAO,IAAI,CAAC;YACZ,SAAS,OAAO,MAAM,CAAC;QACzB;QAEA,IAAI,CAAC,SACH,OAAO,MAAM;QAGf,OAAO,IAAqB;YAC1B,MAAM,CAAA,GAAA,yCAAO,EAAE,SAAS;YACxB,MAAM;YACN,QAAQ;YACR,WAAW;QACb;IACF;IAEA;;GAEC,GACD,SAAS;QACP,MAAM,MAAM;QACZ,MAAM,IAAI,MAAM;QAEhB,IAAI,CAAC,GACH;QAEF,MAAM,WAAW,2BAAK,CAAC,CAAC,EAAE;QAE1B,IAAI,CAAC,QACH,OAAO,MAAM;QAGf,MAAM,QAAQ,WAAyB,MAAM,CAAC;QAE9C,IAAI,CAAC,SACH,OAAO,MAAM;QAGf,OAAO,IAAoB;YACzB,MAAM,CAAA,GAAA,yCAAO,EAAE,QAAQ;YACvB,UAAU;YACV,OAAO;QACT;IACF;IAEA;;GAEC,GACD,SAAS;QACP,MAAM,MAAM;QACZ,MAAM,IAAI,MAAM;QAEhB,IAAI,CAAC,GACH;QAGF,IAAI,CAAC,QACH,OAAO,MAAM;QAGf,MAAM,QAAQ,WAAyB,MAAM,CAAC;QAE9C,IAAI,CAAC,SACH,OAAO,MAAM;QAGf,OAAO,IAAgB;YACrB,MAAM,CAAA,GAAA,yCAAO,EAAE,IAAI;YACnB,OAAO;QACT;IACF;IAEA;;GAEC,GACD,SAAS;QACP,MAAM,MAAM;QACZ,MAAM,IAAI,MAAM;QAEhB,IAAI,CAAC,GACH;QAEF,MAAM,YAAY,2BAAK,CAAC,CAAC,EAAE;QAE3B,IAAI,CAAC,QACH,OAAO,MAAM;QAGf,MAAM,QAAQ,WAAyB,MAAM,CAAC;QAE9C,IAAI,CAAC,SACH,OAAO,MAAM;QAGf,OAAO,IAAqB;YAC1B,MAAM,CAAA,GAAA,yCAAO,EAAE,SAAS;YACxB,WAAW;YACX,OAAO;QACT;IACF;IAEA;;GAEC,GACD,SAAS;QACP,MAAM,MAAM;QACZ,MAAM,IAAI,MAAM;QAEhB,IAAI,CAAC,GACH;QAEF,MAAM,QAAQ,2BAAK,CAAC,CAAC,EAAE;QAEvB,IAAI,CAAC,QAAQ;YACX,MAAM;YACN,OAAO,IAAiB;gBACtB,MAAM,CAAA,GAAA,yCAAO,EAAE,KAAK;gBACpB,OAAO;YACT;QACF;QAEA,MAAM,QAAQ,WAAyB,MAAM,CAAC;QAE9C,IAAI,CAAC,SACH,OAAO,MAAM;QAGf,OAAO,IAAiB;YACtB,MAAM,CAAA,GAAA,yCAAO,EAAE,KAA
K;YACpB,OAAO;YACP,OAAO;QACT;IACF;IAEA;;GAEC,GACD,SAAS;QACP,MAAM,MAAM;QACZ,MAAM,IAAI,MAAM;QAEhB,IAAI,CAAC,GACH;QAEF,MAAM,QAAQ,2BAAK,CAAC,CAAC,EAAE;QAEvB,IAAI,CAAC,QACH,OAAO,MAAM;QAGf,MAAM,QAAQ,WAAyB,MAAM,CAAC;QAE9C,IAAI,CAAC,SACH,OAAO,MAAM;QAGf,OAAO,IAAiB;YACtB,MAAM,CAAA,GAAA,yCAAO,EAAE,KAAK;YACpB,OAAO;YACP,OAAO;QACT;IACF;IAEA;;GAEC,GACD,SAAS;QACP,MAAM,MAAM;QACZ,MAAM,IAAI,MAAM;QAChB,IAAI,CAAC,GACH;QAGF,OAAO,IAAuB;YAC5B,MAAM,CAAA,GAAA,yCAAO,EAAE,WAAW;YAC1B,MAAM,2BAAK,CAAC,CAAC,EAAE;YACf,OAAO,2BAAK,CAAC,CAAC,EAAE;QAClB;IACF;IAEA;;GAEC,GACD,SAAS;QACP,MAAM,MAAM;QACZ,MAAM,IAAI,MAAM;QAChB,IAAI,CAAC,GACH;QAGF,MAAM,MAAM,cAAc,EAAE;QAE5B,IAAI,CAAC,QACH,OAAO,MAAM;QAEf,IAAI,QAAQ;QAEZ,eAAe;QACf,IAAI;QACJ,MAAQ,OAAO,cAAgB;YAC7B,MAAM,IAAI,CAAC;YACX,QAAQ,MAAM,MAAM,CAAC;QACvB;QAEA,IAAI,CAAC,SACH,OAAO,MAAM;QAGf,OAAO,IAAgB;YACrB,MAAM,CAAA,GAAA,yCAAO,EAAE,IAAI;YACnB,WAAW;YACX,cAAc;QAChB;IACF;IAEA;;GAEC,GACD,SAAS;QACP,MAAM,MAAM;QACZ,MAAM,IAAI,MAAM;QAChB,IAAI,CAAC,GACH;QAGF,MAAM,SAAS,2BAAK,CAAC,CAAC,EAAE;QACxB,MAAM,MAAM,2BAAK,CAAC,CAAC,EAAE;QAErB,IAAI,CAAC,QACH,OAAO,MAAM;QAGf,MAAM,QAAQ,WAAyB,MAAM,CAAC;QAE9C,IAAI,CAAC,SACH,OAAO,MAAM;QAGf,OAAO,IAAoB;YACzB,MAAM,CAAA,GAAA,yCAAO,EAAE,QAAQ;YACvB,UAAU;YACV,QAAQ;YACR,OAAO;QACT;IACF;IAEA;;GAEC,GACD,SAAS;QACP,MAAM,MAAM;QACZ,MAAM,IAAI,MAAM;QAChB,IAAI,CAAC,GACH;QAGF,IAAI,CAAC,QACH,OAAO,MAAM;QAEf,IAAI,QAAQ;QAEZ,eAAe;QACf,IAAI;QACJ,MAAQ,OAAO,cAAgB;YAC7B,MAAM,IAAI,CAAC;YACX,QAAQ,MAAM,MAAM,CAAC;QACvB;QAEA,IAAI,CAAC,SACH,OAAO,MAAM;QAGf,OAAO,IAAoB;YACzB,MAAM,CAAA,GAAA,yCAAO,EAAE,QAAQ;YACvB,cAAc;QAChB;IACF;IAEA;;GAEC,GACD,MAAM,WAAW,eAA6B;IAE9C;;GAEC,GACD,MAAM,YAAY,eAA8B;IAEhD;;GAEC,GACD,MAAM,cAAc,eAAgC;IAEpD;;GAEC,GACD,SAAS,eACP,IAAY;QAEZ,MAAM,KAAK,IAAI,OACb,OACE,OACA;QAGJ,gDAAgD;QAEhD,OAAO;YACL,MAAM,MAAM;YACZ,MAAM,IAAI,MAAM;YAChB,IAAI,CAAC,GACH;YAEF,MAAM,MAA8B;gBAAC,MAAM;YAAI;YAC/C,GAAG,CAAC,KAAK,GAAG,CAAC,CAAC,EAAE,CAAC,IAAI;YACrB,OAAO,IAAQ;QACjB;IACF;IAEA;;GAEC,GACD,SAAS;QACP,IAAI,GAAG,CAAC,EAAE,KAAK,KACb;QAGF,OACE,iBACA,aACA,mBACA,gBACA,cACA,eACA,iBACA,g
BACA,YACA,YACA,gBACA,iBACA;IAEJ;IAEA;;GAEC,GACD,SAAS;QACP,MAAM,MAAM;QACZ,MAAM,MAAM;QAEZ,IAAI,CAAC,KACH,OAAO,MAAM;QAEf;QAEA,OAAO,IAAgB;YACrB,MAAM,CAAA,GAAA,yCAAO,EAAE,IAAI;YACnB,WAAW;YACX,cAAc,kBAAkB,EAAE;QACpC;IACF;IAEA,OAAO,gCAAU;AACnB;AAEA;;CAEC,GACD,SAAS,2BAAK,GAAW;IACvB,OAAO,MAAM,IAAI,IAAI,KAAK;AAC5B;AAEA;;CAEC,GACD,SAAS,gCAAsC,GAAO,EAAE,MAAgB;IACtE,MAAM,SAAS,OAAO,OAAO,IAAI,IAAI,KAAK;IAC1C,MAAM,cAAc,SAAS,MAAM;IAEnC,IAAK,MAAM,KAAK,IAAK;QACnB,MAAM,QAAQ,GAAG,CAAC,EAAE;QACpB,IAAI,MAAM,OAAO,CAAC,QAChB,MAAM,OAAO,CAAC,CAAA;YACZ,gCAAU,GAAG;QACf;aACK,IAAI,SAAS,OAAO,UAAU,UACnC,gCAAU,OAAO;IAErB;IAEA,IAAI,QACF,OAAO,cAAc,CAAC,KAAK,UAAU;QACnC,cAAc;QACd,UAAU;QACV,YAAY;QACZ,OAAO,UAAU;IACnB;IAGF,OAAO;AACT;IAEA,2CAAe;;;;AK/wBf,MAAM;IAKJ,YAAY,OAA+C,CAAE;aAJ7D,QAAQ;aACR,cAAc;aACd,WAAW;QAGT,IAAI,OAAO,SAAS,WAAW,UAC7B,IAAI,CAAC,WAAW,GAAG,SAAS;QAE9B,IAAI,SAAS,UACX,IAAI,CAAC,QAAQ,GAAG;IAEpB;IAEA,uGAAuG;IACvG,6DAA6D;IAC7D,KAAK,GAAW,EAAE,SAA4C,EAAE;QAC9D,OAAO;IACT;IAEA;;GAEC,GACD,OAAO,KAAc,EAAE;QACrB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,IAAI;QAE3B,IAAI,OAAO;YACT,IAAI,CAAC,KAAK,IAAI;YACd,OAAO;QACT;QAEA,OAAO,MAAM,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,IAAI,CAAC,WAAW;IAChD;IAEA,MAAM,IAAoB,EAAU;QAClC,OAAQ,KAAK,IAAI;YACf,KAAK,CAAA,GAAA,yCAAO,EAAE,UAAU;gBACtB,OAAO,IAAI,CAAC,UAAU,CAAC;YACzB,KAAK,CAAA,GAAA,yCAAO,EAAE,IAAI;gBAChB,OAAO,IAAI,CAAC,IAAI,CAAC;YACnB,KAAK,CAAA,GAAA,yCAAO,EAAE,WAAW;gBACvB,OAAO,IAAI,CAAC,WAAW,CAAC;YAC1B,KAAK,CAAA,GAAA,yCAAO,EAAE,OAAO;gBACnB,OAAO,IAAI,CAAC,OAAO,CAAC;YACtB,KAAK,CAAA,GAAA,yCAAO,EAAE,SAAS;gBACrB,OAAO,IAAI,CAAC,SAAS,CAAC;YACxB,KAAK,CAAA,GAAA,yCAAO,EAAE,OAAO;gBACnB,OAAO,IAAI,CAAC,OAAO,CAAC;YACtB,KAAK,CAAA,GAAA,yCAAO,EAAE,QAAQ;gBACpB,OAAO,IAAI,CAAC,QAAQ,CAAC;YACvB,KAAK,CAAA,GAAA,yCAAO,EAAE,WAAW;gBACvB,OAAO,IAAI,CAAC,WAAW,CAAC;YAC1B,KAAK,CAAA,GAAA,yCAAO,EAAE,QAAQ;gBACpB,OAAO,IAAI,CAAC,QAAQ,CAAC;YACvB,KAAK,CAAA,GAAA,yCAAO,EAAE,IAAI;gBAChB,OAAO,IAAI,CAAC,IAAI,CAAC;YACnB,KAAK,CAAA,GAAA,yCAAO,EAAE,MAAM;gBAClB,OAAO,IAAI,CAAC,MAAM,CAAC;YACrB,KAAK,CAAA,GAAA,yCAAO,EAAE,SAAS;gBACrB,O
AAO,IAAI,CAAC,SAAS,CAAC;YACxB,KAAK,CAAA,GAAA,yCAAO,EAAE,QAAQ;gBACpB,OAAO,IAAI,CAAC,QAAQ,CAAC;YACvB,KAAK,CAAA,GAAA,yCAAO,EAAE,KAAK;gBACjB,OAAO,IAAI,CAAC,KAAK,CAAC;YACpB,KAAK,CAAA,GAAA,yCAAO,EAAE,KAAK;gBACjB,OAAO,IAAI,CAAC,KAAK,CAAC;YACpB,KAAK,CAAA,GAAA,yCAAO,EAAE,SAAS;gBACrB,OAAO,IAAI,CAAC,SAAS,CAAC;YACxB,KAAK,CAAA,GAAA,yCAAO,EAAE,IAAI;gBAChB,OAAO,IAAI,CAAC,IAAI,CAAC;YACnB,KAAK,CAAA,GAAA,yCAAO,EAAE,QAAQ;gBACpB,OAAO,IAAI,CAAC,QAAQ,CAAC;QACzB;IACF;IAEA,SAAS,KAA4B,EAAE,KAAc,EAAE;QACrD,IAAI,MAAM;QACV,QAAQ,SAAS;QAEjB,IAAK,IAAI,IAAI,GAAG,SAAS,MAAM,MAAM,EAAE,IAAI,QAAQ,IAAK;YACtD,OAAO,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE;YAC1B,IAAI,SAAS,IAAI,SAAS,GACxB,OAAO,IAAI,CAAC,IAAI,CAAC;QAErB;QAEA,OAAO;IACT;IAEA,QAAQ,IAAsB,EAAE;QAC9B,IAAI,IAAI,CAAC,QAAQ,EACf,OAAO,KAAK,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,EAAE,IAAI,CAAC;QAG1D,OAAO,IAAI,CAAC,UAAU,CAAC;IACzB;IAEA;;GAEC,GACD,WAAW,IAAsB,EAAE;QACjC,OAAO,IAAI,CAAC,QAAQ,CAAC,KAAK,UAAU,CAAC,KAAK,EAAE;IAC9C;IAEA;;GAEC,GACD,QAAQ,IAAmB,EAAE;QAC3B,IAAI,IAAI,CAAC,QAAQ,EACf,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,KAAK,QAAQ;QAEpC,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,KAAK,OAAO,KAAK,OAAO,GAAG,MAAM,KAAK,QAAQ;IAC5E;IAEA;;GAEC,GACD,UAAU,IAAqB,EAAE;QAC/B,IAAI,IAAI,CAAC,QAAQ,EACf,OACE,IAAI,CAAC,IAAI,CAAC,gBAAgB,KAAK,SAAS,EAAE,KAAK,QAAQ,IACvD,IAAI,CAAC,IAAI,CAAC,OACV,IAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,IACxB,IAAI,CAAC,IAAI,CAAC;QAGd,OACE,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,KAAK,gBAAgB,KAAK,SAAS,EAAE,KAAK,QAAQ,IACvE,IAAI,CAAC,IAAI,CAAC,SAAS,IAAI,CAAC,MAAM,CAAC,MAC/B,IAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,EAAE,UAC1B,IAAI,CAAC,IAAI,CAAC,OAAO,IAAI,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,MAAM,KAAK;IAEvD;IAEA;;GAEC,GACD,MAAM,IAAiB,EAAE;QACvB,IAAI,IAAI,CAAC,QAAQ,EACf,OACE,IAAI,CAAC,IAAI,CAAC,YAAY,KAAK,KAAK,EAAE,KAAK,QAAQ,IAC9C,CAAA,KAAK,KAAK,GACP,IAAI,CAAC,IAAI,CAAC,OACV,IAAI,CAAC,QAAQ,CAAmB,KAAK,KAAK,IAC1C,IAAI,CAAC,IAAI,CAAC,OACV,GAAE;QAGV,OACE,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,KAAK,YAAY,KAAK,KAAK,EAAE,KAAK,QAAQ,IAC9D,CAAA,KAAK,KAAK,GACP,IAAI,CAAC,IAAI,CAAC,SAAS,IAAI,CAAC,MAAM,C
AAC,MAC/B,IAAI,CAAC,QAAQ,CAAmB,KAAK,KAAK,EAAE,UAC5C,IAAI,CAAC,IAAI,CAAC,OAAO,IAAI,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,MAAM,KAAK,OACnD,GAAE;IAEV;IAEA;;GAEC,GACD,OAAO,IAAkB,EAAE;QACzB,OAAO,IAAI,CAAC,IAAI,CAAC,aAAa,KAAK,MAAM,GAAG,KAAK,KAAK,QAAQ;IAChE;IAEA;;GAEC,GACD,MAAM,IAAiB,EAAE;QACvB,IAAI,IAAI,CAAC,QAAQ,EACf,OACE,IAAI,CAAC,IAAI,CAAC,YAAY,KAAK,KAAK,EAAE,KAAK,QAAQ,IAC/C,IAAI,CAAC,IAAI,CAAC,OACV,IAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,IACxB,IAAI,CAAC,IAAI,CAAC;QAGd,OACE,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,KAAK,YAAY,KAAK,KAAK,EAAE,KAAK,QAAQ,IAC/D,IAAI,CAAC,IAAI,CAAC,SAAS,IAAI,CAAC,MAAM,CAAC,MAC/B,IAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,EAAE,UAC1B,IAAI,CAAC,IAAI,CAAC,OAAO,IAAI,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,MAAM,KAAK;IAEvD;IAEA;;GAEC,GACD,SAAS,IAAoB,EAAE;QAC7B,MAAM,MAAM,MAAO,CAAA,KAAK,MAAM,IAAI,EAAC,IAAK,cAAc,KAAK,QAAQ;QACnE,IAAI,IAAI,CAAC,QAAQ,EACf,OACE,IAAI,CAAC,IAAI,CAAC,KAAK,KAAK,QAAQ,IAC5B,IAAI,CAAC,IAAI,CAAC,OACV,IAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,IACxB,IAAI,CAAC,IAAI,CAAC;QAGd,OACE,IAAI,CAAC,IAAI,CAAC,KAAK,KAAK,QAAQ,IAC5B,IAAI,CAAC,IAAI,CAAC,UAAe,IAAI,CAAC,MAAM,CAAC,MACrC,IAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,EAAE,UAC1B,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM;IAEhC;IAEA;;GAEC,GACD,QAAQ,IAAmB,EAAE;QAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,cAAc,KAAK,OAAO,GAAG,KAAK,KAAK,QAAQ;IAClE;IAEA;;GAEC,GACD,UAAU,IAAqB,EAAE;QAC/B,OAAO,IAAI,CAAC,IAAI,CAAC,gBAAgB,KAAK,SAAS,GAAG,KAAK,KAAK,QAAQ;IACtE;IAEA;;GAEC,GACD,SAAS,IAAoB,EAAE;QAC7B,IAAI,IAAI,CAAC,QAAQ,EACf,OACE,IAAI,CAAC,IAAI,CAAC,eAAe,KAAK,QAAQ,EAAE,KAAK,QAAQ,IACrD,IAAI,CAAC,IAAI,CAAC,OACV,IAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,IACxB,IAAI,CAAC,IAAI,CAAC;QAGd,OACE,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,KAAK,eAAe,KAAK,QAAQ,EAAE,KAAK,QAAQ,IACrE,IAAI,CAAC,IAAI,CAAC,SAAS,IAAI,CAAC,MAAM,CAAC,MAC/B,IAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,EAAE,UAC1B,IAAI,CAAC,IAAI,CAAC,OAAO,IAAI,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,MAAM,KAAK;IAEvD;IAEA;;GAEC,GACD,UAAU,IAAqB,EAAE;QAC/B,IAAI,IAAI,CAAC,QAAQ,EACf,OACE,IAAI,CAAC,IAAI,CACP,MAAO,CAAA,KAAK,MAAM,IAAI,EAAC,IAAK,eAAe,KAAK,IAAI,EACpD,KAAK,QAAQ,IAEf,IAAI,CAAC,IAAI,CAAC,O
ACV,IAAI,CAAC,QAAQ,CAAC,KAAK,SAAS,IAC5B,IAAI,CAAC,IAAI,CAAC;QAGd,OACE,IAAI,CAAC,IAAI,CACP,MAAO,CAAA,KAAK,MAAM,IAAI,EAAC,IAAK,eAAe,KAAK,IAAI,EACpD,KAAK,QAAQ,IAEf,IAAI,CAAC,IAAI,CAAC,SAAS,IAAI,CAAC,MAAM,CAAC,MAC/B,IAAI,CAAC,QAAQ,CAAC,KAAK,SAAS,EAAE,QAC9B,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM;IAEhC;IAEA;;GAEC,GACD,SAAS,IAAoB,EAAE;QAC7B,MAAM,QAAQ,KAAK,YAAY;QAC/B,IAAI,IAAI,CAAC,QAAQ,EACf,OACE,IAAI,CAAC,IAAI,CAAC,KAAK,MAAM,CAAC,IAAI,CAAC,MAAM,KAAK,QAAQ,IAC9C,IAAI,CAAC,IAAI,CAAC,OACV,IAAI,CAAC,QAAQ,CAAC,SACd,IAAI,CAAC,IAAI,CAAC;QAId,OACE,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,MACrB,IAAI,CAAC,IAAI,CAAC,KAAK,MAAM,CAAC,IAAI,CAAC,OAAO,KAAK,QAAQ,IAC/C,IAAI,CAAC,IAAI,CAAC,SAAS,IAAI,CAAC,MAAM,CAAC,MAC/B,IAAI,CAAC,QAAQ,CAAC,OAAO,QACrB,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,OAAO,IAAI,CAAC,MAAM,KAAK;IAEvD;IAEA;;GAEC,GACD,KAAK,IAAgB,EAAE;QACrB,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,MAAM,MAAM,KAAK,SAAS,CAAC,MAAM,GAAG,KAAK,SAAS,CAAC,IAAI,CAAC,QAAQ;YAEhE,OACE,IAAI,CAAC,IAAI,CAAC,WAAW,KAAK,KAAK,QAAQ,IACvC,IAAI,CAAC,IAAI,CAAC,OACV,IAAI,CAAC,QAAQ,CAAC,KAAK,YAAY,IAC/B,IAAI,CAAC,IAAI,CAAC;QAEd;QACA,MAAM,MAAM,KAAK,SAAS,CAAC,MAAM,GAAG,KAAK,SAAS,CAAC,IAAI,CAAC,QAAQ,MAAM;QAEtE,OACE,IAAI,CAAC,IAAI,CAAC,WAAW,KAAK,KAAK,QAAQ,IACvC,IAAI,CAAC,IAAI,CAAC,SACV,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MACtB,IAAI,CAAC,QAAQ,CAAC,KAAK,YAAY,EAAE,QACjC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OACtB,IAAI,CAAC,IAAI,CAAC;IAEd;IAEA;;GAEC,GACD,SAAS,IAAoB,EAAE;QAC7B,IAAI,IAAI,CAAC,QAAQ,EACf,OACE,IAAI,CAAC,IAAI,CAAC,cAAc,KAAK,QAAQ,IACrC,IAAI,CAAC,IAAI,CAAC,OACV,IAAI,CAAC,QAAQ,CAAC,KAAK,YAAY,IAC/B,IAAI,CAAC,IAAI,CAAC;QAGd,OACE,IAAI,CAAC,IAAI,CAAC,eAAe,KAAK,QAAQ,IACtC,IAAI,CAAC,IAAI,CAAC,SACV,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MACtB,IAAI,CAAC,QAAQ,CAAC,KAAK,YAAY,EAAE,QACjC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OACtB,IAAI,CAAC,IAAI,CAAC;IAEd;IAEA;;GAEC,GACD,KAAK,IAAgB,EAAE;QACrB,IAAI,IAAI,CAAC,QAAQ,EACf,OACE,IAAI,CAAC,IAAI,CAAC,SAAS,KAAK,QAAQ,IAChC,IAAI,CAAC,IAAI,CAAC,OACV,IAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,IACxB,IAAI,
CAAC,IAAI,CAAC;QAGd,OACE,IAAI,CAAC,IAAI,CAAC,SAAS,KAAK,QAAQ,IAChC,IAAI,CAAC,IAAI,CAAC,SAAS,IAAI,CAAC,MAAM,CAAC,MAC/B,IAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,EAAE,UAC1B,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM;IAEhC;IAEA;;GAEC,GACD,YAAY,IAAuB,EAAE;QACnC,OAAO,IAAI,CAAC,IAAI,CACd,mBAAmB,KAAK,IAAI,GAAG,MAAM,KAAK,KAAK,GAAG,KAClD,KAAK,QAAQ;IAEjB;IAEA;;GAEC,GACD,KAAK,IAAgB,EAAE;QACrB,MAAM,QAAQ,KAAK,YAAY;QAC/B,IAAI,CAAC,MAAM,MAAM,EACf,OAAO;QAGT,IAAI,IAAI,CAAC,QAAQ,EACf,OACE,IAAI,CAAC,IAAI,CAAC,KAAK,SAAS,CAAC,IAAI,CAAC,MAAM,KAAK,QAAQ,IACjD,IAAI,CAAC,IAAI,CAAC,OACV,IAAI,CAAC,QAAQ,CAAC,SACd,IAAI,CAAC,IAAI,CAAC;QAGd,MAAM,SAAS,IAAI,CAAC,MAAM;QAE1B,OACE,IAAI,CAAC,IAAI,CACP,KAAK,SAAS,CACX,GAAG,CAAC,CAAA;YACH,OAAO,SAAS;QAClB,GACC,IAAI,CAAC,QACR,KAAK,QAAQ,IAEf,IAAI,CAAC,IAAI,CAAC,UACV,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MACtB,IAAI,CAAC,QAAQ,CAAC,OAAO,QACrB,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OACtB,IAAI,CAAC,IAAI,CAAC,OAAO,IAAI,CAAC,MAAM,KAAK;IAErC;IAEA;;GAEC,GACD,YAAY,IAAuB,EAAE;QACnC,IAAI,IAAI,CAAC,QAAQ,EACf,OACE,IAAI,CAAC,IAAI,CAAC,KAAK,QAAQ,GAAG,MAAM,KAAK,KAAK,EAAE,KAAK,QAAQ,IACzD,IAAI,CAAC,IAAI,CAAC;QAGd,OACE,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,MACrB,IAAI,CAAC,IAAI,CAAC,KAAK,QAAQ,GAAG,OAAO,KAAK,KAAK,EAAE,KAAK,QAAQ,IAC1D,IAAI,CAAC,IAAI,CAAC;IAEd;AACF;IAEA,2CAAe;;;ID1bf,2CAAe,CACb,MACA;IAEA,MAAM,WAAW,IAAI,CAAA,GAAA,wCAAO,EAAE,WAAW,CAAC;IAC1C,OAAO,SAAS,OAAO,CAAC;AAC1B;;;;;;ALPO,MAAM,4CAAQ,CAAA,GAAA,wCAAM;AACpB,MAAM,4CAAY,CAAA,GAAA,wCAAU;IAInC,2CAAe;WAAC;eAAO;AAAS","sources":["src/index.ts","src/parse/index.ts","src/CssParseError.ts","src/CssPosition.ts","src/type.ts","src/stringify/index.ts","src/stringify/compiler.ts"],"sourcesContent":["import {default as parseFn} from './parse';\nimport {default as stringifyFn} from './stringify';\nexport const parse = parseFn;\nexport const stringify = stringifyFn;\nexport * from './type';\nexport * from './CssParseError';\nexport * from './CssPosition';\nexport default {parse, stringify};\n","import CssParseError from 
'../CssParseError';\nimport Position from '../CssPosition';\nimport {\n CssAtRuleAST,\n CssCharsetAST,\n CssCommentAST,\n CssCommonPositionAST,\n CssContainerAST,\n CssCustomMediaAST,\n CssDeclarationAST,\n CssDocumentAST,\n CssFontFaceAST,\n CssHostAST,\n CssImportAST,\n CssKeyframeAST,\n CssKeyframesAST,\n CssLayerAST,\n CssMediaAST,\n CssNamespaceAST,\n CssPageAST,\n CssRuleAST,\n CssStylesheetAST,\n CssSupportsAST,\n CssTypes,\n} from '../type';\n\n// http://www.w3.org/TR/CSS21/grammar.html\n// https://github.com/visionmedia/css-parse/pull/49#issuecomment-30088027\n// New rule => https://www.w3.org/TR/CSS22/syndata.html#comments\n// [^] is equivalent to [.\\n\\r]\nconst commentre = /\\/\\*[^]*?(?:\\*\\/|$)/g;\n\nexport const parse = (\n css: string,\n options?: {source?: string; silent?: boolean}\n): CssStylesheetAST => {\n options = options || {};\n\n /**\n * Positional.\n */\n let lineno = 1;\n let column = 1;\n\n /**\n * Update lineno and column based on `str`.\n */\n function updatePosition(str: string) {\n const lines = str.match(/\\n/g);\n if (lines) lineno += lines.length;\n const i = str.lastIndexOf('\\n');\n column = ~i ? 
str.length - i : column + str.length;\n }\n\n /**\n * Mark position and patch `node.position`.\n */\n function position() {\n const start = {line: lineno, column: column};\n return function (\n node: Omit\n ): T1 {\n (node as T1).position = new Position(\n start,\n {line: lineno, column: column},\n options?.source || ''\n );\n whitespace();\n return node as T1;\n };\n }\n\n /**\n * Error `msg`.\n */\n const errorsList: Array = [];\n\n function error(msg: string) {\n const err = new CssParseError(\n options?.source || '',\n msg,\n lineno,\n column,\n css\n );\n\n if (options?.silent) {\n errorsList.push(err);\n } else {\n throw err;\n }\n }\n\n /**\n * Parse stylesheet.\n */\n function stylesheet(): CssStylesheetAST {\n const rulesList = rules();\n\n const result: CssStylesheetAST = {\n type: CssTypes.stylesheet,\n stylesheet: {\n source: options?.source,\n rules: rulesList,\n parsingErrors: errorsList,\n },\n };\n\n return result;\n }\n\n /**\n * Opening brace.\n */\n function open() {\n return match(/^{\\s*/);\n }\n\n /**\n * Closing brace.\n */\n function close() {\n return match(/^}/);\n }\n\n /**\n * Parse ruleset.\n */\n function rules() {\n let node: CssRuleAST | CssAtRuleAST | void;\n const rules: Array = [];\n whitespace();\n comments(rules);\n while (css.length && css.charAt(0) !== '}' && (node = atrule() || rule())) {\n if (node) {\n rules.push(node);\n comments(rules);\n }\n }\n return rules;\n }\n\n /**\n * Match `re` and return captures.\n */\n function match(re: RegExp) {\n const m = re.exec(css);\n if (!m) {\n return;\n }\n const str = m[0];\n updatePosition(str);\n css = css.slice(str.length);\n return m;\n }\n\n /**\n * Parse whitespace.\n */\n function whitespace() {\n match(/^\\s*/);\n }\n\n /**\n * Parse comments;\n */\n function comments(\n rules?: Array\n ) {\n let c;\n rules = rules || [];\n while ((c = comment())) {\n if (c) {\n rules.push(c);\n }\n }\n return rules;\n }\n\n /**\n * Parse comment.\n */\n function comment(): CssCommentAST | 
void {\n const pos = position();\n if ('/' !== css.charAt(0) || '*' !== css.charAt(1)) {\n return;\n }\n\n const m = match(/^\\/\\*[^]*?\\*\\//);\n if (!m) {\n return error('End of comment missing');\n }\n\n return pos({\n type: CssTypes.comment,\n comment: m[0].slice(2, -2),\n });\n }\n\n function findClosingParenthese(\n str: string,\n start: number,\n depth: number\n ): number {\n let ptr = start + 1;\n let found = false;\n let closeParentheses = str.indexOf(')', ptr);\n while (!found && closeParentheses !== -1) {\n const nextParentheses = str.indexOf('(', ptr);\n if (nextParentheses !== -1 && nextParentheses < closeParentheses) {\n const nextSearch = findClosingParenthese(\n str,\n nextParentheses + 1,\n depth + 1\n );\n ptr = nextSearch + 1;\n closeParentheses = str.indexOf(')', ptr);\n } else {\n found = true;\n }\n }\n if (found && closeParentheses !== -1) {\n return closeParentheses;\n } else {\n return -1;\n }\n }\n\n /**\n * Parse selector.\n */\n function selector() {\n const m = match(/^([^{]+)/);\n if (!m) {\n return;\n }\n\n // remove comment in selector;\n let res = trim(m[0]).replace(commentre, '');\n\n // Optimisation: If there is no ',' no need to split or post-process (this is less costly)\n if (res.indexOf(',') === -1) {\n return [res];\n }\n\n // Replace all the , in the parentheses by \\u200C\n let ptr = 0;\n let startParentheses = res.indexOf('(', ptr);\n while (startParentheses !== -1) {\n const closeParentheses = findClosingParenthese(res, startParentheses, 0);\n if (closeParentheses === -1) {\n break;\n }\n ptr = closeParentheses + 1;\n res =\n res.substring(0, startParentheses) +\n res\n .substring(startParentheses, closeParentheses)\n .replace(/,/g, '\\u200C') +\n res.substring(closeParentheses);\n startParentheses = res.indexOf('(', ptr);\n }\n\n // Replace all the , in ' and \" by \\u200C\n res = res\n /**\n * replace ',' by \\u200C for data selector (div[data-lang=\"fr,de,us\"])\n *\n * Examples:\n * div[data-lang=\"fr,\\\"de,us\"]\n 
* div[data-lang='fr,\\'de,us']\n *\n * Regex logic:\n * (\"|')(?:\\\\\\1|.)*?\\1 => Handle the \" and '\n *\n * Optimization 1:\n * No greedy capture (see docs about the difference between .* and .*?)\n *\n * Optimization 2:\n * (\"|')(?:\\\\\\1|.)*?\\1 this use reference to capture group, it work faster.\n */\n .replace(/(\"|')(?:\\\\\\1|.)*?\\1/g, m => m.replace(/,/g, '\\u200C'));\n\n // Split all the left , and replace all the \\u200C by ,\n return (\n res\n // Split the selector by ','\n .split(',')\n // Replace back \\u200C by ','\n .map(s => {\n return trim(s.replace(/\\u200C/g, ','));\n })\n );\n }\n\n /**\n * Parse declaration.\n */\n function declaration(): CssDeclarationAST | void {\n const pos = position();\n\n // prop\n const propMatch = match(/^(\\*?[-#/*\\\\\\w]+(\\[[0-9a-z_-]+\\])?)\\s*/);\n if (!propMatch) {\n return;\n }\n const propValue = trim(propMatch[0]);\n\n // :\n if (!match(/^:\\s*/)) {\n return error(\"property missing ':'\");\n }\n\n // val\n const val = match(/^((?:'(?:\\\\'|.)*?'|\"(?:\\\\\"|.)*?\"|\\([^)]*?\\)|[^};])+)/);\n\n const ret = pos({\n type: CssTypes.declaration,\n property: propValue.replace(commentre, ''),\n value: val ? 
trim(val[0]).replace(commentre, '') : '',\n });\n\n // ;\n match(/^[;\\s]*/);\n\n return ret;\n }\n\n /**\n * Parse declarations.\n */\n function declarations() {\n const decls: Array = [];\n\n if (!open()) {\n return error(\"missing '{'\");\n }\n comments(decls);\n\n // declarations\n let decl;\n while ((decl = declaration())) {\n if (decl) {\n decls.push(decl);\n comments(decls);\n }\n }\n\n if (!close()) {\n return error(\"missing '}'\");\n }\n return decls;\n }\n\n /**\n * Parse keyframe.\n */\n function keyframe() {\n let m;\n const vals = [];\n const pos = position();\n\n while ((m = match(/^((\\d+\\.\\d+|\\.\\d+|\\d+)%?|[a-z]+)\\s*/))) {\n vals.push(m[1]);\n match(/^,\\s*/);\n }\n\n if (!vals.length) {\n return;\n }\n\n return pos({\n type: CssTypes.keyframe,\n values: vals,\n declarations: declarations() || [],\n });\n }\n\n /**\n * Parse keyframes.\n */\n function atkeyframes(): CssKeyframesAST | void {\n const pos = position();\n const m1 = match(/^@([-\\w]+)?keyframes\\s*/);\n\n if (!m1) {\n return;\n }\n const vendor = m1[1];\n\n // identifier\n const m2 = match(/^([-\\w]+)\\s*/);\n if (!m2) {\n return error('@keyframes missing name');\n }\n const name = m2[1];\n\n if (!open()) {\n return error(\"@keyframes missing '{'\");\n }\n\n let frame;\n let frames: Array = comments();\n while ((frame = keyframe())) {\n frames.push(frame);\n frames = frames.concat(comments());\n }\n\n if (!close()) {\n return error(\"@keyframes missing '}'\");\n }\n\n return pos({\n type: CssTypes.keyframes,\n name: name,\n vendor: vendor,\n keyframes: frames,\n });\n }\n\n /**\n * Parse supports.\n */\n function atsupports(): CssSupportsAST | void {\n const pos = position();\n const m = match(/^@supports *([^{]+)/);\n\n if (!m) {\n return;\n }\n const supports = trim(m[1]);\n\n if (!open()) {\n return error(\"@supports missing '{'\");\n }\n\n const style = comments().concat(rules());\n\n if (!close()) {\n return error(\"@supports missing '}'\");\n }\n\n return pos({\n type: 
CssTypes.supports,\n supports: supports,\n rules: style,\n });\n }\n\n /**\n * Parse host.\n */\n function athost() {\n const pos = position();\n const m = match(/^@host\\s*/);\n\n if (!m) {\n return;\n }\n\n if (!open()) {\n return error(\"@host missing '{'\");\n }\n\n const style = comments().concat(rules());\n\n if (!close()) {\n return error(\"@host missing '}'\");\n }\n\n return pos({\n type: CssTypes.host,\n rules: style,\n });\n }\n\n /**\n * Parse container.\n */\n function atcontainer(): CssContainerAST | void {\n const pos = position();\n const m = match(/^@container *([^{]+)/);\n\n if (!m) {\n return;\n }\n const container = trim(m[1]);\n\n if (!open()) {\n return error(\"@container missing '{'\");\n }\n\n const style = comments().concat(rules());\n\n if (!close()) {\n return error(\"@container missing '}'\");\n }\n\n return pos({\n type: CssTypes.container,\n container: container,\n rules: style,\n });\n }\n\n /**\n * Parse container.\n */\n function atlayer(): CssLayerAST | void {\n const pos = position();\n const m = match(/^@layer *([^{;@]+)/);\n\n if (!m) {\n return;\n }\n const layer = trim(m[1]);\n\n if (!open()) {\n match(/^[;\\s]*/);\n return pos({\n type: CssTypes.layer,\n layer: layer,\n });\n }\n\n const style = comments().concat(rules());\n\n if (!close()) {\n return error(\"@layer missing '}'\");\n }\n\n return pos({\n type: CssTypes.layer,\n layer: layer,\n rules: style,\n });\n }\n\n /**\n * Parse media.\n */\n function atmedia(): CssMediaAST | void {\n const pos = position();\n const m = match(/^@media *([^{]+)/);\n\n if (!m) {\n return;\n }\n const media = trim(m[1]);\n\n if (!open()) {\n return error(\"@media missing '{'\");\n }\n\n const style = comments().concat(rules());\n\n if (!close()) {\n return error(\"@media missing '}'\");\n }\n\n return pos({\n type: CssTypes.media,\n media: media,\n rules: style,\n });\n }\n\n /**\n * Parse custom-media.\n */\n function atcustommedia(): CssCustomMediaAST | void {\n const pos = position();\n 
const m = match(/^@custom-media\\s+(--\\S+)\\s*([^{;\\s][^{;]*);/);\n if (!m) {\n return;\n }\n\n return pos({\n type: CssTypes.customMedia,\n name: trim(m[1]),\n media: trim(m[2]),\n });\n }\n\n /**\n * Parse paged media.\n */\n function atpage(): CssPageAST | void {\n const pos = position();\n const m = match(/^@page */);\n if (!m) {\n return;\n }\n\n const sel = selector() || [];\n\n if (!open()) {\n return error(\"@page missing '{'\");\n }\n let decls = comments();\n\n // declarations\n let decl;\n while ((decl = declaration())) {\n decls.push(decl);\n decls = decls.concat(comments());\n }\n\n if (!close()) {\n return error(\"@page missing '}'\");\n }\n\n return pos({\n type: CssTypes.page,\n selectors: sel,\n declarations: decls,\n });\n }\n\n /**\n * Parse document.\n */\n function atdocument(): CssDocumentAST | void {\n const pos = position();\n const m = match(/^@([-\\w]+)?document *([^{]+)/);\n if (!m) {\n return;\n }\n\n const vendor = trim(m[1]);\n const doc = trim(m[2]);\n\n if (!open()) {\n return error(\"@document missing '{'\");\n }\n\n const style = comments().concat(rules());\n\n if (!close()) {\n return error(\"@document missing '}'\");\n }\n\n return pos({\n type: CssTypes.document,\n document: doc,\n vendor: vendor,\n rules: style,\n });\n }\n\n /**\n * Parse font-face.\n */\n function atfontface(): CssFontFaceAST | void {\n const pos = position();\n const m = match(/^@font-face\\s*/);\n if (!m) {\n return;\n }\n\n if (!open()) {\n return error(\"@font-face missing '{'\");\n }\n let decls = comments();\n\n // declarations\n let decl;\n while ((decl = declaration())) {\n decls.push(decl);\n decls = decls.concat(comments());\n }\n\n if (!close()) {\n return error(\"@font-face missing '}'\");\n }\n\n return pos({\n type: CssTypes.fontFace,\n declarations: decls,\n });\n }\n\n /**\n * Parse import\n */\n const atimport = _compileAtrule('import');\n\n /**\n * Parse charset\n */\n const atcharset = _compileAtrule('charset');\n\n /**\n * Parse 
namespace\n */\n const atnamespace = _compileAtrule('namespace');\n\n /**\n * Parse non-block at-rules\n */\n function _compileAtrule(\n name: string\n ): () => T1 | void {\n const re = new RegExp(\n '^@' +\n name +\n '\\\\s*((?::?[^;\\'\"]|\"(?:\\\\\\\\\"|[^\"])*?\"|\\'(?:\\\\\\\\\\'|[^\\'])*?\\')+)(?:;|$)'\n );\n\n // ^@import\\s*([^;\"']|(\"|')(?:\\\\\\2|.)*?\\2)+(;|$)\n\n return function (): T1 | void {\n const pos = position();\n const m = match(re);\n if (!m) {\n return;\n }\n const ret: Record = {type: name};\n ret[name] = m[1].trim();\n return pos(ret as unknown as T1) as T1;\n };\n }\n\n /**\n * Parse at rule.\n */\n function atrule(): CssAtRuleAST | void {\n if (css[0] !== '@') {\n return;\n }\n\n return (\n atkeyframes() ||\n atmedia() ||\n atcustommedia() ||\n atsupports() ||\n atimport() ||\n atcharset() ||\n atnamespace() ||\n atdocument() ||\n atpage() ||\n athost() ||\n atfontface() ||\n atcontainer() ||\n atlayer()\n );\n }\n\n /**\n * Parse rule.\n */\n function rule() {\n const pos = position();\n const sel = selector();\n\n if (!sel) {\n return error('selector missing');\n }\n comments();\n\n return pos({\n type: CssTypes.rule,\n selectors: sel,\n declarations: declarations() || [],\n });\n }\n\n return addParent(stylesheet());\n};\n\n/**\n * Trim `str`.\n */\nfunction trim(str: string) {\n return str ? str.trim() : '';\n}\n\n/**\n * Adds non-enumerable parent node reference to each node.\n */\nfunction addParent(obj: T1, parent?: unknown): T1 {\n const isNode = obj && typeof obj.type === 'string';\n const childParent = isNode ? 
obj : parent;\n\n for (const k in obj) {\n const value = obj[k];\n if (Array.isArray(value)) {\n value.forEach(v => {\n addParent(v, childParent);\n });\n } else if (value && typeof value === 'object') {\n addParent(value, childParent);\n }\n }\n\n if (isNode) {\n Object.defineProperty(obj, 'parent', {\n configurable: true,\n writable: true,\n enumerable: false,\n value: parent || null,\n });\n }\n\n return obj;\n}\n\nexport default parse;\n","export default class CssParseError extends Error {\n readonly reason: string;\n readonly filename?: string;\n readonly line: number;\n readonly column: number;\n readonly source: string;\n\n constructor(\n filename: string,\n msg: string,\n lineno: number,\n column: number,\n css: string\n ) {\n super(filename + ':' + lineno + ':' + column + ': ' + msg);\n this.reason = msg;\n this.filename = filename;\n this.line = lineno;\n this.column = column;\n this.source = css;\n }\n}\n","/**\n * Store position information for a node\n */\nexport default class Position {\n start: {line: number; column: number};\n end: {line: number; column: number};\n source?: string;\n\n constructor(\n start: {line: number; column: number},\n end: {line: number; column: number},\n source: string\n ) {\n this.start = start;\n this.end = end;\n this.source = source;\n }\n}\n","import CssParseError from './CssParseError';\nimport Position from './CssPosition';\n\nexport enum CssTypes {\n stylesheet = 'stylesheet',\n rule = 'rule',\n declaration = 'declaration',\n comment = 'comment',\n container = 'container',\n charset = 'charset',\n document = 'document',\n customMedia = 'custom-media',\n fontFace = 'font-face',\n host = 'host',\n import = 'import',\n keyframes = 'keyframes',\n keyframe = 'keyframe',\n layer = 'layer',\n media = 'media',\n namespace = 'namespace',\n page = 'page',\n supports = 'supports',\n}\n\nexport type CssCommonAST = {\n type: CssTypes;\n};\n\nexport type CssCommonPositionAST = CssCommonAST & {\n position?: Position;\n parent?: 
unknown;\n};\n\nexport type CssStylesheetAST = CssCommonAST & {\n type: CssTypes.stylesheet;\n stylesheet: {\n source?: string;\n rules: Array;\n parsingErrors?: Array;\n };\n};\n\nexport type CssRuleAST = CssCommonPositionAST & {\n type: CssTypes.rule;\n selectors: Array;\n declarations: Array;\n};\n\nexport type CssDeclarationAST = CssCommonPositionAST & {\n type: CssTypes.declaration;\n property: string;\n value: string;\n};\n\nexport type CssCommentAST = CssCommonPositionAST & {\n type: CssTypes.comment;\n comment: string;\n};\nexport type CssContainerAST = CssCommonPositionAST & {\n type: CssTypes.container;\n container: string;\n rules: Array;\n};\n\nexport type CssCharsetAST = CssCommonPositionAST & {\n type: CssTypes.charset;\n charset: string;\n};\nexport type CssCustomMediaAST = CssCommonPositionAST & {\n type: CssTypes.customMedia;\n name: string;\n media: string;\n};\nexport type CssDocumentAST = CssCommonPositionAST & {\n type: CssTypes.document;\n document: string;\n vendor?: string;\n rules: Array;\n};\nexport type CssFontFaceAST = CssCommonPositionAST & {\n type: CssTypes.fontFace;\n declarations: Array;\n};\nexport type CssHostAST = CssCommonPositionAST & {\n type: CssTypes.host;\n rules: Array;\n};\nexport type CssImportAST = CssCommonPositionAST & {\n type: CssTypes.import;\n import: string;\n};\nexport type CssKeyframesAST = CssCommonPositionAST & {\n type: CssTypes.keyframes;\n name: string;\n vendor?: string;\n keyframes: Array;\n};\nexport type CssKeyframeAST = CssCommonPositionAST & {\n type: CssTypes.keyframe;\n values: Array;\n declarations: Array;\n};\nexport type CssLayerAST = CssCommonPositionAST & {\n type: CssTypes.layer;\n layer: string;\n rules?: Array;\n};\nexport type CssMediaAST = CssCommonPositionAST & {\n type: CssTypes.media;\n media: string;\n rules: Array;\n};\nexport type CssNamespaceAST = CssCommonPositionAST & {\n type: CssTypes.namespace;\n namespace: string;\n};\nexport type CssPageAST = CssCommonPositionAST & {\n type: 
CssTypes.page;\n selectors: Array;\n declarations: Array;\n};\nexport type CssSupportsAST = CssCommonPositionAST & {\n type: CssTypes.supports;\n supports: string;\n rules: Array;\n};\n\nexport type CssAtRuleAST =\n | CssRuleAST\n | CssCommentAST\n | CssContainerAST\n | CssCharsetAST\n | CssCustomMediaAST\n | CssDocumentAST\n | CssFontFaceAST\n | CssHostAST\n | CssImportAST\n | CssKeyframesAST\n | CssLayerAST\n | CssMediaAST\n | CssNamespaceAST\n | CssPageAST\n | CssSupportsAST;\n\nexport type CssAllNodesAST =\n | CssAtRuleAST\n | CssStylesheetAST\n | CssDeclarationAST\n | CssKeyframeAST;\n","import {CssStylesheetAST} from '../type';\nimport Compiler from './compiler';\n\nexport default (\n node: CssStylesheetAST,\n options?: ConstructorParameters[0]\n) => {\n const compiler = new Compiler(options || {});\n return compiler.compile(node);\n};\n","import {\n CssAllNodesAST,\n CssCharsetAST,\n CssCommentAST,\n CssCommonPositionAST,\n CssContainerAST,\n CssCustomMediaAST,\n CssDeclarationAST,\n CssDocumentAST,\n CssFontFaceAST,\n CssHostAST,\n CssImportAST,\n CssKeyframeAST,\n CssKeyframesAST,\n CssLayerAST,\n CssMediaAST,\n CssNamespaceAST,\n CssPageAST,\n CssRuleAST,\n CssStylesheetAST,\n CssSupportsAST,\n CssTypes,\n} from '../type';\n\nclass Compiler {\n level = 0;\n indentation = ' ';\n compress = false;\n\n constructor(options?: {indent?: string; compress?: boolean}) {\n if (typeof options?.indent === 'string') {\n this.indentation = options?.indent;\n }\n if (options?.compress) {\n this.compress = true;\n }\n }\n\n // We disable no-unused-vars for _position. 
We keep position for potential reintroduction of source-map\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n emit(str: string, _position?: CssCommonPositionAST['position']) {\n return str;\n }\n\n /**\n * Increase, decrease or return current indentation.\n */\n indent(level?: number) {\n this.level = this.level || 1;\n\n if (level) {\n this.level += level;\n return '';\n }\n\n return Array(this.level).join(this.indentation);\n }\n\n visit(node: CssAllNodesAST): string {\n switch (node.type) {\n case CssTypes.stylesheet:\n return this.stylesheet(node);\n case CssTypes.rule:\n return this.rule(node);\n case CssTypes.declaration:\n return this.declaration(node);\n case CssTypes.comment:\n return this.comment(node);\n case CssTypes.container:\n return this.container(node);\n case CssTypes.charset:\n return this.charset(node);\n case CssTypes.document:\n return this.document(node);\n case CssTypes.customMedia:\n return this.customMedia(node);\n case CssTypes.fontFace:\n return this.fontFace(node);\n case CssTypes.host:\n return this.host(node);\n case CssTypes.import:\n return this.import(node);\n case CssTypes.keyframes:\n return this.keyframes(node);\n case CssTypes.keyframe:\n return this.keyframe(node);\n case CssTypes.layer:\n return this.layer(node);\n case CssTypes.media:\n return this.media(node);\n case CssTypes.namespace:\n return this.namespace(node);\n case CssTypes.page:\n return this.page(node);\n case CssTypes.supports:\n return this.supports(node);\n }\n }\n\n mapVisit(nodes: Array, delim?: string) {\n let buf = '';\n delim = delim || '';\n\n for (let i = 0, length = nodes.length; i < length; i++) {\n buf += this.visit(nodes[i]);\n if (delim && i < length - 1) {\n buf += this.emit(delim);\n }\n }\n\n return buf;\n }\n\n compile(node: CssStylesheetAST) {\n if (this.compress) {\n return node.stylesheet.rules.map(this.visit, this).join('');\n }\n\n return this.stylesheet(node);\n }\n\n /**\n * Visit stylesheet node.\n */\n stylesheet(node: 
CssStylesheetAST) {\n return this.mapVisit(node.stylesheet.rules, '\\n\\n');\n }\n\n /**\n * Visit comment node.\n */\n comment(node: CssCommentAST) {\n if (this.compress) {\n return this.emit('', node.position);\n }\n return this.emit(this.indent() + '/*' + node.comment + '*/', node.position);\n }\n\n /**\n * Visit container node.\n */\n container(node: CssContainerAST) {\n if (this.compress) {\n return (\n this.emit('@container ' + node.container, node.position) +\n this.emit('{') +\n this.mapVisit(node.rules) +\n this.emit('}')\n );\n }\n return (\n this.emit(this.indent() + '@container ' + node.container, node.position) +\n this.emit(' {\\n' + this.indent(1)) +\n this.mapVisit(node.rules, '\\n\\n') +\n this.emit('\\n' + this.indent(-1) + this.indent() + '}')\n );\n }\n\n /**\n * Visit container node.\n */\n layer(node: CssLayerAST) {\n if (this.compress) {\n return (\n this.emit('@layer ' + node.layer, node.position) +\n (node.rules\n ? this.emit('{') +\n this.mapVisit(node.rules) +\n this.emit('}')\n : ';')\n );\n }\n return (\n this.emit(this.indent() + '@layer ' + node.layer, node.position) +\n (node.rules\n ? 
this.emit(' {\\n' + this.indent(1)) +\n this.mapVisit(node.rules, '\\n\\n') +\n this.emit('\\n' + this.indent(-1) + this.indent() + '}')\n : ';')\n );\n }\n\n /**\n * Visit import node.\n */\n import(node: CssImportAST) {\n return this.emit('@import ' + node.import + ';', node.position);\n }\n\n /**\n * Visit media node.\n */\n media(node: CssMediaAST) {\n if (this.compress) {\n return (\n this.emit('@media ' + node.media, node.position) +\n this.emit('{') +\n this.mapVisit(node.rules) +\n this.emit('}')\n );\n }\n return (\n this.emit(this.indent() + '@media ' + node.media, node.position) +\n this.emit(' {\\n' + this.indent(1)) +\n this.mapVisit(node.rules, '\\n\\n') +\n this.emit('\\n' + this.indent(-1) + this.indent() + '}')\n );\n }\n\n /**\n * Visit document node.\n */\n document(node: CssDocumentAST) {\n const doc = '@' + (node.vendor || '') + 'document ' + node.document;\n if (this.compress) {\n return (\n this.emit(doc, node.position) +\n this.emit('{') +\n this.mapVisit(node.rules) +\n this.emit('}')\n );\n }\n return (\n this.emit(doc, node.position) +\n this.emit(' ' + ' {\\n' + this.indent(1)) +\n this.mapVisit(node.rules, '\\n\\n') +\n this.emit(this.indent(-1) + '\\n}')\n );\n }\n\n /**\n * Visit charset node.\n */\n charset(node: CssCharsetAST) {\n return this.emit('@charset ' + node.charset + ';', node.position);\n }\n\n /**\n * Visit namespace node.\n */\n namespace(node: CssNamespaceAST) {\n return this.emit('@namespace ' + node.namespace + ';', node.position);\n }\n\n /**\n * Visit supports node.\n */\n supports(node: CssSupportsAST) {\n if (this.compress) {\n return (\n this.emit('@supports ' + node.supports, node.position) +\n this.emit('{') +\n this.mapVisit(node.rules) +\n this.emit('}')\n );\n }\n return (\n this.emit(this.indent() + '@supports ' + node.supports, node.position) +\n this.emit(' {\\n' + this.indent(1)) +\n this.mapVisit(node.rules, '\\n\\n') +\n this.emit('\\n' + this.indent(-1) + this.indent() + '}')\n );\n }\n\n /**\n * 
Visit keyframes node.\n */\n keyframes(node: CssKeyframesAST) {\n if (this.compress) {\n return (\n this.emit(\n '@' + (node.vendor || '') + 'keyframes ' + node.name,\n node.position\n ) +\n this.emit('{') +\n this.mapVisit(node.keyframes) +\n this.emit('}')\n );\n }\n return (\n this.emit(\n '@' + (node.vendor || '') + 'keyframes ' + node.name,\n node.position\n ) +\n this.emit(' {\\n' + this.indent(1)) +\n this.mapVisit(node.keyframes, '\\n') +\n this.emit(this.indent(-1) + '}')\n );\n }\n\n /**\n * Visit keyframe node.\n */\n keyframe(node: CssKeyframeAST) {\n const decls = node.declarations;\n if (this.compress) {\n return (\n this.emit(node.values.join(','), node.position) +\n this.emit('{') +\n this.mapVisit(decls) +\n this.emit('}')\n );\n }\n\n return (\n this.emit(this.indent()) +\n this.emit(node.values.join(', '), node.position) +\n this.emit(' {\\n' + this.indent(1)) +\n this.mapVisit(decls, '\\n') +\n this.emit(this.indent(-1) + '\\n' + this.indent() + '}\\n')\n );\n }\n\n /**\n * Visit page node.\n */\n page(node: CssPageAST) {\n if (this.compress) {\n const sel = node.selectors.length ? node.selectors.join(', ') : '';\n\n return (\n this.emit('@page ' + sel, node.position) +\n this.emit('{') +\n this.mapVisit(node.declarations) +\n this.emit('}')\n );\n }\n const sel = node.selectors.length ? 
node.selectors.join(', ') + ' ' : '';\n\n return (\n this.emit('@page ' + sel, node.position) +\n this.emit('{\\n') +\n this.emit(this.indent(1)) +\n this.mapVisit(node.declarations, '\\n') +\n this.emit(this.indent(-1)) +\n this.emit('\\n}')\n );\n }\n\n /**\n * Visit font-face node.\n */\n fontFace(node: CssFontFaceAST) {\n if (this.compress) {\n return (\n this.emit('@font-face', node.position) +\n this.emit('{') +\n this.mapVisit(node.declarations) +\n this.emit('}')\n );\n }\n return (\n this.emit('@font-face ', node.position) +\n this.emit('{\\n') +\n this.emit(this.indent(1)) +\n this.mapVisit(node.declarations, '\\n') +\n this.emit(this.indent(-1)) +\n this.emit('\\n}')\n );\n }\n\n /**\n * Visit host node.\n */\n host(node: CssHostAST) {\n if (this.compress) {\n return (\n this.emit('@host', node.position) +\n this.emit('{') +\n this.mapVisit(node.rules) +\n this.emit('}')\n );\n }\n return (\n this.emit('@host', node.position) +\n this.emit(' {\\n' + this.indent(1)) +\n this.mapVisit(node.rules, '\\n\\n') +\n this.emit(this.indent(-1) + '\\n}')\n );\n }\n\n /**\n * Visit custom-media node.\n */\n customMedia(node: CssCustomMediaAST) {\n return this.emit(\n '@custom-media ' + node.name + ' ' + node.media + ';',\n node.position\n );\n }\n\n /**\n * Visit rule node.\n */\n rule(node: CssRuleAST) {\n const decls = node.declarations;\n if (!decls.length) {\n return '';\n }\n\n if (this.compress) {\n return (\n this.emit(node.selectors.join(','), node.position) +\n this.emit('{') +\n this.mapVisit(decls) +\n this.emit('}')\n );\n }\n const indent = this.indent();\n\n return (\n this.emit(\n node.selectors\n .map(s => {\n return indent + s;\n })\n .join(',\\n'),\n node.position\n ) +\n this.emit(' {\\n') +\n this.emit(this.indent(1)) +\n this.mapVisit(decls, '\\n') +\n this.emit(this.indent(-1)) +\n this.emit('\\n' + this.indent() + '}')\n );\n }\n\n /**\n * Visit declaration node.\n */\n declaration(node: CssDeclarationAST) {\n if (this.compress) {\n return (\n 
this.emit(node.property + ':' + node.value, node.position) +\n this.emit(';')\n );\n }\n return (\n this.emit(this.indent()) +\n this.emit(node.property + ': ' + node.value, node.position) +\n this.emit(';')\n );\n }\n}\n\nexport default Compiler;\n"],"names":[],"version":3,"file":"index.mjs.map"} \ No newline at end of file diff --git a/public/lib/css-parser.mjs b/public/lib/css-parser.mjs new file mode 100644 index 000000000..488f3d7c3 --- /dev/null +++ b/public/lib/css-parser.mjs @@ -0,0 +1,765 @@ + +function $parcel$defineInteropFlag(a) { + Object.defineProperty(a, '__esModule', {value: true, configurable: true}); +} + +function $parcel$export(e, n, v, s) { + Object.defineProperty(e, n, {get: v, set: s, enumerable: true, configurable: true}); +} +var $009ddb00d3ec72b8$exports = {}; + +$parcel$defineInteropFlag($009ddb00d3ec72b8$exports); + +$parcel$export($009ddb00d3ec72b8$exports, "default", () => $009ddb00d3ec72b8$export$2e2bcd8739ae039); +class $009ddb00d3ec72b8$export$2e2bcd8739ae039 extends Error { + constructor(filename, msg, lineno, column, css){ + super(filename + ":" + lineno + ":" + column + ": " + msg); + this.reason = msg; + this.filename = filename; + this.line = lineno; + this.column = column; + this.source = css; + } +} + + +var $0865a9fb4cc365fe$exports = {}; + +$parcel$defineInteropFlag($0865a9fb4cc365fe$exports); + +$parcel$export($0865a9fb4cc365fe$exports, "default", () => $0865a9fb4cc365fe$export$2e2bcd8739ae039); +/** + * Store position information for a node + */ class $0865a9fb4cc365fe$export$2e2bcd8739ae039 { + constructor(start, end, source){ + this.start = start; + this.end = end; + this.source = source; + } +} + + +var $b2e137848b48cf4f$exports = {}; + +$parcel$export($b2e137848b48cf4f$exports, "CssTypes", () => $b2e137848b48cf4f$export$9be5dd6e61d5d73a); +var $b2e137848b48cf4f$export$9be5dd6e61d5d73a; +(function(CssTypes) { + CssTypes["stylesheet"] = "stylesheet"; + CssTypes["rule"] = "rule"; + CssTypes["declaration"] = 
"declaration"; + CssTypes["comment"] = "comment"; + CssTypes["container"] = "container"; + CssTypes["charset"] = "charset"; + CssTypes["document"] = "document"; + CssTypes["customMedia"] = "custom-media"; + CssTypes["fontFace"] = "font-face"; + CssTypes["host"] = "host"; + CssTypes["import"] = "import"; + CssTypes["keyframes"] = "keyframes"; + CssTypes["keyframe"] = "keyframe"; + CssTypes["layer"] = "layer"; + CssTypes["media"] = "media"; + CssTypes["namespace"] = "namespace"; + CssTypes["page"] = "page"; + CssTypes["supports"] = "supports"; +})($b2e137848b48cf4f$export$9be5dd6e61d5d73a || ($b2e137848b48cf4f$export$9be5dd6e61d5d73a = {})); + + +// http://www.w3.org/TR/CSS21/grammar.html +// https://github.com/visionmedia/css-parse/pull/49#issuecomment-30088027 +// New rule => https://www.w3.org/TR/CSS22/syndata.html#comments +// [^] is equivalent to [.\n\r] +const $d708735ed1303b43$var$commentre = /\/\*[^]*?(?:\*\/|$)/g; +const $d708735ed1303b43$export$98e6a39c04603d36 = (css, options)=>{ + options = options || {}; + /** + * Positional. + */ let lineno = 1; + let column = 1; + /** + * Update lineno and column based on `str`. + */ function updatePosition(str) { + const lines = str.match(/\n/g); + if (lines) lineno += lines.length; + const i = str.lastIndexOf("\n"); + column = ~i ? str.length - i : column + str.length; + } + /** + * Mark position and patch `node.position`. + */ function position() { + const start = { + line: lineno, + column: column + }; + return function(node) { + node.position = new (0, $0865a9fb4cc365fe$export$2e2bcd8739ae039)(start, { + line: lineno, + column: column + }, options?.source || ""); + whitespace(); + return node; + }; + } + /** + * Error `msg`. + */ const errorsList = []; + function error(msg) { + const err = new (0, $009ddb00d3ec72b8$export$2e2bcd8739ae039)(options?.source || "", msg, lineno, column, css); + if (options?.silent) errorsList.push(err); + else throw err; + } + /** + * Parse stylesheet. 
+ */ function stylesheet() { + const rulesList = rules(); + const result = { + type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).stylesheet, + stylesheet: { + source: options?.source, + rules: rulesList, + parsingErrors: errorsList + } + }; + return result; + } + /** + * Opening brace. + */ function open() { + return match(/^{\s*/); + } + /** + * Closing brace. + */ function close() { + return match(/^}/); + } + /** + * Parse ruleset. + */ function rules() { + let node; + const rules = []; + whitespace(); + comments(rules); + while(css.length && css.charAt(0) !== "}" && (node = atrule() || rule()))if (node) { + rules.push(node); + comments(rules); + } + return rules; + } + /** + * Match `re` and return captures. + */ function match(re) { + const m = re.exec(css); + if (!m) return; + const str = m[0]; + updatePosition(str); + css = css.slice(str.length); + return m; + } + /** + * Parse whitespace. + */ function whitespace() { + match(/^\s*/); + } + /** + * Parse comments; + */ function comments(rules) { + let c; + rules = rules || []; + while(c = comment())if (c) rules.push(c); + return rules; + } + /** + * Parse comment. 
+ */ function comment() { + const pos = position(); + if ("/" !== css.charAt(0) || "*" !== css.charAt(1)) return; + const m = match(/^\/\*[^]*?\*\//); + if (!m) return error("End of comment missing"); + return pos({ + type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).comment, + comment: m[0].slice(2, -2) + }); + } + function findClosingParenthese(str, start, depth) { + let ptr = start + 1; + let found = false; + let closeParentheses = str.indexOf(")", ptr); + while(!found && closeParentheses !== -1){ + const nextParentheses = str.indexOf("(", ptr); + if (nextParentheses !== -1 && nextParentheses < closeParentheses) { + const nextSearch = findClosingParenthese(str, nextParentheses + 1, depth + 1); + ptr = nextSearch + 1; + closeParentheses = str.indexOf(")", ptr); + } else found = true; + } + if (found && closeParentheses !== -1) return closeParentheses; + else return -1; + } + /** + * Parse selector. + */ function selector() { + const m = match(/^([^{]+)/); + if (!m) return; + // remove comment in selector; + let res = $d708735ed1303b43$var$trim(m[0]).replace($d708735ed1303b43$var$commentre, ""); + // Optimisation: If there is no ',' no need to split or post-process (this is less costly) + if (res.indexOf(",") === -1) return [ + res + ]; + // Replace all the , in the parentheses by \u200C + let ptr = 0; + let startParentheses = res.indexOf("(", ptr); + while(startParentheses !== -1){ + const closeParentheses = findClosingParenthese(res, startParentheses, 0); + if (closeParentheses === -1) break; + ptr = closeParentheses + 1; + res = res.substring(0, startParentheses) + res.substring(startParentheses, closeParentheses).replace(/,/g, "\u200C") + res.substring(closeParentheses); + startParentheses = res.indexOf("(", ptr); + } + // Replace all the , in ' and " by \u200C + res = res/** + * replace ',' by \u200C for data selector (div[data-lang="fr,de,us"]) + * + * Examples: + * div[data-lang="fr,\"de,us"] + * div[data-lang='fr,\'de,us'] + * + * Regex logic: + * 
("|')(?:\\\1|.)*?\1 => Handle the " and ' + * + * Optimization 1: + * No greedy capture (see docs about the difference between .* and .*?) + * + * Optimization 2: + * ("|')(?:\\\1|.)*?\1 this use reference to capture group, it work faster. + */ .replace(/("|')(?:\\\1|.)*?\1/g, (m)=>m.replace(/,/g, "\u200C")); + // Split all the left , and replace all the \u200C by , + return res// Split the selector by ',' + .split(",")// Replace back \u200C by ',' + .map((s)=>{ + return $d708735ed1303b43$var$trim(s.replace(/\u200C/g, ",")); + }); + } + /** + * Parse declaration. + */ function declaration() { + const pos = position(); + // prop + const propMatch = match(/^(\*?[-#/*\\\w]+(\[[0-9a-z_-]+\])?)\s*/); + if (!propMatch) return; + const propValue = $d708735ed1303b43$var$trim(propMatch[0]); + // : + if (!match(/^:\s*/)) return error("property missing ':'"); + // val + const val = match(/^((?:'(?:\\'|.)*?'|"(?:\\"|.)*?"|\([^)]*?\)|[^};])+)/); + const ret = pos({ + type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).declaration, + property: propValue.replace($d708735ed1303b43$var$commentre, ""), + value: val ? $d708735ed1303b43$var$trim(val[0]).replace($d708735ed1303b43$var$commentre, "") : "" + }); + // ; + match(/^[;\s]*/); + return ret; + } + /** + * Parse declarations. + */ function declarations() { + const decls = []; + if (!open()) return error("missing '{'"); + comments(decls); + // declarations + let decl; + while(decl = declaration())if (decl) { + decls.push(decl); + comments(decls); + } + if (!close()) return error("missing '}'"); + return decls; + } + /** + * Parse keyframe. + */ function keyframe() { + let m; + const vals = []; + const pos = position(); + while(m = match(/^((\d+\.\d+|\.\d+|\d+)%?|[a-z]+)\s*/)){ + vals.push(m[1]); + match(/^,\s*/); + } + if (!vals.length) return; + return pos({ + type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).keyframe, + values: vals, + declarations: declarations() || [] + }); + } + /** + * Parse keyframes. 
+ */ function atkeyframes() { + const pos = position(); + const m1 = match(/^@([-\w]+)?keyframes\s*/); + if (!m1) return; + const vendor = m1[1]; + // identifier + const m2 = match(/^([-\w]+)\s*/); + if (!m2) return error("@keyframes missing name"); + const name = m2[1]; + if (!open()) return error("@keyframes missing '{'"); + let frame; + let frames = comments(); + while(frame = keyframe()){ + frames.push(frame); + frames = frames.concat(comments()); + } + if (!close()) return error("@keyframes missing '}'"); + return pos({ + type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).keyframes, + name: name, + vendor: vendor, + keyframes: frames + }); + } + /** + * Parse supports. + */ function atsupports() { + const pos = position(); + const m = match(/^@supports *([^{]+)/); + if (!m) return; + const supports = $d708735ed1303b43$var$trim(m[1]); + if (!open()) return error("@supports missing '{'"); + const style = comments().concat(rules()); + if (!close()) return error("@supports missing '}'"); + return pos({ + type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).supports, + supports: supports, + rules: style + }); + } + /** + * Parse host. + */ function athost() { + const pos = position(); + const m = match(/^@host\s*/); + if (!m) return; + if (!open()) return error("@host missing '{'"); + const style = comments().concat(rules()); + if (!close()) return error("@host missing '}'"); + return pos({ + type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).host, + rules: style + }); + } + /** + * Parse container. 
+ */ function atcontainer() { + const pos = position(); + const m = match(/^@container *([^{]+)/); + if (!m) return; + const container = $d708735ed1303b43$var$trim(m[1]); + if (!open()) return error("@container missing '{'"); + const style = comments().concat(rules()); + if (!close()) return error("@container missing '}'"); + return pos({ + type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).container, + container: container, + rules: style + }); + } + /** + * Parse container. + */ function atlayer() { + const pos = position(); + const m = match(/^@layer *([^{;@]+)/); + if (!m) return; + const layer = $d708735ed1303b43$var$trim(m[1]); + if (!open()) { + match(/^[;\s]*/); + return pos({ + type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).layer, + layer: layer + }); + } + const style = comments().concat(rules()); + if (!close()) return error("@layer missing '}'"); + return pos({ + type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).layer, + layer: layer, + rules: style + }); + } + /** + * Parse media. + */ function atmedia() { + const pos = position(); + const m = match(/^@media *([^{]+)/); + if (!m) return; + const media = $d708735ed1303b43$var$trim(m[1]); + if (!open()) return error("@media missing '{'"); + const style = comments().concat(rules()); + if (!close()) return error("@media missing '}'"); + return pos({ + type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).media, + media: media, + rules: style + }); + } + /** + * Parse custom-media. + */ function atcustommedia() { + const pos = position(); + const m = match(/^@custom-media\s+(--\S+)\s*([^{;\s][^{;]*);/); + if (!m) return; + return pos({ + type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).customMedia, + name: $d708735ed1303b43$var$trim(m[1]), + media: $d708735ed1303b43$var$trim(m[2]) + }); + } + /** + * Parse paged media. 
+ */ function atpage() { + const pos = position(); + const m = match(/^@page */); + if (!m) return; + const sel = selector() || []; + if (!open()) return error("@page missing '{'"); + let decls = comments(); + // declarations + let decl; + while(decl = declaration()){ + decls.push(decl); + decls = decls.concat(comments()); + } + if (!close()) return error("@page missing '}'"); + return pos({ + type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).page, + selectors: sel, + declarations: decls + }); + } + /** + * Parse document. + */ function atdocument() { + const pos = position(); + const m = match(/^@([-\w]+)?document *([^{]+)/); + if (!m) return; + const vendor = $d708735ed1303b43$var$trim(m[1]); + const doc = $d708735ed1303b43$var$trim(m[2]); + if (!open()) return error("@document missing '{'"); + const style = comments().concat(rules()); + if (!close()) return error("@document missing '}'"); + return pos({ + type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).document, + document: doc, + vendor: vendor, + rules: style + }); + } + /** + * Parse font-face. 
+ */ function atfontface() { + const pos = position(); + const m = match(/^@font-face\s*/); + if (!m) return; + if (!open()) return error("@font-face missing '{'"); + let decls = comments(); + // declarations + let decl; + while(decl = declaration()){ + decls.push(decl); + decls = decls.concat(comments()); + } + if (!close()) return error("@font-face missing '}'"); + return pos({ + type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).fontFace, + declarations: decls + }); + } + /** + * Parse import + */ const atimport = _compileAtrule("import"); + /** + * Parse charset + */ const atcharset = _compileAtrule("charset"); + /** + * Parse namespace + */ const atnamespace = _compileAtrule("namespace"); + /** + * Parse non-block at-rules + */ function _compileAtrule(name) { + const re = new RegExp("^@" + name + "\\s*((?::?[^;'\"]|\"(?:\\\\\"|[^\"])*?\"|'(?:\\\\'|[^'])*?')+)(?:;|$)"); + // ^@import\s*([^;"']|("|')(?:\\\2|.)*?\2)+(;|$) + return function() { + const pos = position(); + const m = match(re); + if (!m) return; + const ret = { + type: name + }; + ret[name] = m[1].trim(); + return pos(ret); + }; + } + /** + * Parse at rule. + */ function atrule() { + if (css[0] !== "@") return; + return atkeyframes() || atmedia() || atcustommedia() || atsupports() || atimport() || atcharset() || atnamespace() || atdocument() || atpage() || athost() || atfontface() || atcontainer() || atlayer(); + } + /** + * Parse rule. + */ function rule() { + const pos = position(); + const sel = selector(); + if (!sel) return error("selector missing"); + comments(); + return pos({ + type: (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).rule, + selectors: sel, + declarations: declarations() || [] + }); + } + return $d708735ed1303b43$var$addParent(stylesheet()); +}; +/** + * Trim `str`. + */ function $d708735ed1303b43$var$trim(str) { + return str ? str.trim() : ""; +} +/** + * Adds non-enumerable parent node reference to each node. 
+ */ function $d708735ed1303b43$var$addParent(obj, parent) { + const isNode = obj && typeof obj.type === "string"; + const childParent = isNode ? obj : parent; + for(const k in obj){ + const value = obj[k]; + if (Array.isArray(value)) value.forEach((v)=>{ + $d708735ed1303b43$var$addParent(v, childParent); + }); + else if (value && typeof value === "object") $d708735ed1303b43$var$addParent(value, childParent); + } + if (isNode) Object.defineProperty(obj, "parent", { + configurable: true, + writable: true, + enumerable: false, + value: parent || null + }); + return obj; +} +var $d708735ed1303b43$export$2e2bcd8739ae039 = $d708735ed1303b43$export$98e6a39c04603d36; + + + +class $de9540138ed1fd01$var$Compiler { + constructor(options){ + this.level = 0; + this.indentation = " "; + this.compress = false; + if (typeof options?.indent === "string") this.indentation = options?.indent; + if (options?.compress) this.compress = true; + } + // We disable no-unused-vars for _position. We keep position for potential reintroduction of source-map + // eslint-disable-next-line @typescript-eslint/no-unused-vars + emit(str, _position) { + return str; + } + /** + * Increase, decrease or return current indentation. 
+ */ indent(level) { + this.level = this.level || 1; + if (level) { + this.level += level; + return ""; + } + return Array(this.level).join(this.indentation); + } + visit(node) { + switch(node.type){ + case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).stylesheet: + return this.stylesheet(node); + case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).rule: + return this.rule(node); + case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).declaration: + return this.declaration(node); + case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).comment: + return this.comment(node); + case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).container: + return this.container(node); + case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).charset: + return this.charset(node); + case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).document: + return this.document(node); + case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).customMedia: + return this.customMedia(node); + case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).fontFace: + return this.fontFace(node); + case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).host: + return this.host(node); + case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).import: + return this.import(node); + case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).keyframes: + return this.keyframes(node); + case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).keyframe: + return this.keyframe(node); + case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).layer: + return this.layer(node); + case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).media: + return this.media(node); + case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).namespace: + return this.namespace(node); + case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).page: + return this.page(node); + case (0, $b2e137848b48cf4f$export$9be5dd6e61d5d73a).supports: + return this.supports(node); + } + } + mapVisit(nodes, delim) { + let buf = ""; + delim = delim || ""; + for(let i = 0, length = nodes.length; i < length; 
i++){ + buf += this.visit(nodes[i]); + if (delim && i < length - 1) buf += this.emit(delim); + } + return buf; + } + compile(node) { + if (this.compress) return node.stylesheet.rules.map(this.visit, this).join(""); + return this.stylesheet(node); + } + /** + * Visit stylesheet node. + */ stylesheet(node) { + return this.mapVisit(node.stylesheet.rules, "\n\n"); + } + /** + * Visit comment node. + */ comment(node) { + if (this.compress) return this.emit("", node.position); + return this.emit(this.indent() + "/*" + node.comment + "*/", node.position); + } + /** + * Visit container node. + */ container(node) { + if (this.compress) return this.emit("@container " + node.container, node.position) + this.emit("{") + this.mapVisit(node.rules) + this.emit("}"); + return this.emit(this.indent() + "@container " + node.container, node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.rules, "\n\n") + this.emit("\n" + this.indent(-1) + this.indent() + "}"); + } + /** + * Visit container node. + */ layer(node) { + if (this.compress) return this.emit("@layer " + node.layer, node.position) + (node.rules ? this.emit("{") + this.mapVisit(node.rules) + this.emit("}") : ";"); + return this.emit(this.indent() + "@layer " + node.layer, node.position) + (node.rules ? this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.rules, "\n\n") + this.emit("\n" + this.indent(-1) + this.indent() + "}") : ";"); + } + /** + * Visit import node. + */ import(node) { + return this.emit("@import " + node.import + ";", node.position); + } + /** + * Visit media node. + */ media(node) { + if (this.compress) return this.emit("@media " + node.media, node.position) + this.emit("{") + this.mapVisit(node.rules) + this.emit("}"); + return this.emit(this.indent() + "@media " + node.media, node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.rules, "\n\n") + this.emit("\n" + this.indent(-1) + this.indent() + "}"); + } + /** + * Visit document node. 
+ */ document(node) { + const doc = "@" + (node.vendor || "") + "document " + node.document; + if (this.compress) return this.emit(doc, node.position) + this.emit("{") + this.mapVisit(node.rules) + this.emit("}"); + return this.emit(doc, node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.rules, "\n\n") + this.emit(this.indent(-1) + "\n}"); + } + /** + * Visit charset node. + */ charset(node) { + return this.emit("@charset " + node.charset + ";", node.position); + } + /** + * Visit namespace node. + */ namespace(node) { + return this.emit("@namespace " + node.namespace + ";", node.position); + } + /** + * Visit supports node. + */ supports(node) { + if (this.compress) return this.emit("@supports " + node.supports, node.position) + this.emit("{") + this.mapVisit(node.rules) + this.emit("}"); + return this.emit(this.indent() + "@supports " + node.supports, node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.rules, "\n\n") + this.emit("\n" + this.indent(-1) + this.indent() + "}"); + } + /** + * Visit keyframes node. + */ keyframes(node) { + if (this.compress) return this.emit("@" + (node.vendor || "") + "keyframes " + node.name, node.position) + this.emit("{") + this.mapVisit(node.keyframes) + this.emit("}"); + return this.emit("@" + (node.vendor || "") + "keyframes " + node.name, node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.keyframes, "\n") + this.emit(this.indent(-1) + "}"); + } + /** + * Visit keyframe node. + */ keyframe(node) { + const decls = node.declarations; + if (this.compress) return this.emit(node.values.join(","), node.position) + this.emit("{") + this.mapVisit(decls) + this.emit("}"); + return this.emit(this.indent()) + this.emit(node.values.join(", "), node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(decls, "\n") + this.emit(this.indent(-1) + "\n" + this.indent() + "}\n"); + } + /** + * Visit page node. 
+ */ page(node) { + if (this.compress) { + const sel = node.selectors.length ? node.selectors.join(", ") : ""; + return this.emit("@page " + sel, node.position) + this.emit("{") + this.mapVisit(node.declarations) + this.emit("}"); + } + const sel = node.selectors.length ? node.selectors.join(", ") + " " : ""; + return this.emit("@page " + sel, node.position) + this.emit("{\n") + this.emit(this.indent(1)) + this.mapVisit(node.declarations, "\n") + this.emit(this.indent(-1)) + this.emit("\n}"); + } + /** + * Visit font-face node. + */ fontFace(node) { + if (this.compress) return this.emit("@font-face", node.position) + this.emit("{") + this.mapVisit(node.declarations) + this.emit("}"); + return this.emit("@font-face ", node.position) + this.emit("{\n") + this.emit(this.indent(1)) + this.mapVisit(node.declarations, "\n") + this.emit(this.indent(-1)) + this.emit("\n}"); + } + /** + * Visit host node. + */ host(node) { + if (this.compress) return this.emit("@host", node.position) + this.emit("{") + this.mapVisit(node.rules) + this.emit("}"); + return this.emit("@host", node.position) + this.emit(" {\n" + this.indent(1)) + this.mapVisit(node.rules, "\n\n") + this.emit(this.indent(-1) + "\n}"); + } + /** + * Visit custom-media node. + */ customMedia(node) { + return this.emit("@custom-media " + node.name + " " + node.media + ";", node.position); + } + /** + * Visit rule node. + */ rule(node) { + const decls = node.declarations; + if (!decls.length) return ""; + if (this.compress) return this.emit(node.selectors.join(","), node.position) + this.emit("{") + this.mapVisit(decls) + this.emit("}"); + const indent = this.indent(); + return this.emit(node.selectors.map((s)=>{ + return indent + s; + }).join(",\n"), node.position) + this.emit(" {\n") + this.emit(this.indent(1)) + this.mapVisit(decls, "\n") + this.emit(this.indent(-1)) + this.emit("\n" + this.indent() + "}"); + } + /** + * Visit declaration node. 
+ */ declaration(node) { + if (this.compress) return this.emit(node.property + ":" + node.value, node.position) + this.emit(";"); + return this.emit(this.indent()) + this.emit(node.property + ": " + node.value, node.position) + this.emit(";"); + } +} +var $de9540138ed1fd01$export$2e2bcd8739ae039 = $de9540138ed1fd01$var$Compiler; + + +var $fdf773ab87e20450$export$2e2bcd8739ae039 = (node, options)=>{ + const compiler = new (0, $de9540138ed1fd01$export$2e2bcd8739ae039)(options || {}); + return compiler.compile(node); +}; + + + + + +const $149c1bd638913645$export$98e6a39c04603d36 = (0, $d708735ed1303b43$export$2e2bcd8739ae039); +const $149c1bd638913645$export$fac44ee5b035f737 = (0, $fdf773ab87e20450$export$2e2bcd8739ae039); +var $149c1bd638913645$export$2e2bcd8739ae039 = { + parse: $149c1bd638913645$export$98e6a39c04603d36, + stringify: $149c1bd638913645$export$fac44ee5b035f737 +}; + + +export {$149c1bd638913645$export$98e6a39c04603d36 as parse, $149c1bd638913645$export$fac44ee5b035f737 as stringify, $149c1bd638913645$export$2e2bcd8739ae039 as default, $b2e137848b48cf4f$export$9be5dd6e61d5d73a as CssTypes}; +//# sourceMappingURL=index.mjs.map diff --git a/public/script.js b/public/script.js index 164464c8e..e398454a9 100644 --- a/public/script.js +++ b/public/script.js @@ -189,7 +189,7 @@ import { getBackgrounds, initBackgrounds, loadBackgroundSettings, background_set import { hideLoader, showLoader } from './scripts/loader.js'; import { BulkEditOverlay, CharacterContextMenu } from './scripts/BulkEditOverlay.js'; import { loadMancerModels, loadOllamaModels, loadTogetherAIModels } from './scripts/textgen-models.js'; -import { appendFileContent, hasPendingFileAttachment, populateFileAttachment } from './scripts/chats.js'; +import { appendFileContent, hasPendingFileAttachment, populateFileAttachment, decodeStyleTags, encodeStyleTags } from './scripts/chats.js'; import { replaceVariableMacros } from './scripts/variables.js'; import { initPresetManager } from 
'./scripts/preset-manager.js'; @@ -275,6 +275,22 @@ DOMPurify.addHook('afterSanitizeAttributes', function (node) { } }); +DOMPurify.addHook("uponSanitizeAttribute", (_, data, config) => { + if (!config['MESSAGE_SANITIZE']) { + return; + } + switch (data.attrName) { + case 'class': { + if (data.attrValue) { + data.attrValue = data.attrValue.split(' ').map((v) => { + return "custom-" + v; + }).join(' '); + } + break; + } + } +}); + // API OBJECT FOR EXTERNAL WIRING window['SillyTavern'] = {}; @@ -1550,7 +1566,11 @@ function messageFormatting(mes, ch_name, isSystem, isUser) { mes = mes.replace(new RegExp(`(^|\n)${ch_name}:`, 'g'), '$1'); } - mes = DOMPurify.sanitize(mes, { FORBID_TAGS: ['style'] }); + /** @type {any} */ + const config = { MESSAGE_SANITIZE: true, ADD_TAGS: ['custom-style'] }; + mes = encodeStyleTags(mes); + mes = DOMPurify.sanitize(mes, config); + mes = decodeStyleTags(mes); return mes; } @@ -3634,11 +3654,11 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu mesSendString = addChatsPreamble(mesSendString); let combinedPrompt = beforeScenarioAnchor + - storyString + - afterScenarioAnchor + - mesExmString + - mesSendString + - generatedPromptCache; + storyString + + afterScenarioAnchor + + mesExmString + + mesSendString + + generatedPromptCache; combinedPrompt = combinedPrompt.replace(/\r/gm, ''); diff --git a/public/scripts/chats.js b/public/scripts/chats.js index 176d4d6a0..847915fc4 100644 --- a/public/scripts/chats.js +++ b/public/scripts/chats.js @@ -1,5 +1,6 @@ // Move chat functions here from script.js (eventually) +import css from '../lib/css-parser.mjs'; import { addCopyToCodeBlocks, appendMediaToMessage, @@ -360,6 +361,61 @@ export async function appendFileContent(message, messageText) { return messageText; } +/** + * Replaces style tags in the message text with custom tags with encoded content. 
+ * @param {string} text + * @returns {string} Encoded message text + * @copyright https://github.com/kwaroran/risuAI + */ +export function encodeStyleTags(text) { + const styleRegex = /`; + } catch (error) { + return `CSS ERROR: ${error}`; + } + }); +} + jQuery(function () { $(document).on('click', '.mes_hide', async function () { const messageBlock = $(this).closest('.mes'); From 5734dbd17c348fa4037db045c1b4147c1b32c930 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Wed, 20 Dec 2023 18:29:03 +0200 Subject: [PATCH 54/82] Add custom endpoint type --- public/index.html | 54 +++++++++++++++---- public/script.js | 22 +++++--- public/scripts/RossAscends-mods.js | 1 + public/scripts/openai.js | 61 +++++++++++++++++++++- public/scripts/secrets.js | 2 + public/scripts/tokenizers.js | 4 ++ public/style.css | 4 +- src/constants.js | 1 + src/endpoints/backends/chat-completions.js | 20 +++++-- src/endpoints/secrets.js | 1 + 10 files changed, 144 insertions(+), 26 deletions(-) diff --git a/public/index.html b/public/index.html index 8aa5d7219..3e0a84bb2 100644 --- a/public/index.html +++ b/public/index.html @@ -437,14 +437,15 @@ Streaming
    - Display - the response bit by bit as it is generated.
    - When - this is off, responses will be displayed all at once when they are - complete. + + Display the response bit by bit as it is generated. +
    + + When this is off, responses will be displayed all at once when they are complete. +
    -
    +
    Temperature
    @@ -457,7 +458,7 @@
    -
    +
    Frequency Penalty
    @@ -470,7 +471,7 @@
    -
    +
    Presence Penalty
    @@ -509,7 +510,7 @@
    -
    +
    Top P
    @@ -733,6 +734,9 @@
    + + Doesn't work? Try adding /v1 at the end! +
    @@ -749,7 +753,7 @@
    -
    +
    Seed
    @@ -1921,6 +1925,7 @@ +

    OpenAI API key

    @@ -2225,6 +2230,35 @@
    +
    +

    Endpoint URL

    +
    + +
    +
    + + Doesn't work? Try adding /v1 at the end of the URL! + +
    +

    Custom API Key

    +
    + + +
    +
    + For privacy reasons, your API key will be hidden after you reload the page. +
    +

    Enter a Model ID

    +
    + +
    +

    Available Models

    +
    + +
    +
    diff --git a/public/script.js b/public/script.js index e398454a9..c30bf70cc 100644 --- a/public/script.js +++ b/public/script.js @@ -5453,6 +5453,7 @@ function changeMainAPI() { case chat_completion_sources.AI21: case chat_completion_sources.MAKERSUITE: case chat_completion_sources.MISTRALAI: + case chat_completion_sources.CUSTOM: default: setupChatCompletionPromptManager(oai_settings); break; @@ -7572,43 +7573,48 @@ const CONNECT_API_MAP = { }, 'oai': { selected: 'openai', - source: 'openai', button: '#api_button_openai', + source: chat_completion_sources.OPENAI, }, 'claude': { selected: 'openai', - source: 'claude', button: '#api_button_openai', + source: chat_completion_sources.CLAUDE, }, 'windowai': { selected: 'openai', - source: 'windowai', button: '#api_button_openai', + source: chat_completion_sources.WINDOWAI, }, 'openrouter': { selected: 'openai', - source: 'openrouter', button: '#api_button_openai', + source: chat_completion_sources.OPENROUTER, }, 'scale': { selected: 'openai', - source: 'scale', button: '#api_button_openai', + source: chat_completion_sources.SCALE, }, 'ai21': { selected: 'openai', - source: 'ai21', button: '#api_button_openai', + source: chat_completion_sources.AI21, }, 'makersuite': { selected: 'openai', - source: 'makersuite', button: '#api_button_openai', + source: chat_completion_sources.MAKERSUITE, }, 'mistralai': { selected: 'openai', - source: 'mistralai', button: '#api_button_openai', + source: chat_completion_sources.MISTRALAI, + }, + 'custom': { + selected: 'openai', + button: '#api_button_openai', + source: chat_completion_sources.CUSTOM, }, }; diff --git a/public/scripts/RossAscends-mods.js b/public/scripts/RossAscends-mods.js index a5abd41fd..1827b1b26 100644 --- a/public/scripts/RossAscends-mods.js +++ b/public/scripts/RossAscends-mods.js @@ -399,6 +399,7 @@ function RA_autoconnect(PrevApi) { || (secret_state[SECRET_KEYS.AI21] && oai_settings.chat_completion_source == chat_completion_sources.AI21) || 
(secret_state[SECRET_KEYS.MAKERSUITE] && oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE) || (secret_state[SECRET_KEYS.MISTRALAI] && oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI) + || (secret_state[SECRET_KEYS.CUSTOM] && oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) ) { $('#api_button_openai').trigger('click'); } diff --git a/public/scripts/openai.js b/public/scripts/openai.js index f3c17f373..d189e9f57 100644 --- a/public/scripts/openai.js +++ b/public/scripts/openai.js @@ -165,6 +165,7 @@ export const chat_completion_sources = { AI21: 'ai21', MAKERSUITE: 'makersuite', MISTRALAI: 'mistralai', + CUSTOM: 'custom', }; const prefixMap = selected_group ? { @@ -210,6 +211,8 @@ const default_settings = { google_model: 'gemini-pro', ai21_model: 'j2-ultra', mistralai_model: 'mistral-medium', + custom_model: '', + custom_url: '', windowai_model: '', openrouter_model: openrouter_website_model, openrouter_use_fallback: false, @@ -266,6 +269,8 @@ const oai_settings = { google_model: 'gemini-pro', ai21_model: 'j2-ultra', mistralai_model: 'mistral-medium', + custom_model: '', + custom_url: '', windowai_model: '', openrouter_model: openrouter_website_model, openrouter_use_fallback: false, @@ -1266,6 +1271,8 @@ function getChatCompletionModel() { return oai_settings.ai21_model; case chat_completion_sources.MISTRALAI: return oai_settings.mistralai_model; + case chat_completion_sources.CUSTOM: + return oai_settings.custom_model; default: throw new Error(`Unknown chat completion source: ${oai_settings.chat_completion_source}`); } @@ -1480,7 +1487,7 @@ async function sendOpenAIRequest(type, messages, signal) { return sendWindowAIRequest(messages, signal, stream); } - const logitBiasSources = [chat_completion_sources.OPENAI, chat_completion_sources.OPENROUTER, chat_completion_sources.SCALE]; + const logitBiasSources = [chat_completion_sources.OPENAI, chat_completion_sources.OPENROUTER, 
chat_completion_sources.SCALE, chat_completion_sources.CUSTOM]; if (oai_settings.bias_preset_selected && logitBiasSources.includes(oai_settings.chat_completion_source) && Array.isArray(oai_settings.bias_presets[oai_settings.bias_preset_selected]) @@ -2311,6 +2318,8 @@ function loadOpenAISettings(data, settings) { oai_settings.openrouter_force_instruct = settings.openrouter_force_instruct ?? default_settings.openrouter_force_instruct; oai_settings.ai21_model = settings.ai21_model ?? default_settings.ai21_model; oai_settings.mistralai_model = settings.mistralai_model ?? default_settings.mistralai_model; + oai_settings.custom_model = settings.custom_model ?? default_settings.custom_model; + oai_settings.custom_url = settings.custom_url ?? default_settings.custom_url; oai_settings.google_model = settings.google_model ?? default_settings.google_model; oai_settings.chat_completion_source = settings.chat_completion_source ?? default_settings.chat_completion_source; oai_settings.api_url_scale = settings.api_url_scale ?? 
default_settings.api_url_scale; @@ -2355,6 +2364,8 @@ function loadOpenAISettings(data, settings) { $(`#model_ai21_select option[value="${oai_settings.ai21_model}"`).attr('selected', true); $('#model_mistralai_select').val(oai_settings.mistralai_model); $(`#model_mistralai_select option[value="${oai_settings.mistralai_model}"`).attr('selected', true); + $('#custom_model_id').val(oai_settings.custom_model); + $('#custom_api_url_text').val(oai_settings.custom_url); $('#openai_max_context').val(oai_settings.openai_max_context); $('#openai_max_context_counter').val(`${oai_settings.openai_max_context}`); $('#model_openrouter_select').val(oai_settings.openrouter_model); @@ -2466,7 +2477,11 @@ async function getStatusOpen() { validateReverseProxy(); } - const canBypass = oai_settings.chat_completion_source === chat_completion_sources.OPENAI && oai_settings.bypass_status_check; + if (oai_settings.chat_completion_source === chat_completion_sources.CUSTOM) { + custom_url: oai_settings.custom_url; + } + + const canBypass = (oai_settings.chat_completion_source === chat_completion_sources.OPENAI && oai_settings.bypass_status_check) || oai_settings.chat_completion_source === chat_completion_sources.CUSTOM; if (canBypass) { setOnlineStatus('Status check bypassed'); } @@ -2533,6 +2548,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) { openrouter_sort_models: settings.openrouter_sort_models, ai21_model: settings.ai21_model, mistralai_model: settings.mistralai_model, + custom_model: settings.custom_model, google_model: settings.google_model, temperature: settings.temp_openai, frequency_penalty: settings.freq_pen_openai, @@ -2905,6 +2921,8 @@ function onSettingsPresetChange() { openrouter_sort_models: ['#openrouter_sort_models', 'openrouter_sort_models', false], ai21_model: ['#model_ai21_select', 'ai21_model', false], mistralai_model: ['#model_mistralai_select', 'mistralai_model', false], + custom_model: ['#custom_model_id', 'custom_model', false], + custom_url: 
['#custom_api_url_text', 'custom_url', false], google_model: ['#model_google_select', 'google_model', false], openai_max_context: ['#openai_max_context', 'openai_max_context', false], openai_max_tokens: ['#openai_max_tokens', 'openai_max_tokens', false], @@ -3094,6 +3112,12 @@ async function onModelChange() { oai_settings.mistralai_model = value; } + if (value && $(this).is('#model_custom_select')) { + console.log('Custom model changed to', value); + oai_settings.custom_model = value; + $('#custom_model_id').val(value).trigger('input'); + } + if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) { if (oai_settings.max_context_unlocked) { $('#openai_max_context').attr('max', unlocked_max); @@ -3241,6 +3265,12 @@ async function onModelChange() { $('#top_k_openai').attr('max', 200).val(oai_settings.top_k_openai).trigger('input'); } + if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) { + $('#openai_max_context').attr('max', unlocked_max); + oai_settings.openai_max_context = Math.min(Number($('#openai_max_context').attr('max')), oai_settings.openai_max_context); + $('#openai_max_context').val(oai_settings.openai_max_context).trigger('input'); + } + $('#openai_max_context_counter').attr('max', Number($('#openai_max_context').attr('max'))); saveSettingsDebounced(); @@ -3383,6 +3413,19 @@ async function onConnectButtonClick(e) { } } + if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) { + const api_key_custom = String($('#api_key_custom').val()).trim(); + + if (api_key_custom.length) { + await writeSecret(SECRET_KEYS.CUSTOM, api_key_custom); + } + + if (!oai_settings.custom_url) { + console.log('No API URL saved for Custom'); + return; + } + } + startStatusLoading(); saveSettingsDebounced(); await getStatusOpen(); @@ -3418,6 +3461,9 @@ function toggleChatCompletionForms() { else if (oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI) { $('#model_mistralai_select').trigger('change'); 
} + else if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) { + $('#model_custom_select').trigger('change'); + } $('[data-source]').each(function () { const validSources = $(this).data('source').split(','); $(this).toggle(validSources.includes(oai_settings.chat_completion_source)); @@ -3780,6 +3826,16 @@ $(document).ready(async function () { saveSettingsDebounced(); }); + $('#custom_api_url_text').on('input', function () { + oai_settings.custom_url = String($(this).val()); + saveSettingsDebounced(); + }); + + $('#custom_model_id').on('input', function () { + oai_settings.custom_model = String($(this).val()); + saveSettingsDebounced(); + }); + $(document).on('input', '#openai_settings .autoSetHeight', function () { resetScrollHeight($(this)); }); @@ -3796,6 +3852,7 @@ $(document).ready(async function () { $('#openrouter_sort_models').on('change', onOpenrouterModelSortChange); $('#model_ai21_select').on('change', onModelChange); $('#model_mistralai_select').on('change', onModelChange); + $('#model_custom_select').on('change', onModelChange); $('#settings_preset_openai').on('change', onSettingsPresetChange); $('#new_oai_preset').on('click', onNewPresetClick); $('#delete_oai_preset').on('click', onDeletePresetClick); diff --git a/public/scripts/secrets.js b/public/scripts/secrets.js index 3bf58284d..1c1154180 100644 --- a/public/scripts/secrets.js +++ b/public/scripts/secrets.js @@ -16,6 +16,7 @@ export const SECRET_KEYS = { SERPAPI: 'api_key_serpapi', MISTRALAI: 'api_key_mistralai', TOGETHERAI: 'api_key_togetherai', + CUSTOM: 'api_key_custom', }; const INPUT_MAP = { @@ -32,6 +33,7 @@ const INPUT_MAP = { [SECRET_KEYS.APHRODITE]: '#api_key_aphrodite', [SECRET_KEYS.TABBY]: '#api_key_tabby', [SECRET_KEYS.MISTRALAI]: '#api_key_mistralai', + [SECRET_KEYS.CUSTOM]: '#api_key_custom', [SECRET_KEYS.TOGETHERAI]: '#api_key_togetherai', }; diff --git a/public/scripts/tokenizers.js b/public/scripts/tokenizers.js index 3c7bc3f17..fb296d59f 100644 --- 
a/public/scripts/tokenizers.js +++ b/public/scripts/tokenizers.js @@ -388,6 +388,10 @@ export function getTokenizerModel() { return mistralTokenizer; } + if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) { + return oai_settings.custom_model; + } + // Default to Turbo 3.5 return turboTokenizer; } diff --git a/public/style.css b/public/style.css index cf2f1f75c..2f2e6d244 100644 --- a/public/style.css +++ b/public/style.css @@ -3531,11 +3531,11 @@ a { display: none; } -.reverse_proxy_warning { +.reverse_proxy_warning:not(small) { color: var(--warning); background-color: var(--black70a); text-shadow: none !important; - margin-top: 12px !important; + margin-top: 5px !important; border-radius: 5px; padding: 3px; border: 1px solid var(--SmartThemeBorderColor); diff --git a/src/constants.js b/src/constants.js index 03c6ccb82..80b7585b2 100644 --- a/src/constants.js +++ b/src/constants.js @@ -160,6 +160,7 @@ const CHAT_COMPLETION_SOURCES = { AI21: 'ai21', MAKERSUITE: 'makersuite', MISTRALAI: 'mistralai', + CUSTOM: 'custom', }; const UPLOADS_PATH = './uploads'; diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js index 2d68a1239..536a714d5 100644 --- a/src/endpoints/backends/chat-completions.js +++ b/src/endpoints/backends/chat-completions.js @@ -502,12 +502,16 @@ router.post('/status', jsonParser, async function (request, response_getstatus_o } else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.MISTRALAI) { api_url = 'https://api.mistral.ai/v1'; api_key_openai = readSecret(SECRET_KEYS.MISTRALAI); + } else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.CUSTOM) { + api_url = request.body.custom_url; + api_key_openai = readSecret(SECRET_KEYS.CUSTOM); + headers = {}; } else { console.log('This chat completion source is not supported yet.'); return response_getstatus_openai.status(400).send({ error: true }); } - if (!api_key_openai && !request.body.reverse_proxy) 
{ + if (!api_key_openai && !request.body.reverse_proxy && request.body.chat_completion_source !== CHAT_COMPLETION_SOURCES.CUSTOM) { console.log('OpenAI API key is missing.'); return response_getstatus_openai.status(400).send({ error: true }); } @@ -657,7 +661,7 @@ router.post('/generate', jsonParser, function (request, response) { let headers; let bodyParams; - if (request.body.chat_completion_source !== CHAT_COMPLETION_SOURCES.OPENROUTER) { + if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.OPENAI) { apiUrl = new URL(request.body.reverse_proxy || API_OPENAI).toString(); apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(SECRET_KEYS.OPENAI); headers = {}; @@ -666,7 +670,7 @@ router.post('/generate', jsonParser, function (request, response) { if (getConfigValue('openai.randomizeUserId', false)) { bodyParams['user'] = uuidv4(); } - } else { + } else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.OPENROUTER) { apiUrl = 'https://openrouter.ai/api/v1'; apiKey = readSecret(SECRET_KEYS.OPENROUTER); // OpenRouter needs to pass the referer: https://openrouter.ai/docs @@ -676,9 +680,17 @@ router.post('/generate', jsonParser, function (request, response) { if (request.body.use_fallback) { bodyParams['route'] = 'fallback'; } + } else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.CUSTOM) { + apiUrl = request.body.custom_url; + apiKey = readSecret(SECRET_KEYS.CUSTOM); + headers = {}; + bodyParams = {}; + } else { + console.log('This chat completion source is not supported yet.'); + return response.status(400).send({ error: true }); } - if (!apiKey && !request.body.reverse_proxy) { + if (!apiKey && !request.body.reverse_proxy && request.body.chat_completion_source !== CHAT_COMPLETION_SOURCES.CUSTOM) { console.log('OpenAI API key is missing.'); return response.status(400).send({ error: true }); } diff --git a/src/endpoints/secrets.js b/src/endpoints/secrets.js index 000e344fe..8d28563bf 
100644 --- a/src/endpoints/secrets.js +++ b/src/endpoints/secrets.js @@ -27,6 +27,7 @@ const SECRET_KEYS = { SERPAPI: 'api_key_serpapi', TOGETHERAI: 'api_key_togetherai', MISTRALAI: 'api_key_mistralai', + CUSTOM: 'api_key_custom', }; /** From ebec26154c4ededc0346f84647a35a6da5774147 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Wed, 20 Dec 2023 18:37:34 +0200 Subject: [PATCH 55/82] Welcome message fixed --- public/script.js | 6 +++++- public/scripts/templates/welcome.html | 2 +- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/public/script.js b/public/script.js index e398454a9..e68f22465 100644 --- a/public/script.js +++ b/public/script.js @@ -283,6 +283,10 @@ DOMPurify.addHook("uponSanitizeAttribute", (_, data, config) => { case 'class': { if (data.attrValue) { data.attrValue = data.attrValue.split(' ').map((v) => { + if (v.startsWith('fa-') || v.startsWith('note-') || v === 'monospace') { + return v; + } + return "custom-" + v; }).join(' '); } @@ -7902,7 +7906,7 @@ jQuery(async function () { } registerSlashCommand('dupe', DupeChar, [], '– duplicates the currently selected character', true, true); - registerSlashCommand('api', connectAPISlash, [], `(${Object.keys(CONNECT_API_MAP)}) – connect to an API`, true, true); + registerSlashCommand('api', connectAPISlash, [], `(${Object.keys(CONNECT_API_MAP).join(', ')}) – connect to an API`, true, true); registerSlashCommand('impersonate', doImpersonate, ['imp'], '– calls an impersonation response', true, true); registerSlashCommand('delchat', doDeleteChat, [], '– deletes the current chat', true, true); registerSlashCommand('closechat', doCloseChat, [], '– closes the current chat', true, true); diff --git a/public/scripts/templates/welcome.html b/public/scripts/templates/welcome.html index 42000fb70..1b154ab36 100644 --- a/public/scripts/templates/welcome.html +++ b/public/scripts/templates/welcome.html @@ -15,7 +15,7 @@

    Confused or lost?

    • - ? - click these icons! + - click these icons!
    • Enter /? in the chat bar From ae64c998351621c0dd713e818536de9b5817e941 Mon Sep 17 00:00:00 2001 From: Cohee <18619528+Cohee1207@users.noreply.github.com> Date: Wed, 20 Dec 2023 21:05:20 +0200 Subject: [PATCH 56/82] Add custom caption source --- public/index.html | 23 +++++++-------- public/scripts/RossAscends-mods.js | 2 +- public/scripts/extensions/caption/index.js | 3 ++ public/scripts/extensions/shared.js | 10 +++++++ public/scripts/openai.js | 33 +++++++++++++++++++--- src/endpoints/openai.js | 10 ++++++- 6 files changed, 64 insertions(+), 17 deletions(-) diff --git a/public/index.html b/public/index.html index 3e0a84bb2..4b50f1477 100644 --- a/public/index.html +++ b/public/index.html @@ -582,7 +582,7 @@
    Wraps activated World Info entries before inserting into the prompt. Use - {0} to mark a place where the content is inserted. + {0} to mark a place where the content is inserted.
    @@ -596,7 +596,7 @@
    - Use {{scenario}} to mark a place where the content is inserted. + Use {{scenario}} to mark a place where the content is inserted.
    @@ -610,7 +610,7 @@
    - Use {{personality}} to mark a place where the content is inserted. + Use {{personality}} to mark a place where the content is inserted.
    @@ -735,7 +735,7 @@
    - Doesn't work? Try adding /v1 at the end! + Doesn't work? Try adding /v1 at the end!
    @@ -1516,7 +1516,7 @@ -
    +
    @@ -2231,7 +2231,7 @@
    -

    Endpoint URL

    +

    Custom Endpoint (Base URL)

    @@ -2240,7 +2240,10 @@ Doesn't work? Try adding /v1 at the end of the URL!
    -

    Custom API Key

    +

    + Custom API Key + (Optional) +

    @@ -2254,9 +2257,7 @@

    Available Models

    - +
    diff --git a/public/scripts/RossAscends-mods.js b/public/scripts/RossAscends-mods.js index 1827b1b26..7a0c648f3 100644 --- a/public/scripts/RossAscends-mods.js +++ b/public/scripts/RossAscends-mods.js @@ -399,7 +399,7 @@ function RA_autoconnect(PrevApi) { || (secret_state[SECRET_KEYS.AI21] && oai_settings.chat_completion_source == chat_completion_sources.AI21) || (secret_state[SECRET_KEYS.MAKERSUITE] && oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE) || (secret_state[SECRET_KEYS.MISTRALAI] && oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI) - || (secret_state[SECRET_KEYS.CUSTOM] && oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) + || (isValidUrl(oai_settings.custom_url) && oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) ) { $('#api_button_openai').trigger('click'); } diff --git a/public/scripts/extensions/caption/index.js b/public/scripts/extensions/caption/index.js index ec126fcda..ca31ac0a3 100644 --- a/public/scripts/extensions/caption/index.js +++ b/public/scripts/extensions/caption/index.js @@ -277,6 +277,7 @@ jQuery(function () { (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'google' && secret_state[SECRET_KEYS.MAKERSUITE]) || (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'ollama' && textgenerationwebui_settings.server_urls[textgen_types.OLLAMA]) || (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'llamacpp' && textgenerationwebui_settings.server_urls[textgen_types.LLAMACPP]) || + (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'custom') || extension_settings.caption.source === 'local' || extension_settings.caption.source === 'horde'; @@ -345,6 +346,7 @@ jQuery(function () { +
    @@ -358,6 +360,7 @@ jQuery(function () { +