Mirror of https://github.com/SillyTavern/SillyTavern.git (synced 2025-02-18 13:10:40 +01:00)
Compare commits
63 Commits
f3e4d82284 ... 4b306c5ef8
4b306c5ef8
91437ad52f
cec4519dcb
fdd798e2da
95db691c6c
626db7f8f2
b4be50d26a
b1300d403c
0afbd95d09
01fdd4206c
df50fece6c
70c45fb001
f6d0f7587c
fc84e61d94
f648137ae2
41fdc1366d
2b481dae2d
da42d0a47a
d281749d63
5515f28105
bc6c598796
ba91845ced
803b7fca89
3c2e802547
c8e6ba4d7f
362bdf0c46
b9cc763d51
eccd1ab381
049ae541aa
2e661c36e5
50922c11fa
8ed6439da2
876da6899d
7cd68d827b
c5bd43405c
3af5302714
446146674c
dedc10b412
c39db9f3f0
ce3fb2e941
90ee0398f2
ecbf9df79a
85ca08a2ea
1938e8ac14
6ae120900d
8688079882
8de551fc94
0383ea52e9
9a15f45a2c
669ba2fd36
3789381c6c
c30dde8471
4214c9d894
50ffaeb06a
bb062f5ec9
19d4d65374
2846c1d6a8
feb1b91619
cdc0147490
c2eaae3d42
f25ea9f6d6
0e2fdf37a8
7580efc40d
40  .github/readme-ru_ru.md  vendored
@ -229,6 +229,46 @@ SillyTavern stores your API keys in the `sec

1. Open the `config.yaml` file and set `allowKeysExposure` to `true`.
2. Restart the SillyTavern server.

## Command-line arguments

You can pass command-line arguments when starting the SillyTavern server to override settings from `config.yaml`.

### Examples

```shell
node server.js --port 8000 --listen false
# or
npm run start -- --port 8000 --listen false
# or (Windows only)
Start.bat --port 8000 --listen false
```

### Supported arguments

| Argument | Description | Type |
|-------------------------|------------------------------------------------------------------------------------------------------------|---------|
| `--version` | Shows the version number. | boolean |
| `--enableIPv6` | Enables IPv6. | boolean |
| `--enableIPv4` | Enables IPv4. | boolean |
| `--port` | Sets the port SillyTavern will use. If not provided, falls back to the yaml config 'port'. | number |
| `--dnsPreferIPv6` | Prefers IPv6 for DNS. If not provided, falls back to the yaml config 'preferIPv6'. | boolean |
| `--autorun` | Automatically launches SillyTavern in the browser. If not provided, falls back to the yaml config 'autorun'. | boolean |
| `--autorunHostname` | The autorun hostname; best left on 'auto'. | string |
| `--autorunPortOverride` | Overrides the port for autorun. | string |
| `--listen` | SillyTavern will listen on all network interfaces. If not provided, falls back to the yaml config 'listen'. | boolean |
| `--corsProxy` | Enables the CORS proxy. If not provided, falls back to the yaml config 'enableCorsProxy'. | boolean |
| `--disableCsrf` | Disables CSRF protection. | boolean |
| `--ssl` | Enables SSL. | boolean |
| `--certPath` | Path to the certificate file. | string |
| `--keyPath` | Path to the private key file. | string |
| `--whitelist` | Enables whitelist mode. | boolean |
| `--dataRoot` | Root directory for data storage. | string |
| `--avoidLocalhost` | Avoids using 'localhost' for autorun in 'auto' mode. | boolean |
| `--basicAuthMode` | Enables basic authentication. | boolean |
| `--requestProxyEnabled` | Allows using a proxy for outgoing requests. | boolean |
| `--requestProxyUrl` | Proxy URL (HTTP or SOCKS protocols). | string |
| `--requestProxyBypass` | Proxy bypass list (space-separated list of hosts). | array |

## Remote connections

This is mostly used when you want to use SillyTavern from your phone while the SillyTavern server runs on a desktop PC on the same Wi-Fi network.
|
@ -91,9 +91,6 @@
|
||||
<div class="margin0 title_restorable standoutHeader">
|
||||
<strong>
|
||||
<span data-i18n="kobldpresets">Kobold Presets</span>
|
||||
<a href="https://docs.sillytavern.app/usage/api-connections/koboldai/" class="notes-link" target="_blank">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
</strong>
|
||||
|
||||
<div class="flex-container gap3px">
|
||||
@ -1297,10 +1294,10 @@
|
||||
<div data-tg-type="aphrodite" class="alignitemscenter flex-container flexFlowColumn flexBasis30p flexGrow flexShrink gap0">
|
||||
<small>
|
||||
<span data-i18n="Top nsigma">Top nsigma</span>
|
||||
<div class="fa-solid fa-circle-info opacity50p" data-i18n="[title]A sampling method that filters logits based on their statistical properties. It keeps tokens within n standard deviations of the maximum logit value, providing a simpler alternative to top-p/top-k sampling while maintaining sampling stability across different temperatures."></div>
|
||||
<div class="fa-solid fa-circle-info opacity50p" title="A sampling method that filters logits based on their statistical properties. It keeps tokens within n standard deviations of the maximum logit value, providing a simpler alternative to top-p/top-k sampling while maintaining sampling stability across different temperatures."></div>
|
||||
</small>
|
||||
<input class="neo-range-slider" type="range" id="nsigma" name="volume" min="0" max="5" step="0.01">
|
||||
<input class="neo-range-input" type="number" min="0" max="5" step="0.01" data-for="nsigma" id="nsigma_counter_textgenerationwebui">
|
||||
<input class="neo-range-slider" type="range" id="nsigma_textgenerationwebui" name="volume" min="0" max="5" step="0.01">
|
||||
<input class="neo-range-input" type="number" min="0" max="5" step="0.01" data-for="nsigma_textgenerationwebui" id="nsigma_counter_textgenerationwebui">
|
||||
</div>
|
||||
<div data-tg-type="ooba,mancer,aphrodite" class="alignitemscenter flex-container flexFlowColumn flexBasis30p flexGrow flexShrink gap0">
|
||||
<small>
|
||||
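The tooltip added in the hunk above describes the top nsigma sampler: it keeps tokens whose logits lie within n standard deviations of the maximum logit. A minimal JavaScript sketch of that rule, for reference only; the function name and sample logits are illustrative, not SillyTavern or backend code:

```javascript
// Keep only logits within nSigma standard deviations of the maximum logit;
// everything else is masked to -Infinity before softmax.
function topNSigmaFilter(logits, nSigma) {
    const max = Math.max(...logits);
    const mean = logits.reduce((a, b) => a + b, 0) / logits.length;
    const variance = logits.reduce((a, b) => a + (b - mean) ** 2, 0) / logits.length;
    const threshold = max - nSigma * Math.sqrt(variance);
    return logits.map(l => (l >= threshold ? l : -Infinity));
}

console.log(topNSigmaFilter([2.0, 1.5, 0.2, -3.0], 1.0)); // [2, 1.5, 0.2, -Infinity]
```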
@ -1973,7 +1970,7 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="range-block m-t-1" data-source="openai,openrouter,scale">
|
||||
<div class="range-block m-t-1" data-source="openai,openrouter,scale,custom">
|
||||
<div id="logit_bias_openai" class="range-block-title openai_restorable" data-i18n="Logit Bias">
|
||||
Logit Bias
|
||||
</div>
|
||||
@ -2748,6 +2745,7 @@
|
||||
</optgroup>
|
||||
<optgroup label="GPT-4o">
|
||||
<option value="gpt-4o">gpt-4o</option>
|
||||
<option value="gpt-4o-2024-11-20">gpt-4o-2024-11-20</option>
|
||||
<option value="gpt-4o-2024-08-06">gpt-4o-2024-08-06</option>
|
||||
<option value="gpt-4o-2024-05-13">gpt-4o-2024-05-13</option>
|
||||
<option value="chatgpt-4o-latest">chatgpt-4o-latest</option>
|
||||
@ -2815,9 +2813,6 @@
|
||||
<option value="claude-3-haiku-20240307">claude-3-haiku-20240307</option>
|
||||
<option value="claude-2.1">claude-2.1</option>
|
||||
<option value="claude-2.0">claude-2.0</option>
|
||||
<option value="claude-1.3">claude-1.3</option>
|
||||
<option value="claude-instant-1.2">claude-instant-1.2</option>
|
||||
<option value="claude-instant-1.1">claude-instant-1.1</option>
|
||||
</optgroup>
|
||||
</select>
|
||||
</div>
|
||||
@ -2987,6 +2982,7 @@
|
||||
<option value="chat-bison-001">PaLM 2 Chat (Legacy)</option>
|
||||
</optgroup>
|
||||
<optgroup label="Subversions">
|
||||
<option value="gemini-exp-1121">Gemini Experimental 2024-11-21</option>
|
||||
<option value="gemini-exp-1114">Gemini Experimental 2024-11-14</option>
|
||||
<option value="gemini-1.5-pro-exp-0801">Gemini 1.5 Pro Experiment 2024-08-01</option>
|
||||
<option value="gemini-1.5-pro-exp-0827">Gemini 1.5 Pro Experiment 2024-08-27</option>
|
||||
@ -3167,6 +3163,8 @@
|
||||
<h4 data-i18n="Cohere Model">Cohere Model</h4>
|
||||
<select id="model_cohere_select">
|
||||
<optgroup label="Stable">
|
||||
<option value="c4ai-aya-expanse-32b">c4ai-aya-expanse-32b</option>
|
||||
<option value="c4ai-aya-expanse-8b">c4ai-aya-expanse-8b</option>
|
||||
<option value="c4ai-aya-23-35b">c4ai-aya-23-35b</option>
|
||||
<option value="c4ai-aya-23-8b">c4ai-aya-23-8b</option>
|
||||
<option value="command-light">command-light</option>
|
||||
@ -3278,8 +3276,14 @@
|
||||
</div>
|
||||
<div id="AdvancedFormatting" class="drawer-content">
|
||||
<div class="flex-container alignItemsBaseline">
|
||||
<h3 class="margin0 flex1" data-i18n="Advanced Formatting">
|
||||
<h3 class="margin0 flex1 flex-container alignItemsBaseline">
|
||||
<span data-i18n="Advanced Formatting">
|
||||
Advanced Formatting
|
||||
</span>
|
||||
|
||||
<a href="https://docs.sillytavern.app/usage/prompts/" class="notes-link" target="_blank">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
</h3>
|
||||
<div class="flex-container">
|
||||
<input id="af_master_import_file" type="file" hidden accept=".json" class="displayNone">
|
||||
@ -3299,9 +3303,12 @@
|
||||
<h4 class="standoutHeader title_restorable">
|
||||
<div>
|
||||
<span data-i18n="Context Template">Context Template</span>
|
||||
<a href="https://docs.sillytavern.app/usage/core-concepts/advancedformatting/#context-template" class="notes-link" target="_blank">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
</div>
|
||||
<div class="flex-container">
|
||||
<label for="context_derived" class="checkbox_label flex1" title="Derive from Model Metadata, if possible." data-i18n="[title]context_derived">
|
||||
<input id="context_derived" type="checkbox" style="display:none;" />
|
||||
<small><i class="fa-solid fa-bolt menu_button margin0"></i></small>
|
||||
</label>
|
||||
</div>
|
||||
</h4>
|
||||
<div class="flex-container" title="Select your current Context Template" data-i18n="[title]Select your current Context Template">
|
||||
@ -3398,11 +3405,12 @@
|
||||
<h4 class="standoutHeader title_restorable justifySpaceBetween">
|
||||
<div class="flex-container">
|
||||
<span data-i18n="Instruct Template">Instruct Template</span>
|
||||
<a href="https://docs.sillytavern.app/usage/core-concepts/instructmode/" class="notes-link" target="_blank">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
</div>
|
||||
<div class="flex-container">
|
||||
<label for="instruct_derived" class="checkbox_label flex1" title="Derive from Model Metadata, if possible." data-i18n="[title]instruct_derived">
|
||||
<input id="instruct_derived" type="checkbox" style="display:none;" />
|
||||
<small><i class="fa-solid fa-bolt menu_button margin0"></i></small>
|
||||
</label>
|
||||
<label for="instruct_bind_to_context" class="checkbox_label flex1" title="Bind to Context
If enabled, Context templates will be automatically selected based on selected Instruct template name or by preference." data-i18n="[title]instruct_bind_to_context">
|
||||
<input id="instruct_bind_to_context" type="checkbox" style="display:none;" />
|
||||
<small><i class="fa-solid fa-link menu_button margin0"></i></small>
|
||||
@ -3583,9 +3591,6 @@
|
||||
<h4 class="standoutHeader title_restorable justifySpaceBetween">
|
||||
<div class="flex-container">
|
||||
<span data-i18n="System Prompt">System Prompt</span>
|
||||
<a href="https://docs.sillytavern.app/usage/core-concepts/advancedformatting/#system-prompt" class="notes-link" target="_blank">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
</div>
|
||||
<div class="flex-container">
|
||||
<label id="sysprompt_enabled_label" for="sysprompt_enabled" class="checkbox_label flex1" title="Enable System Prompt" data-i18n="[title]sysprompt_enabled">
|
||||
@ -3653,7 +3658,7 @@
|
||||
<div name="tokenizerSettingsBlock">
|
||||
<div name="tokenizerSelectorBlock">
|
||||
<h4 class="standoutHeader"><span data-i18n="Tokenizer">Tokenizer</span>
|
||||
<a href="https://docs.sillytavern.app/usage/core-concepts/advancedformatting/#tokenizer" class="notes-link" target="_blank">
|
||||
<a href="https://docs.sillytavern.app/usage/prompts/tokenizer/" class="notes-link" target="_blank">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
</h4>
|
||||
@ -3679,10 +3684,8 @@
|
||||
</div>
|
||||
<div class="range-block flex-container flexnowrap" name="tokenPaddingBlock">
|
||||
<div class="range-block-title justifyLeft">
|
||||
<small data-i18n="Token Padding" class="flex-container">Token Padding
|
||||
<a href="https://docs.sillytavern.app/usage/core-concepts/advancedformatting/#token-padding" class="notes-link" target="_blank">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
<small data-i18n="Token Padding">
|
||||
Token Padding
|
||||
</small>
|
||||
</div>
|
||||
<input id="token_padding" class="text_pole textarea_compact" type="number" min="-2048" max="2048" />
|
||||
@ -3740,7 +3743,7 @@
|
||||
</div>
|
||||
<h3 class="margin0">
|
||||
<span data-i18n="Worlds/Lorebooks">Worlds/Lorebooks</span>
|
||||
<a href="https://docs.sillytavern.app/usage/core-concepts/worldinfo/" class="notes-link" target="_blank">
|
||||
<a href="https://docs.sillytavern.app/usage/worldinfo/" class="notes-link" target="_blank">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
</h3>
|
||||
@ -4267,7 +4270,7 @@
|
||||
<audio id="audio_message_sound" src="sounds/message.mp3" hidden></audio>
|
||||
<span>
|
||||
<small data-i18n="Message Sound">Message Sound</small>
|
||||
<a href="https://docs.sillytavern.app/usage/core-concepts/uicustomization/#message-sound" class="notes-link" target="_blank">
|
||||
<a href="https://docs.sillytavern.app/usage/user_settings/uicustomization/#message-sound" class="notes-link" target="_blank">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
</span>
|
||||
@ -4484,7 +4487,7 @@
|
||||
</div>
|
||||
<div nane="AutoContiueBlock" class="inline-drawer wide100p flexFlowColumn">
|
||||
<div class="inline-drawer-toggle inline-drawer-header userSettingsInnerExpandable" title="Automatically 'continue' a response if the model stopped before reaching a certain amount of tokens.">
|
||||
<b><span data-i18n="Auto-swipe">Auto-Continue</span></b>
|
||||
<b><span data-i18n="Auto-Continue">Auto-Continue</span></b>
|
||||
<div class="fa-solid fa-circle-chevron-down inline-drawer-icon down"></div>
|
||||
</div>
|
||||
<div class="inline-drawer-content">
|
||||
@ -5799,7 +5802,7 @@
|
||||
<div class="flex-container justifySpaceBetween">
|
||||
<small for="group">
|
||||
<span data-i18n="Inclusion Group">Inclusion Group</span>
|
||||
<a href="https://docs.sillytavern.app/usage/core-concepts/worldinfo/#inclusion-group" class="notes-link" target="_blank" title="Inclusion Groups ensure only one entry from a group is activated at a time, if multiple are triggered. Supports multiple comma-separated groups. Documentation: World Info - Inclusion Group" data-i18n="[title]Inclusion Groups ensure only one entry from a group is activated at a time, if multiple are triggered.Documentation: World Info - Inclusion Group">
|
||||
<a href="https://docs.sillytavern.app/usage/worldinfo/#inclusion-group" class="notes-link" target="_blank" title="Inclusion Groups ensure only one entry from a group is activated at a time, if multiple are triggered. Supports multiple comma-separated groups. Documentation: World Info - Inclusion Group" data-i18n="[title]Inclusion Groups ensure only one entry from a group is activated at a time, if multiple are triggered.Documentation: World Info - Inclusion Group">
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</a>
|
||||
</small>
|
||||
|
File diff suppressed because it is too large
@ -267,6 +267,7 @@ import { applyBrowserFixes } from './scripts/browser-fixes.js';
|
||||
import { initServerHistory } from './scripts/server-history.js';
|
||||
import { initSettingsSearch } from './scripts/setting-search.js';
|
||||
import { initBulkEdit } from './scripts/bulk-edit.js';
|
||||
import { deriveTemplatesFromChatTemplate } from './scripts/chat-templates.js';
|
||||
|
||||
//exporting functions and vars for mods
|
||||
export {
|
||||
@ -1235,6 +1236,38 @@ async function getStatusTextgen() {
|
||||
const supportsTokenization = response.headers.get('x-supports-tokenization') === 'true';
|
||||
supportsTokenization ? sessionStorage.setItem(TOKENIZER_SUPPORTED_KEY, 'true') : sessionStorage.removeItem(TOKENIZER_SUPPORTED_KEY);
|
||||
|
||||
const wantsInstructDerivation = (power_user.instruct.enabled && power_user.instruct.derived);
|
||||
const wantsContextDerivation = power_user.context_derived;
|
||||
const supportsChatTemplate = [textgen_types.KOBOLDCPP, textgen_types.LLAMACPP].includes(textgen_settings.type);
|
||||
if (supportsChatTemplate && (wantsInstructDerivation || wantsContextDerivation)) {
|
||||
const response = await fetch('/api/backends/text-completions/props', {
|
||||
method: 'POST',
|
||||
headers: getRequestHeaders(),
|
||||
body: JSON.stringify({
|
||||
api_server: endpoint,
|
||||
api_type: textgen_settings.type,
|
||||
}),
|
||||
});
|
||||
|
||||
if (response.ok) {
|
||||
const data = await response.json();
|
||||
if (data) {
|
||||
const { chat_template, chat_template_hash } = data;
|
||||
console.log(`We have chat template ${chat_template.split('\n')[0]}...`);
|
||||
const templates = await deriveTemplatesFromChatTemplate(chat_template, chat_template_hash);
|
||||
if (templates) {
|
||||
const { context, instruct } = templates;
|
||||
if (wantsContextDerivation) {
|
||||
selectContextPreset(context, { isAuto: true });
|
||||
}
|
||||
if (wantsInstructDerivation) {
|
||||
selectInstructPreset(instruct, { isAuto: true });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// We didn't get a 200 status code, but the endpoint has an explanation. Which means it DID connect, but I digress.
|
||||
if (online_status === 'no_connection' && data.response) {
|
||||
toastr.error(data.response, t`API Error`, { timeOut: 5000, preventDuplicates: true });
|
||||
@ -2672,8 +2705,7 @@ export async function generateQuietPrompt(quiet_prompt, quietToLoud, skipWIAN, q
|
||||
quietName: quietName,
|
||||
};
|
||||
originalResponseLength = responseLengthCustomized ? saveResponseLength(main_api, responseLength) : -1;
|
||||
const generateFinished = await Generate('quiet', options);
|
||||
return generateFinished;
|
||||
return await Generate('quiet', options);
|
||||
} finally {
|
||||
if (responseLengthCustomized) {
|
||||
restoreResponseLength(main_api, originalResponseLength);
|
||||
@ -3328,9 +3360,9 @@ export async function generateRaw(prompt, api, instructOverride, quietToLoud, sy
|
||||
|
||||
let data = {};
|
||||
|
||||
if (api == 'koboldhorde') {
|
||||
if (api === 'koboldhorde') {
|
||||
data = await generateHorde(prompt, generateData, abortController.signal, false);
|
||||
} else if (api == 'openai') {
|
||||
} else if (api === 'openai') {
|
||||
data = await sendOpenAIRequest('quiet', generateData, abortController.signal);
|
||||
} else {
|
||||
const generateUrl = getGenerateUrl(api);
|
||||
@ -3343,13 +3375,15 @@ export async function generateRaw(prompt, api, instructOverride, quietToLoud, sy
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json();
|
||||
throw error;
|
||||
throw await response.json();
|
||||
}
|
||||
|
||||
data = await response.json();
|
||||
}
|
||||
|
||||
// should only happen for text completions
|
||||
// other frontend paths do not return data if calling the backend fails,
|
||||
// they throw things instead
|
||||
if (data.error) {
|
||||
throw new Error(data.response);
|
||||
}
|
||||
@ -4401,6 +4435,11 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
/**
|
||||
* Saves itemized prompt bits and calls streaming or non-streaming generation API.
|
||||
* @returns {Promise<void|*|Awaited<*>|String|{fromStream}|string|undefined|Object>}
|
||||
* @throws {Error|object} Error with message text, or Error with response JSON (OAI/Horde), or the actual response JSON (novel|textgenerationwebui|kobold)
|
||||
*/
|
||||
async function finishGenerating() {
|
||||
if (power_user.console_log_prompts) {
|
||||
console.log(generate_data.prompt);
|
||||
@ -4512,6 +4551,12 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
|
||||
|
||||
return finishGenerating().then(onSuccess, onError);
|
||||
|
||||
/**
|
||||
* Handles the successful response from the generation API.
|
||||
* @param data
|
||||
* @returns {Promise<String|{fromStream}|*|string|string|void|Awaited<*>|undefined>}
|
||||
* @throws {Error} Throws an error if the response data contains an error message
|
||||
*/
|
||||
async function onSuccess(data) {
|
||||
if (!data) return;
|
||||
|
||||
@ -4521,6 +4566,7 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
|
||||
|
||||
let messageChunk = '';
|
||||
|
||||
// if an error was returned in data (textgenwebui), show it and throw it
|
||||
if (data.error) {
|
||||
unblockGeneration(type);
|
||||
generatedPromptCache = '';
|
||||
@ -4635,9 +4681,15 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
|
||||
return Object.defineProperty(new String(getMessage), 'messageChunk', { value: messageChunk });
|
||||
}
|
||||
|
||||
/**
|
||||
* Exception handler for finishGenerating
|
||||
* @param {Error|object} exception Error or response JSON
|
||||
* @throws {Error|object} Re-throws the exception
|
||||
*/
|
||||
function onError(exception) {
|
||||
// if the response JSON was thrown (novel|textgenerationwebui|kobold), show the error message
|
||||
if (typeof exception?.error?.message === 'string') {
|
||||
toastr.error(exception.error.message, t`Error`, { timeOut: 10000, extendedTimeOut: 20000 });
|
||||
toastr.error(exception.error.message, t`Text generation error`, { timeOut: 10000, extendedTimeOut: 20000 });
|
||||
}
|
||||
|
||||
generatedPromptCache = '';
|
||||
@ -5305,6 +5357,7 @@ function setInContextMessages(lastmsg, type) {
|
||||
* @param {string} type Generation type
|
||||
* @param {object} data Generation data
|
||||
* @returns {Promise<object>} Response data from the API
|
||||
* @throws {Error|object}
|
||||
*/
|
||||
export async function sendGenerationRequest(type, data) {
|
||||
if (main_api === 'openai') {
|
||||
@ -5324,12 +5377,10 @@ export async function sendGenerationRequest(type, data) {
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json();
|
||||
throw error;
|
||||
throw await response.json();
|
||||
}
|
||||
|
||||
const responseData = await response.json();
|
||||
return responseData;
|
||||
return await response.json();
|
||||
}
|
||||
|
||||
/**
|
||||
@ -5361,6 +5412,7 @@ export async function sendStreamingRequest(type, data) {
|
||||
* Gets the generation endpoint URL for the specified API.
|
||||
* @param {string} api API name
|
||||
* @returns {string} Generation URL
|
||||
* @throws {Error} If the API is unknown
|
||||
*/
|
||||
function getGenerateUrl(api) {
|
||||
switch (api) {
|
||||
|
@ -37,12 +37,15 @@ const chara_note_position = {
|
||||
};
|
||||
|
||||
function setNoteTextCommand(_, text) {
|
||||
if (text) {
|
||||
$('#extension_floating_prompt').val(text).trigger('input');
|
||||
toastr.success(t`Author's Note text updated`);
|
||||
return '';
|
||||
}
|
||||
return chat_metadata[metadata_keys.prompt];
|
||||
}
|
||||
|
||||
function setNoteDepthCommand(_, text) {
|
||||
if (text) {
|
||||
const value = Number(text);
|
||||
|
||||
if (Number.isNaN(value)) {
|
||||
@ -52,10 +55,12 @@ function setNoteDepthCommand(_, text) {
|
||||
|
||||
$('#extension_floating_depth').val(Math.abs(value)).trigger('input');
|
||||
toastr.success(t`Author's Note depth updated`);
|
||||
return '';
|
||||
}
|
||||
return chat_metadata[metadata_keys.depth];
|
||||
}
|
||||
|
||||
function setNoteIntervalCommand(_, text) {
|
||||
if (text) {
|
||||
const value = Number(text);
|
||||
|
||||
if (Number.isNaN(value)) {
|
||||
@ -65,25 +70,52 @@ function setNoteIntervalCommand(_, text) {
|
||||
|
||||
$('#extension_floating_interval').val(Math.abs(value)).trigger('input');
|
||||
toastr.success(t`Author's Note frequency updated`);
|
||||
return '';
|
||||
}
|
||||
return chat_metadata[metadata_keys.interval];
|
||||
}
|
||||
|
||||
function setNotePositionCommand(_, text) {
|
||||
const validPositions = {
|
||||
'after': 0,
|
||||
'scenario': 0,
|
||||
'chat': 1,
|
||||
'before_scenario': 2,
|
||||
'before': 2,
|
||||
};
|
||||
|
||||
const position = validPositions[text?.trim()];
|
||||
if (text) {
|
||||
const position = validPositions[text?.trim()?.toLowerCase()];
|
||||
|
||||
if (Number.isNaN(position)) {
|
||||
if (typeof position === 'undefined') {
|
||||
toastr.error(t`Not a valid position`);
|
||||
return;
|
||||
}
|
||||
|
||||
$(`input[name="extension_floating_position"][value="${position}"]`).prop('checked', true).trigger('input');
|
||||
toastr.info(t`Author's Note position updated`);
|
||||
return '';
|
||||
}
|
||||
return Object.keys(validPositions).find(key => validPositions[key] == chat_metadata[metadata_keys.position]);
|
||||
}
|
||||
|
||||
function setNoteRoleCommand(_, text) {
|
||||
const validRoles = {
|
||||
'system': 0,
|
||||
'user': 1,
|
||||
'assistant': 2,
|
||||
};
|
||||
|
||||
if (text) {
|
||||
const role = validRoles[text?.trim()?.toLowerCase()];
|
||||
|
||||
if (typeof role === 'undefined') {
|
||||
toastr.error(t`Not a valid role`);
|
||||
return;
|
||||
}
|
||||
|
||||
$('#extension_floating_role').val(Math.abs(role)).trigger('input');
|
||||
toastr.info(t`Author's Note role updated`);
|
||||
}
|
||||
return Object.keys(validRoles).find(key => validRoles[key] == chat_metadata[metadata_keys.role]);
|
||||
}
|
||||
|
||||
function updateSettings() {
|
||||
@ -462,57 +494,84 @@ export function initAuthorsNote() {
|
||||
});
|
||||
$('#option_toggle_AN').on('click', onANMenuItemClick);
|
||||
|
||||
SlashCommandParser.addCommandObject(SlashCommand.fromProps({ name: 'note',
|
||||
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
|
||||
name: 'note',
|
||||
callback: setNoteTextCommand,
|
||||
returns: 'current author\'s note',
|
||||
unnamedArgumentList: [
|
||||
new SlashCommandArgument(
|
||||
'text', [ARGUMENT_TYPE.STRING], true,
|
||||
'text', [ARGUMENT_TYPE.STRING], false,
|
||||
),
|
||||
],
|
||||
helpString: `
|
||||
<div>
|
||||
Sets an author's note for the currently selected chat.
|
||||
Sets an author's note for the currently selected chat if specified and returns the current note.
|
||||
</div>
|
||||
`,
|
||||
}));
|
||||
SlashCommandParser.addCommandObject(SlashCommand.fromProps({ name: 'depth',
|
||||
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
|
||||
name: 'note-depth',
|
||||
aliases: ['depth'],
|
||||
callback: setNoteDepthCommand,
|
||||
returns: 'current author\'s note depth',
|
||||
unnamedArgumentList: [
|
||||
new SlashCommandArgument(
|
||||
'number', [ARGUMENT_TYPE.NUMBER], true,
|
||||
'number', [ARGUMENT_TYPE.NUMBER], false,
|
||||
),
|
||||
],
|
||||
helpString: `
|
||||
<div>
|
||||
Sets an author's note depth for in-chat positioning.
|
||||
Sets an author's note depth for in-chat positioning if specified and returns the current depth.
|
||||
</div>
|
||||
`,
|
||||
}));
|
||||
SlashCommandParser.addCommandObject(SlashCommand.fromProps({ name: 'freq',
|
||||
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
|
||||
name: 'note-frequency',
|
||||
aliases: ['freq', 'note-freq'],
|
||||
callback: setNoteIntervalCommand,
|
||||
returns: 'current author\'s note insertion frequency',
|
||||
namedArgumentList: [],
|
||||
unnamedArgumentList: [
|
||||
new SlashCommandArgument(
|
||||
'number', [ARGUMENT_TYPE.NUMBER], true,
|
||||
'number', [ARGUMENT_TYPE.NUMBER], false,
|
||||
),
|
||||
],
|
||||
helpString: `
|
||||
<div>
|
||||
Sets an author's note insertion frequency.
|
||||
Sets an author's note insertion frequency if specified and returns the current frequency.
|
||||
</div>
|
||||
`,
|
||||
}));
|
||||
SlashCommandParser.addCommandObject(SlashCommand.fromProps({ name: 'pos',
|
||||
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
|
||||
name: 'note-position',
|
||||
callback: setNotePositionCommand,
|
||||
aliases: ['pos', 'note-pos'],
|
||||
returns: 'current author\'s note insertion position',
|
||||
namedArgumentList: [],
|
||||
unnamedArgumentList: [
|
||||
new SlashCommandArgument(
|
||||
'position', [ARGUMENT_TYPE.STRING], true, false, null, ['chat', 'scenario'],
|
||||
'position', [ARGUMENT_TYPE.STRING], false, false, null, ['before', 'after', 'chat'],
|
||||
),
|
||||
],
|
||||
helpString: `
|
||||
<div>
|
||||
Sets an author's note position.
|
||||
Sets an author's note position if specified and returns the current position.
|
||||
</div>
|
||||
`,
|
||||
}));
|
||||
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
|
||||
name: 'note-role',
|
||||
callback: setNoteRoleCommand,
|
||||
returns: 'current author\'s note chat insertion role',
|
||||
namedArgumentList: [],
|
||||
unnamedArgumentList: [
|
||||
new SlashCommandArgument(
|
||||
'position', [ARGUMENT_TYPE.STRING], false, false, null, ['system', 'user', 'assistant'],
|
||||
),
|
||||
],
|
||||
helpString: `
|
||||
<div>
|
||||
Sets an author's note chat insertion role if specified and returns the current role.
|
||||
</div>
|
||||
`,
|
||||
}));
|
||||
|
@ -89,7 +89,7 @@
|
||||
* @property {boolean} markdownOnly - Whether the script only applies to Markdown
|
||||
* @property {boolean} promptOnly - Whether the script only applies to prompts
|
||||
* @property {boolean} runOnEdit - Whether the script runs on edit
|
||||
* @property {boolean} substituteRegex - Whether the regex should be substituted
|
||||
* @property {number} substituteRegex - Whether the regex should be substituted
|
||||
* @property {number} minDepth - The minimum depth
|
||||
* @property {number} maxDepth - The maximum depth
|
||||
*/
|
||||
|
81  public/scripts/chat-templates.js  Normal file
@ -0,0 +1,81 @@
// the hash can be obtained from command line e.g. via: MODEL=path_to_model; python -c "import json, hashlib, sys; print(hashlib.sha256(json.load(open('"$MODEL"/tokenizer_config.json'))['chat_template'].encode()).hexdigest())"
// note that chat templates must be trimmed to match the llama.cpp metadata value
const hash_derivations = {
    // Meta
    'e10ca381b1ccc5cf9db52e371f3b6651576caee0a630b452e2816b2d404d4b65':
        // Meta-Llama-3.1-8B-Instruct
        // Meta-Llama-3.1-70B-Instruct
        'Llama 3 Instruct'
    ,
    '5816fce10444e03c2e9ee1ef8a4a1ea61ae7e69e438613f3b17b69d0426223a4':
        // Llama-3.2-1B-Instruct
        // Llama-3.2-3B-Instruct
        'Llama 3 Instruct'
    ,
    '73e87b1667d87ab7d7b579107f01151b29ce7f3ccdd1018fdc397e78be76219d':
        // Nemotron 70B
        'Llama 3 Instruct'
    ,

    // Mistral
    // Mistral Reference: https://github.com/mistralai/mistral-common
    'e16746b40344d6c5b5265988e0328a0bf7277be86f1c335156eae07e29c82826':
        // Mistral-Small-Instruct-2409
        // Mistral-Large-Instruct-2407
        'Mistral V2 & V3'
    ,
    '3c4ad5fa60dd8c7ccdf82fa4225864c903e107728fcaf859fa6052cb80c92ee9':
        // Mistral-Large-Instruct-2411
        'Mistral V7' // https://huggingface.co/mistralai/Mistral-Large-Instruct-2411
    ,
    'e4676cb56dffea7782fd3e2b577cfaf1e123537e6ef49b3ec7caa6c095c62272':
        // Mistral-Nemo-Instruct-2407
        'Mistral V3-Tekken'
    ,
    '26a59556925c987317ce5291811ba3b7f32ec4c647c400c6cc7e3a9993007ba7':
        // Mistral-7B-Instruct-v0.3
        'Mistral V2 & V3'
    ,

    // Gemma
    'ecd6ae513fe103f0eb62e8ab5bfa8d0fe45c1074fa398b089c93a7e70c15cfd6':
        // gemma-2-9b-it
        // gemma-2-27b-it
        'Gemma 2'
    ,
    '87fa45af6cdc3d6a9e4dd34a0a6848eceaa73a35dcfe976bd2946a5822a38bf3':
        // gemma-2-2b-it
        'Gemma 2'
    ,

    // Cohere
    '3b54f5c219ae1caa5c0bb2cdc7c001863ca6807cf888e4240e8739fa7eb9e02e':
        // command-r-08-2024
        'Command R'
    ,
};

const substr_derivations = {
    '<|im_start|>': 'ChatML', // qwen2.5, ...
};

const parse_derivation = derivation => (typeof derivation === 'string') ? {
    'context': derivation,
    'instruct': derivation,
} : derivation;

export async function deriveTemplatesFromChatTemplate(chat_template, hash) {
    if (hash in hash_derivations) {
        return parse_derivation(hash_derivations[hash]);
    }

    // heuristics
    for (const [substr, derivation] of Object.entries(substr_derivations)) {
        if (chat_template.includes(substr)) {
            return parse_derivation(derivation);
        }
    }

    console.log(`Unknown chat template hash: ${hash} for [${chat_template}]`);
    return null;
}
|
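A hedged usage sketch for the new `deriveTemplatesFromChatTemplate` export. The helper names are made up for illustration, and it assumes an environment with Web Crypto (browser or recent Node); the hashing mirrors the file's header comment, a SHA-256 of the trimmed chat template string:

```javascript
import { deriveTemplatesFromChatTemplate } from './chat-templates.js';

// Hash the trimmed template text so it can be looked up against hash_derivations.
async function sha256Hex(text) {
    const digest = await crypto.subtle.digest('SHA-256', new TextEncoder().encode(text));
    return [...new Uint8Array(digest)].map(b => b.toString(16).padStart(2, '0')).join('');
}

async function deriveFromRawTemplate(chatTemplate) {
    const trimmed = chatTemplate.trim();
    const templates = await deriveTemplatesFromChatTemplate(trimmed, await sha256Hex(trimmed));
    if (templates) {
        console.log(`Context preset: ${templates.context}, Instruct preset: ${templates.instruct}`);
    }
    return templates; // null when neither the hash table nor the substring heuristics match
}
```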
@ -61,6 +61,7 @@
|
||||
<option data-type="google" value="gemini-1.5-flash-8b-exp-0827">gemini-1.5-flash-8b-exp-0827</option>
|
||||
<option data-type="google" value="gemini-1.5-flash-8b-exp-0924">gemini-1.5-flash-8b-exp-0924</option>
|
||||
<option data-type="google" value="gemini-exp-1114">gemini-exp-1114</option>
|
||||
<option data-type="google" value="gemini-exp-1121">gemini-exp-1121</option>
|
||||
<option data-type="google" value="gemini-1.5-pro">gemini-1.5-pro</option>
|
||||
<option data-type="google" value="gemini-1.5-pro-latest">gemini-1.5-pro-latest</option>
|
||||
<option data-type="google" value="gemini-1.5-pro-001">gemini-1.5-pro-001</option>
|
||||
|
@ -121,12 +121,16 @@
|
||||
<input type="checkbox" name="run_on_edit" />
|
||||
<span data-i18n="Run On Edit">Run On Edit</span>
|
||||
</label>
|
||||
<label class="checkbox flex-container" data-i18n="[title]ext_regex_substitute_regex_desc" title="Substitute {{macros}} in Find Regex before running it">
|
||||
<input type="checkbox" name="substitute_regex" />
|
||||
<label class="checkbox flex-container flexNoGap marginBot5" data-i18n="[title]ext_regex_substitute_regex_desc" title="Substitute {{macros}} in Find Regex before running it">
|
||||
<span>
|
||||
<span data-i18n="Substitute Regex">Substitute Regex</span>
|
||||
<small data-i18n="Macro in Find Regex">Macros in Find Regex</small>
|
||||
<span class="fa-solid fa-circle-question note-link-span"></span>
|
||||
</span>
|
||||
<select name="substitute_regex" class="text_pole textarea_compact margin0">
|
||||
<option value="0" data-i18n="Don't substitute">Don't substitute</option>
|
||||
<option value="1" data-i18n="Substitute (raw)">Substitute (raw)</option>
|
||||
<option value="2" data-i18n="Substitute (escaped)">Substitute (escaped)</option>
|
||||
</select>
|
||||
</label>
|
||||
<span>
|
||||
<small data-i18n="ext_regex_other_options" data-i18n="Ephemerality">Ephemerality</small>
|
||||
|
@ -22,6 +22,12 @@ const regex_placement = {
|
||||
WORLD_INFO: 5,
|
||||
};
|
||||
|
||||
export const substitute_find_regex = {
|
||||
NONE: 0,
|
||||
RAW: 1,
|
||||
ESCAPED: 2,
|
||||
};
|
||||
|
||||
function sanitizeRegexMacro(x) {
|
||||
return (x && typeof x === 'string') ?
|
||||
x.replaceAll(/[\n\r\t\v\f\0.^$*+?{}[\]\\/|()]/gs, function (s) {
|
||||
@ -131,9 +137,20 @@ function runRegexScript(regexScript, rawString, { characterOverride } = {}) {
|
||||
return newString;
|
||||
}
|
||||
|
||||
const regexString = regexScript.substituteRegex
|
||||
? substituteParamsExtended(regexScript.findRegex, {}, sanitizeRegexMacro)
|
||||
: regexScript.findRegex;
|
||||
const getRegexString = () => {
|
||||
switch(Number(regexScript.substituteRegex)) {
|
||||
case substitute_find_regex.NONE:
|
||||
return regexScript.findRegex;
|
||||
case substitute_find_regex.RAW:
|
||||
return substituteParamsExtended(regexScript.findRegex);
|
||||
case substitute_find_regex.ESCAPED:
|
||||
return substituteParamsExtended(regexScript.findRegex, {}, sanitizeRegexMacro);
|
||||
default:
|
||||
console.warn(`runRegexScript: Unknown substituteRegex value ${regexScript.substituteRegex}. Using raw regex.`);
|
||||
return regexScript.findRegex;
|
||||
}
|
||||
};
|
||||
const regexString = getRegexString();
|
||||
const findRegex = regexFromString(regexString);
|
||||
|
||||
// The user skill issued. Return with nothing.
|
||||
|
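To illustrate the three `substitute_find_regex` modes wired up above, here is a simplified sketch. `expandMacros` and `escapeForRegex` are stand-ins for SillyTavern's `substituteParamsExtended` and `sanitizeRegexMacro`, not the real implementations; only the escaping idea is shown:

```javascript
const substitute_find_regex = { NONE: 0, RAW: 1, ESCAPED: 2 };

// Escape regex metacharacters so a macro value matches literally.
const escapeForRegex = (value) => value.replace(/[.^$*+?{}[\]\\/|()]/g, (s) => '\\' + s);

// Expand {{name}} macros in a find regex, optionally sanitizing the values.
function expandMacros(findRegex, macros, sanitizer = (x) => x) {
    return findRegex.replace(/{{(\w+)}}/g, (match, name) =>
        name in macros ? sanitizer(macros[name]) : match);
}

const macros = { user: 'Dr. Who (1963)' };
const findRegex = '\\b{{user}}\\b';

// NONE keeps the find regex verbatim; {{user}} is never expanded.
// RAW inserts the value as-is, so '.' and '()' become regex syntax:
console.log(expandMacros(findRegex, macros));                 // \bDr. Who (1963)\b
// ESCAPED neutralizes metacharacters so the value matches literally:
console.log(expandMacros(findRegex, macros, escapeForRegex)); // \bDr\. Who \(1963\)\b
```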
@ -8,7 +8,7 @@ import { enumIcons } from '../../slash-commands/SlashCommandCommonEnumsProvider.
|
||||
import { SlashCommandEnumValue, enumTypes } from '../../slash-commands/SlashCommandEnumValue.js';
|
||||
import { SlashCommandParser } from '../../slash-commands/SlashCommandParser.js';
|
||||
import { download, getFileText, getSortableDelay, uuidv4 } from '../../utils.js';
|
||||
import { regex_placement, runRegexScript } from './engine.js';
|
||||
import { regex_placement, runRegexScript, substitute_find_regex } from './engine.js';
|
||||
import { t } from '../../i18n.js';
|
||||
|
||||
/**
|
||||
@ -227,7 +227,7 @@ async function onRegexEditorOpenClick(existingId, isScoped) {
|
||||
editorHtml.find('input[name="only_format_display"]').prop('checked', existingScript.markdownOnly ?? false);
|
||||
editorHtml.find('input[name="only_format_prompt"]').prop('checked', existingScript.promptOnly ?? false);
|
||||
editorHtml.find('input[name="run_on_edit"]').prop('checked', existingScript.runOnEdit ?? false);
|
||||
editorHtml.find('input[name="substitute_regex"]').prop('checked', existingScript.substituteRegex ?? false);
|
||||
editorHtml.find('select[name="substitute_regex"]').val(existingScript.substituteRegex ?? substitute_find_regex.NONE);
|
||||
editorHtml.find('input[name="min_depth"]').val(existingScript.minDepth ?? '');
|
||||
editorHtml.find('input[name="max_depth"]').val(existingScript.maxDepth ?? '');
|
||||
|
||||
@ -267,7 +267,7 @@ async function onRegexEditorOpenClick(existingId, isScoped) {
|
||||
findRegex: editorHtml.find('.find_regex').val(),
|
||||
replaceString: editorHtml.find('.regex_replace_string').val(),
|
||||
trimStrings: String(editorHtml.find('.regex_trim_strings').val()).split('\n').filter((e) => e.length !== 0) || [],
|
||||
substituteRegex: editorHtml.find('input[name="substitute_regex"]').prop('checked'),
|
||||
substituteRegex: Number(editorHtml.find('select[name="substitute_regex"]').val()),
|
||||
};
|
||||
const rawTestString = String(editorHtml.find('#regex_test_input').val());
|
||||
const result = runRegexScript(testScript, rawTestString);
|
||||
@ -295,7 +295,7 @@ async function onRegexEditorOpenClick(existingId, isScoped) {
|
||||
markdownOnly: editorHtml.find('input[name="only_format_display"]').prop('checked'),
|
||||
promptOnly: editorHtml.find('input[name="only_format_prompt"]').prop('checked'),
|
||||
runOnEdit: editorHtml.find('input[name="run_on_edit"]').prop('checked'),
|
||||
substituteRegex: editorHtml.find('input[name="substitute_regex"]').prop('checked'),
|
||||
substituteRegex: Number(editorHtml.find('select[name="substitute_regex"]').val()),
|
||||
minDepth: parseInt(String(editorHtml.find('input[name="min_depth"]').val())),
|
||||
maxDepth: parseInt(String(editorHtml.find('input[name="max_depth"]').val())),
|
||||
};
|
||||
|
@ -1,37 +1,62 @@
|
||||
import { Popper } from '../../../lib.js';
|
||||
import {
|
||||
saveSettingsDebounced,
|
||||
systemUserName,
|
||||
getRequestHeaders,
|
||||
event_types,
|
||||
eventSource,
|
||||
generateQuietPrompt,
|
||||
this_chid,
|
||||
getCurrentChatId,
|
||||
animation_duration,
|
||||
appendMediaToMessage,
|
||||
getUserAvatar,
|
||||
user_avatar,
|
||||
getCharacterAvatar,
|
||||
event_types,
|
||||
eventSource,
|
||||
formatCharacterAvatar,
|
||||
generateQuietPrompt,
|
||||
getCharacterAvatar,
|
||||
getCurrentChatId,
|
||||
getRequestHeaders,
|
||||
getUserAvatar,
|
||||
saveSettingsDebounced,
|
||||
substituteParams,
|
||||
substituteParamsExtended,
|
||||
systemUserName,
|
||||
this_chid,
|
||||
user_avatar,
|
||||
} from '../../../script.js';
|
||||
import { getApiUrl, getContext, extension_settings, doExtrasFetch, modules, renderExtensionTemplateAsync, writeExtensionField } from '../../extensions.js';
|
||||
import {
|
||||
doExtrasFetch,
|
||||
extension_settings,
|
||||
getApiUrl,
|
||||
getContext,
|
||||
modules,
|
||||
renderExtensionTemplateAsync,
|
||||
writeExtensionField,
|
||||
} from '../../extensions.js';
|
||||
import { selected_group } from '../../group-chats.js';
|
||||
import { stringFormat, initScrollHeight, resetScrollHeight, getCharaFilename, saveBase64AsFile, getBase64Async, delay, isTrueBoolean, debounce, isFalseBoolean, deepMerge } from '../../utils.js';
|
||||
import {
|
||||
debounce,
|
||||
deepMerge,
|
||||
delay,
|
||||
getBase64Async,
|
||||
getCharaFilename,
|
||||
initScrollHeight,
|
||||
isFalseBoolean,
|
||||
isTrueBoolean,
|
||||
resetScrollHeight,
|
||||
saveBase64AsFile,
|
||||
stringFormat,
|
||||
} from '../../utils.js';
|
||||
import { getMessageTimeStamp, humanizedDateTime } from '../../RossAscends-mods.js';
|
||||
import { SECRET_KEYS, secret_state, writeSecret } from '../../secrets.js';
|
||||
import { getNovelUnlimitedImageGeneration, getNovelAnlas, loadNovelSubscriptionData } from '../../nai-settings.js';
|
||||
import { getNovelAnlas, getNovelUnlimitedImageGeneration, loadNovelSubscriptionData } from '../../nai-settings.js';
|
||||
import { getMultimodalCaption } from '../shared.js';
|
||||
import { SlashCommandParser } from '../../slash-commands/SlashCommandParser.js';
|
||||
import { SlashCommand } from '../../slash-commands/SlashCommand.js';
|
||||
import { ARGUMENT_TYPE, SlashCommandArgument, SlashCommandNamedArgument } from '../../slash-commands/SlashCommandArgument.js';
|
||||
import {
|
||||
ARGUMENT_TYPE,
|
||||
SlashCommandArgument,
|
||||
SlashCommandNamedArgument,
|
||||
} from '../../slash-commands/SlashCommandArgument.js';
|
||||
import { debounce_timeout } from '../../constants.js';
|
||||
import { SlashCommandEnumValue } from '../../slash-commands/SlashCommandEnumValue.js';
|
||||
import { POPUP_RESULT, POPUP_TYPE, Popup, callGenericPopup } from '../../popup.js';
|
||||
import { callGenericPopup, Popup, POPUP_RESULT, POPUP_TYPE } from '../../popup.js';
|
||||
import { commonEnumProviders } from '../../slash-commands/SlashCommandCommonEnumsProvider.js';
|
||||
import { ToolManager } from '../../tool-calling.js';
|
||||
|
||||
export { MODULE_NAME };
|
||||
|
||||
const MODULE_NAME = 'sd';
|
||||
@ -118,13 +143,13 @@ const triggerWords = {
|
||||
};
|
||||
|
||||
const messageTrigger = {
|
||||
activationRegex: /\b(send|mail|imagine|generate|make|create|draw|paint|render|show)\b.{0,10}\b(pic|picture|image|drawing|painting|photo|photograph)\b(?:\s+of)?(?:\s+(?:a|an|the|this|that|those|your)?)?(.+)/i,
|
||||
activationRegex: /\b(send|mail|imagine|generate|make|create|draw|paint|render|show)\b.{0,10}\b(pic|picture|image|drawing|painting|photo|photograph)\b(?:\s+of)?(?:\s+(?:a|an|the|this|that|those|your)?\s+)?(.+)/i,
|
||||
specialCases: {
|
||||
[generationMode.CHARACTER]: ['you', 'yourself'],
|
||||
[generationMode.USER]: ['me', 'myself'],
|
||||
[generationMode.SCENARIO]: ['story', 'scenario', 'whole story'],
|
||||
[generationMode.NOW]: ['last message'],
|
||||
[generationMode.FACE]: ['your face', 'your portrait', 'your selfie'],
|
||||
[generationMode.FACE]: ['face', 'portrait', 'selfie'],
|
||||
[generationMode.BACKGROUND]: ['background', 'scene background', 'scene', 'scenery', 'surroundings', 'environment'],
|
||||
},
|
||||
};
|
||||
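A quick check of the updated `activationRegex` from the hunk above; the test phrase and expected capture are illustrative:

```javascript
const activationRegex = /\b(send|mail|imagine|generate|make|create|draw|paint|render|show)\b.{0,10}\b(pic|picture|image|drawing|painting|photo|photograph)\b(?:\s+of)?(?:\s+(?:a|an|the|this|that|those|your)?\s+)?(.+)/i;

const match = 'please draw a picture of a cozy cabin'.match(activationRegex);
console.log(match?.[3]); // "cozy cabin", the subject forwarded to image generation
```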
@ -343,7 +368,7 @@ function processTriggers(chat, _, abort) {
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`SD: Triggered by "${message}", detected subject: ${subject}"`);
|
||||
console.log(`SD: Triggered by "${message}", detected subject: "${subject}"`);
|
||||
|
||||
outer: for (const [specialMode, triggers] of Object.entries(messageTrigger.specialCases)) {
|
||||
for (const trigger of triggers) {
|
||||
@ -359,7 +384,6 @@ function processTriggers(chat, _, abort) {
|
||||
setTimeout(() => generatePicture(initiators.interactive, {}, subject, message), 1);
|
||||
} catch {
|
||||
console.log('SD: Failed to process triggers.');
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
@ -685,7 +709,7 @@ async function refinePrompt(prompt, isNegative) {
|
||||
return prompt;
|
||||
}
|
||||
|
||||
function onChatChanged() {
|
||||
async function onChatChanged() {
|
||||
if (this_chid === undefined || selected_group) {
|
||||
$('#sd_character_prompt_block').hide();
|
||||
return;
|
||||
@ -713,7 +737,7 @@ function onChatChanged() {
|
||||
$('#sd_character_prompt').val(characterPrompt);
|
||||
$('#sd_character_negative_prompt').val(negativePrompt);
|
||||
$('#sd_character_prompt_share').prop('checked', hasSharedData);
|
||||
adjustElementScrollHeight();
|
||||
await adjustElementScrollHeight();
|
||||
}
|
||||
|
||||
async function adjustElementScrollHeight() {
|
||||
@ -1285,8 +1309,7 @@ async function getAutoRemoteModel() {
|
||||
throw new Error('SD WebUI returned an error.');
|
||||
}
|
||||
|
||||
const data = await result.text();
|
||||
return data;
|
||||
return await result.text();
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
return null;
|
||||
@ -1305,9 +1328,7 @@ async function getDrawthingsRemoteModel() {
|
||||
throw new Error('SD DrawThings API returned an error.');
|
||||
}
|
||||
|
||||
const data = await result.text();
|
||||
|
||||
return data;
|
||||
return await result.text();
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
return null;
|
||||
@ -1330,8 +1351,7 @@ async function getAutoRemoteUpscalers() {
|
||||
throw new Error('SD WebUI returned an error.');
|
||||
}
|
||||
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
return [extension_settings.sd.hr_upscaler];
|
||||
@ -1350,8 +1370,7 @@ async function getAutoRemoteSchedulers() {
|
||||
throw new Error('SD WebUI returned an error.');
|
||||
}
|
||||
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
return ['N/A'];
|
||||
@ -1370,8 +1389,7 @@ async function getVladRemoteUpscalers() {
|
||||
throw new Error('SD.Next returned an error.');
|
||||
}
|
||||
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
return [extension_settings.sd.hr_upscaler];
|
||||
@ -1504,8 +1522,7 @@ async function loadHordeSamplers() {
|
||||
});
|
||||
|
||||
if (result.ok) {
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
}
|
||||
|
||||
return [];
|
||||
@ -1544,8 +1561,7 @@ async function loadAutoSamplers() {
|
||||
throw new Error('SD WebUI returned an error.');
|
||||
}
|
||||
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
} catch (error) {
|
||||
return [];
|
||||
}
|
||||
@ -1583,8 +1599,7 @@ async function loadVladSamplers() {
|
||||
throw new Error('SD.Next returned an error.');
|
||||
}
|
||||
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
} catch (error) {
|
||||
return [];
|
||||
}
|
||||
@ -1723,8 +1738,7 @@ async function loadPollinationsModels() {
|
||||
});
|
||||
|
||||
if (result.ok) {
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
}
|
||||
|
||||
return [];
|
||||
@ -1742,8 +1756,7 @@ async function loadTogetherAIModels() {
|
||||
});
|
||||
|
||||
if (result.ok) {
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
}
|
||||
|
||||
return [];
|
||||
@ -1781,8 +1794,7 @@ async function loadNanoGPTModels() {
|
||||
});
|
||||
|
||||
if (result.ok) {
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
}
|
||||
|
||||
return [];
|
||||
@ -1798,8 +1810,10 @@ async function loadHordeModels() {
|
||||
if (result.ok) {
|
||||
const data = await result.json();
|
||||
data.sort((a, b) => b.count - a.count);
|
||||
const models = data.map(x => ({ value: x.name, text: `${x.name} (ETA: ${x.eta}s, Queue: ${x.queued}, Workers: ${x.count})` }));
|
||||
return models;
|
||||
return data.map(x => ({
|
||||
value: x.name,
|
||||
text: `${x.name} (ETA: ${x.eta}s, Queue: ${x.queued}, Workers: ${x.count})`,
|
||||
}));
|
||||
}
|
||||
|
||||
return [];
|
||||
@ -1824,8 +1838,7 @@ async function loadExtrasModels() {
|
||||
|
||||
if (getModelsResult.ok) {
|
||||
const data = await getModelsResult.json();
|
||||
const view_models = data.models.map(x => ({ value: x, text: x }));
|
||||
return view_models;
|
||||
return data.models.map(x => ({ value: x, text: x }));
|
||||
}
|
||||
|
||||
return [];
|
||||
@ -1867,8 +1880,7 @@ async function loadAutoModels() {
|
||||
}
|
||||
}
|
||||
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
} catch (error) {
|
||||
return [];
|
||||
}
|
||||
@ -1953,8 +1965,7 @@ async function loadVladModels() {
|
||||
}
|
||||
}
|
||||
|
||||
const data = await result.json();
|
||||
return data;
|
||||
return await result.json();
|
||||
} catch (error) {
|
||||
return [];
|
||||
}
|
||||
@ -2242,7 +2253,7 @@ async function loadComfyWorkflows() {
|
||||
$('#sd_comfy_workflow').append(option);
|
||||
}
|
||||
} catch (error) {
|
||||
return;
|
||||
console.error(`Could not load ComfyUI workflows: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
@ -2361,7 +2372,8 @@ function ensureSelectionExists(setting, selector) {
|
||||
* @param {string} trigger Subject trigger word
|
||||
* @param {string} [message] Chat message
|
||||
* @param {function} [callback] Callback function
|
||||
* @returns {Promise<string>} Image path
|
||||
* @returns {Promise<string|undefined>} Image path
|
||||
* @throws {Error} If the prompt or image generation fails
|
||||
*/
|
||||
async function generatePicture(initiator, args, trigger, message, callback) {
|
||||
if (!trigger || trigger.trim().length === 0) {
|
||||
@ -2379,7 +2391,9 @@ async function generatePicture(initiator, args, trigger, message, callback) {
|
||||
|
||||
trigger = trigger.trim();
|
||||
const generationType = getGenerationType(trigger);
|
||||
console.log('Generation mode', generationType, 'triggered with', trigger);
|
||||
const generationTypeKey = Object.keys(generationMode).find(key => generationMode[key] === generationType);
|
||||
console.log(`Image generation mode ${generationTypeKey} triggered with "${trigger}"`);
|
||||
|
||||
const quietPrompt = getQuietPrompt(generationType, trigger);
|
||||
const context = getContext();
|
||||
|
||||
@ -2387,11 +2401,11 @@ async function generatePicture(initiator, args, trigger, message, callback) {
|
||||
? context.groups[Object.keys(context.groups).filter(x => context.groups[x].id === context.groupId)[0]]?.id?.toString()
|
||||
: context.characters[context.characterId]?.name;
|
||||
|
||||
if (generationType == generationMode.BACKGROUND) {
|
||||
if (generationType === generationMode.BACKGROUND) {
|
||||
const callbackOriginal = callback;
|
||||
callback = async function (prompt, imagePath, generationType, _negativePromptPrefix, _initiator, prefixedPrompt) {
|
||||
const imgUrl = `url("${encodeURI(imagePath)}")`;
|
||||
eventSource.emit(event_types.FORCE_SET_BACKGROUND, { url: imgUrl, path: imagePath });
|
||||
await eventSource.emit(event_types.FORCE_SET_BACKGROUND, { url: imgUrl, path: imagePath });
|
||||
|
||||
if (typeof callbackOriginal === 'function') {
|
||||
await callbackOriginal(prompt, imagePath, generationType, negativePromptPrefix, initiator, prefixedPrompt);
|
||||
@ -2415,6 +2429,8 @@ async function generatePicture(initiator, args, trigger, message, callback) {
|
||||
|
||||
try {
|
||||
const combineNegatives = (prefix) => { negativePromptPrefix = combinePrefixes(negativePromptPrefix, prefix); };
|
||||
|
||||
// generate the text prompt for the image
|
||||
const prompt = await getPrompt(generationType, message, trigger, quietPrompt, combineNegatives);
|
||||
console.log('Processed image prompt:', prompt);
|
||||
|
||||
@ -2425,11 +2441,16 @@ async function generatePicture(initiator, args, trigger, message, callback) {
|
||||
args._abortController.addEventListener('abort', stopListener);
|
||||
}
|
||||
|
||||
// generate the image
|
||||
imagePath = await sendGenerationRequest(generationType, prompt, negativePromptPrefix, characterName, callback, initiator, abortController.signal);
|
||||
} catch (err) {
|
||||
console.trace(err);
|
||||
toastr.error('SD prompt text generation failed. Reason: ' + err, 'Image Generation');
|
||||
throw new Error('SD prompt text generation failed. Reason: ' + err);
|
||||
// errors here are most likely due to text generation failure
|
||||
// sendGenerationRequest mostly deals with its own errors
|
||||
const reason = err.error?.message || err.message || 'Unknown error';
|
||||
const errorText = 'SD prompt text generation failed. ' + reason;
|
||||
toastr.error(errorText, 'Image Generation');
|
||||
throw new Error(errorText);
|
||||
}
|
||||
finally {
|
||||
$(stopButton).hide();
|
||||
@ -2446,12 +2467,12 @@ function setTypeSpecificDimensions(generationType) {
|
||||
const aspectRatio = extension_settings.sd.width / extension_settings.sd.height;
|
||||
|
||||
// Face images are always portrait (pun intended)
|
||||
if ((generationType == generationMode.FACE || generationType == generationMode.FACE_MULTIMODAL) && aspectRatio >= 1) {
|
||||
if ((generationType === generationMode.FACE || generationType === generationMode.FACE_MULTIMODAL) && aspectRatio >= 1) {
|
||||
// Round to nearest multiple of 64
|
||||
extension_settings.sd.height = Math.round(extension_settings.sd.width * 1.5 / 64) * 64;
|
||||
}
|
||||
|
||||
if (generationType == generationMode.BACKGROUND) {
|
||||
if (generationType === generationMode.BACKGROUND) {
|
||||
// Background images are always landscape
|
||||
if (aspectRatio <= 1) {
|
||||
// Round to nearest multiple of 64
|
||||
@@ -2500,7 +2521,7 @@ function restoreOriginalDimensions(savedParams) {
*/
async function getPrompt(generationType, message, trigger, quietPrompt, combineNegatives) {
let prompt;

console.log('getPrompt: Generation mode', generationType, 'triggered with', trigger);
switch (generationType) {
case generationMode.RAW_LAST:
prompt = message || getRawLastMessage();
@@ -2537,7 +2558,7 @@ async function getPrompt(generationType, message, trigger, quietPrompt, combineN
*/
function generateFreeModePrompt(trigger, combineNegatives) {
return trigger
.replace(/(?:^char(\s|,)|\{\{charPrefix\}\})/gi, (_, suffix) => {
.replace(/^char(\s|,)|{{charPrefix}}/gi, (_, suffix) => {
const getLastCharacterKey = () => {
if (typeof this_chid !== 'undefined') {
return getCharaFilename(this_chid);
@@ -2545,9 +2566,7 @@ function generateFreeModePrompt(trigger, combineNegatives) {
const context = getContext();
for (let i = context.chat.length - 1; i >= 0; i--) {
const message = context.chat[i];
if (message.is_user || message.is_system) {
continue;
} else if (typeof message.original_avatar === 'string') {
if (!message.is_user && !message.is_system && typeof message.original_avatar === 'string') {
return message.original_avatar.replace(/\.[^/.]+$/, '');
}
}
@@ -2570,11 +2589,11 @@ function generateFreeModePrompt(trigger, combineNegatives) {
async function generateMultimodalPrompt(generationType, quietPrompt) {
let avatarUrl;

if (generationType == generationMode.USER_MULTIMODAL) {
if (generationType === generationMode.USER_MULTIMODAL) {
avatarUrl = getUserAvatarUrl();
}

if (generationType == generationMode.CHARACTER_MULTIMODAL || generationType === generationMode.FACE_MULTIMODAL) {
if (generationType === generationMode.CHARACTER_MULTIMODAL || generationType === generationMode.FACE_MULTIMODAL) {
avatarUrl = getCharacterAvatarUrl();
}

@@ -2718,7 +2737,7 @@ async function sendGenerationRequest(generationType, prompt, additionalNegativeP
throw new Error('Endpoint did not return image data.');
}
} catch (err) {
console.error(err);
console.error('Image generation request error: ', err);
toastr.error('Image generation failed. Please try again.' + '\n\n' + String(err), 'Image Generation');
return;
}
@@ -3182,8 +3201,8 @@ function getNovelParams() {
const ratio = Math.sqrt(MAX_PIXELS / (width * height));

// Calculate new width and height while maintaining aspect ratio.
var newWidth = Math.round(width * ratio);
var newHeight = Math.round(height * ratio);
let newWidth = Math.round(width * ratio);
let newHeight = Math.round(height * ratio);

// Ensure new dimensions are multiples of 64. If not, reduce accordingly.
if (newWidth % 64 !== 0) {
@@ -3479,9 +3498,9 @@ async function onComfyOpenWorkflowEditorClick() {
const popupResult = popup.show();
const checkPlaceholders = () => {
workflow = $('#sd_comfy_workflow_editor_workflow').val().toString();
$('.sd_comfy_workflow_editor_placeholder_list > li[data-placeholder]').each(function (idx) {
$('.sd_comfy_workflow_editor_placeholder_list > li[data-placeholder]').each(function () {
const key = this.getAttribute('data-placeholder');
const found = workflow.search(`"%${key}%"`) != -1;
const found = workflow.search(`"%${key}%"`) !== -1;
this.classList[found ? 'remove' : 'add']('sd_comfy_workflow_editor_not_found');
});
};
@@ -3841,7 +3860,7 @@ async function sdMessageButton(e) {
swipes.push(image);

// If already contains an image and it's not inline - leave it as is
message.extra.inline_image = message.extra.image && !message.extra.inline_image ? false : true;
message.extra.inline_image = !(message.extra.image && !message.extra.inline_image);
message.extra.image = image;
message.extra.title = prompt;
message.extra.generationType = generationType;
@@ -7,7 +7,7 @@
<div class="inline-drawer-content">
<div id="tts_status">
</div>
<span>Select TTS Provider</span> </br>
<span data-i18n="Select TTS Provider">Select TTS Provider</span> </br>
<div class="tts_block">
<select id="tts_provider" class="flex1">
</select>
@@ -16,49 +16,49 @@
<div>
<label class="checkbox_label" for="tts_enabled">
<input type="checkbox" id="tts_enabled" name="tts_enabled">
<small>Enabled</small>
<small data-i18n="tts_enabled">Enabled</small>
</label>
<label class="checkbox_label" for="tts_narrate_user">
<input type="checkbox" id="tts_narrate_user">
<small>Narrate user messages</small>
<small data-i18n="Narrate user messages">Narrate user messages</small>
</label>
<label class="checkbox_label" for="tts_auto_generation">
<input type="checkbox" id="tts_auto_generation">
<small>Auto Generation</small>
<small data-i18n="Auto Generation">Auto Generation</small>
</label>
<label class="checkbox_label" for="tts_periodic_auto_generation" title="Requires auto generation to be enabled.">
<label class="checkbox_label" for="tts_periodic_auto_generation" data-i18n="[title]Requires auto generation to be enabled." title="Requires auto generation to be enabled.">
<input type="checkbox" id="tts_periodic_auto_generation">
<small>Narrate by paragraphs (when streaming)</small>
<small data-i18n="Narrate by paragraphs (when streaming)">Narrate by paragraphs (when streaming)</small>
</label>
<label class="checkbox_label" for="tts_narrate_quoted">
<input type="checkbox" id="tts_narrate_quoted">
<small>Only narrate "quotes"</small>
<small data-i18n="Only narrate quotes">Only narrate "quotes"</small>
</label>
<label class="checkbox_label" for="tts_narrate_dialogues">
<input type="checkbox" id="tts_narrate_dialogues">
<small>Ignore *text, even "quotes", inside asterisks*</small>
<small data-i18n="Ignore text, even quotes, inside asterisk">Ignore *text, even "quotes", inside asterisks*</small>
</label>
<label class="checkbox_label" for="tts_narrate_translated_only">
<input type="checkbox" id="tts_narrate_translated_only">
<small>Narrate only the translated text</small>
<small data-i18n="Narrate only the translated text">Narrate only the translated text</small>
</label>
<label class="checkbox_label" for="tts_skip_codeblocks">
<input type="checkbox" id="tts_skip_codeblocks">
<small>Skip codeblocks</small>
<small data-i18n="Skip codeblocks">Skip codeblocks</small>
</label>
<label class="checkbox_label" for="tts_skip_tags">
<input type="checkbox" id="tts_skip_tags">
<small>Skip <tagged> blocks</small>
<small data-i18n="Skip tagged blocks">Skip <tagged> blocks</small>
</label>
<label class="checkbox_label" for="tts_pass_asterisks">
<input type="checkbox" id="tts_pass_asterisks">
<small>Pass Asterisks to TTS Engine</small>
<small data-i18n="Pass Asterisks to TTS Engine">Pass Asterisks to TTS Engine</small>
</label>
</div>
<div id="playback_rate_block" class="range-block">
<hr>
<div class="range-block-title justifyLeft" data-i18n="Audio Playback Speed">
<small>Audio Playback Speed</small>
<small data-i18n="Audio Playback Speed">Audio Playback Speed</small>
</div>
<div class="range-block-range-and-counter">
<div class="range-block-range">
@@ -181,6 +181,14 @@ function setContextSizePreview() {
}
}

/** Generates text using the Horde API.
* @param {string} prompt
* @param params
* @param signal
* @param reportProgress
* @returns {Promise<{text: *, workerName: string}>}
* @throws {Error}
*/
async function generateHorde(prompt, params, signal, reportProgress) {
validateHordeModel();
delete params.prompt;
@@ -39,6 +39,7 @@ const controls = [
{ id: 'instruct_first_input_sequence', property: 'first_input_sequence', isCheckbox: false },
{ id: 'instruct_last_input_sequence', property: 'last_input_sequence', isCheckbox: false },
{ id: 'instruct_activation_regex', property: 'activation_regex', isCheckbox: false },
{ id: 'instruct_derived', property: 'derived', isCheckbox: true },
{ id: 'instruct_bind_to_context', property: 'bind_to_context', isCheckbox: true },
{ id: 'instruct_skip_examples', property: 'skip_examples', isCheckbox: true },
{ id: 'instruct_names_behavior', property: 'names_behavior', isCheckbox: false },
@@ -100,6 +101,7 @@ export async function loadInstructMode(data) {

$('#instruct_enabled').parent().find('i').toggleClass('toggleEnabled', !!power_user.instruct.enabled);
$('#instructSettingsBlock, #InstructSequencesColumn').toggleClass('disabled', !power_user.instruct.enabled);
$('#instruct_derived').parent().find('i').toggleClass('toggleEnabled', !!power_user.instruct.derived);
$('#instruct_bind_to_context').parent().find('i').toggleClass('toggleEnabled', !!power_user.instruct.bind_to_context);

controls.forEach(control => {
@@ -146,6 +148,12 @@ export async function loadInstructMode(data) {
* @param {boolean} [options.isAuto=false] Is auto-select.
*/
export function selectContextPreset(preset, { quiet = false, isAuto = false } = {}) {
const presetExists = context_presets.some(x => x.name === preset);
if (!presetExists) {
console.warn(`Context template "${preset}" not found`);
return;
}

// If context template is not already selected, select it
if (preset !== power_user.context.preset) {
$('#context_presets').val(preset).trigger('change');
@@ -163,6 +171,12 @@ export function selectContextPreset(preset, { quiet = false, isAuto = false } =
* @param {boolean} [options.isAuto=false] Is auto-select.
*/
export function selectInstructPreset(preset, { quiet = false, isAuto = false } = {}) {
const presetExists = instruct_presets.some(x => x.name === preset);
if (!presetExists) {
console.warn(`Instruct template "${preset}" not found`);
return;
}

// If instruct preset is not already selected, select it
if (preset !== power_user.instruct.preset) {
$('#instruct_presets').val(preset).trigger('change');
@@ -715,6 +729,10 @@ jQuery(() => {
}
});

$('#instruct_derived').on('change', function () {
$('#instruct_derived').parent().find('i').toggleClass('toggleEnabled', !!power_user.instruct.derived);
});

$('#instruct_bind_to_context').on('change', function () {
$('#instruct_bind_to_context').parent().find('i').toggleClass('toggleEnabled', !!power_user.instruct.bind_to_context);
});
@@ -519,14 +519,14 @@ function convertTokenIdLogprobsToText(input) {

const tokenizerId = getTokenizerBestMatch(api);

// Flatten unique token IDs across all logprobs
/** @type {any[]} Flatten unique token IDs across all logprobs */
const tokenIds = Array.from(new Set(input.flatMap(logprobs =>
logprobs.topLogprobs.map(([token]) => token).concat(logprobs.token),
)));

// Submit token IDs to tokenizer to get token text, then build ID->text map
// noinspection JSCheckFunctionSignatures - mutates input in-place
const { chunks } = decodeTextTokens(tokenizerId, tokenIds.map(parseInt));
const { chunks } = decodeTextTokens(tokenizerId, tokenIds);
const tokenIdText = new Map(tokenIds.map((id, i) => [id, chunks[i]]));

// Fixup logprobs data with token text
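One well-known hazard of the removed `tokenIds.map(parseInt)` pattern (whether or not it was the motivating bug here) is that Array#map passes the element index as parseInt's radix argument, so numeric IDs can be silently mangled; passing the IDs through unchanged sidesteps that. A minimal illustration:

```js
// map() calls parseInt(value, index), so the index becomes the radix:
[420, 69, 1337].map(parseInt);          // [420, NaN, 1]
[420, 69, 1337].map(x => parseInt(x));  // [420, 69, 1337]
```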
@@ -752,7 +752,8 @@ async function populateChatHistory(messages, prompts, chatCompletion, type = nul
if (type === 'continue' && oai_settings.continue_prefill && chatPrompt === firstNonInjected) {
// in case we are using continue_prefill and the latest message is an assistant message, we want to prepend the users assistant prefill on the message
if (chatPrompt.role === 'assistant') {
const continueMessage = await Message.createAsync(chatMessage.role, substituteParams(oai_settings.assistant_prefill + '\n\n') + chatMessage.content, chatMessage.identifier);
const messageContent = [substituteParams(oai_settings.assistant_prefill), chatMessage.content].filter(x => x).join('\n\n');
const continueMessage = await Message.createAsync(chatMessage.role, messageContent, chatMessage.identifier);
const collection = new MessageCollection('continuePrefill', continueMessage);
chatCompletion.add(collection, -1);
continue;
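The rewritten continue-prefill branch joins the prefill and the existing assistant text through filter(x => x), so an empty prefill no longer leaves a stray separator at the front of the message. A small illustration with made-up values:

```js
['', 'Existing assistant text'].filter(x => x).join('\n\n');        // 'Existing assistant text'
['Prefill', 'Existing assistant text'].filter(x => x).join('\n\n'); // 'Prefill\n\nExisting assistant text'
```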
@@ -1312,6 +1313,11 @@ export async function prepareOpenAIMessages({
return [chat, promptManager.tokenHandler.counts];
}

/**
* Handles errors during streaming requests.
* @param {Response} response
* @param {string} decoded - response text or decoded stream data
*/
function tryParseStreamingError(response, decoded) {
try {
const data = JSON.parse(decoded);
@@ -1323,6 +1329,9 @@ function tryParseStreamingError(response, decoded) {
checkQuotaError(data);
checkModerationError(data);

// these do not throw correctly (equiv to Error("[object Object]"))
// if trying to fix "[object Object]" displayed to users, start here

if (data.error) {
toastr.error(data.error.message || response.statusText, 'Chat Completion API');
throw new Error(data);
@@ -1338,15 +1347,22 @@ function tryParseStreamingError(response, decoded) {
}
}

async function checkQuotaError(data) {
const errorText = await renderTemplateAsync('quotaError');

/**
* Checks if the response contains a quota error and displays a popup if it does.
* @param data
* @returns {void}
* @throws {object} - response JSON
*/
function checkQuotaError(data) {
if (!data) {
return;
}

if (data.quota_error) {
callPopup(errorText, 'text');
renderTemplateAsync('quotaError').then((html) => Popup.show.text('Quota Error', html));

// this does not throw correctly (equiv to Error("[object Object]"))
// if trying to fix "[object Object]" displayed to users, start here
throw new Error(data);
}
}
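The "[object Object]" comments above refer to plain JavaScript behavior: the Error constructor stringifies a non-string argument, so throwing the parsed response object loses its details. A short illustration (the object contents are made up):

```js
try {
    throw new Error({ quota_error: true, message: 'You exceeded your current quota' });
} catch (err) {
    console.log(err.message); // '[object Object]' - the structured details are gone
}
```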
@@ -1765,6 +1781,15 @@ async function sendAltScaleRequest(messages, logit_bias, signal, type) {
return data.output;
}

/**
* Send a chat completion request to backend
* @param {string} type (impersonate, quiet, continue, etc)
* @param {Array} messages
* @param {AbortSignal?} signal
* @returns {Promise<unknown>}
* @throws {Error}
*/

async function sendOpenAIRequest(type, messages, signal) {
// Provide default abort signal
if (!signal) {
@@ -2027,12 +2052,13 @@ async function sendOpenAIRequest(type, messages, signal) {
else {
const data = await response.json();

await checkQuotaError(data);
checkQuotaError(data);
checkModerationError(data);

if (data.error) {
toastr.error(data.error.message || response.statusText, t`API returned an error`);
throw new Error(data);
const message = data.error.message || response.statusText || t`Unknown error`;
toastr.error(message, t`API returned an error`);
throw new Error(message);
}

if (type !== 'quiet') {
@@ -4056,7 +4082,7 @@ async function onModelChange() {
if (oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE) {
if (oai_settings.max_context_unlocked) {
$('#openai_max_context').attr('max', max_2mil);
} else if (value.includes('gemini-exp-1114')){
} else if (value.includes('gemini-exp-1114') || value.includes('gemini-exp-1121')) {
$('#openai_max_context').attr('max', max_32k);
} else if (value.includes('gemini-1.5-pro')) {
$('#openai_max_context').attr('max', max_2mil);
@@ -4196,10 +4222,10 @@ async function onModelChange() {
else if (['command-light-nightly', 'command-nightly'].includes(oai_settings.cohere_model)) {
$('#openai_max_context').attr('max', max_8k);
}
else if (oai_settings.cohere_model.includes('command-r')) {
else if (oai_settings.cohere_model.includes('command-r') || ['c4ai-aya-expanse-32b'].includes(oai_settings.cohere_model)) {
$('#openai_max_context').attr('max', max_128k);
}
else if (['c4ai-aya-23'].includes(oai_settings.cohere_model)) {
else if (['c4ai-aya-23', 'c4ai-aya-expanse-8b'].includes(oai_settings.cohere_model)) {
$('#openai_max_context').attr('max', max_8k);
}
else {
@@ -4745,6 +4771,7 @@ export function isImageInliningSupported() {
'gemini-1.5-flash-8b-exp-0827',
'gemini-1.5-flash-8b-exp-0924',
'gemini-exp-1114',
'gemini-exp-1121',
'gemini-1.0-pro-vision-latest',
'gemini-1.5-pro',
'gemini-1.5-pro-latest',
@@ -226,6 +226,7 @@ let power_user = {
macro: true,
names_behavior: names_behavior_types.FORCE,
activation_regex: '',
derived: false,
bind_to_context: false,
user_alignment_message: '',
system_same_as_user: false,
@@ -243,6 +244,8 @@ let power_user = {
names_as_stop_strings: true,
},

context_derived: false,

sysprompt: {
enabled: true,
name: 'Neutral - Chat',
@@ -1477,6 +1480,7 @@ async function loadPowerUserSettings(settings, data) {
$('#encode_tags').prop('checked', power_user.encode_tags);
$('#example_messages_behavior').val(getExampleMessagesBehavior());
$(`#example_messages_behavior option[value="${getExampleMessagesBehavior()}"]`).prop('selected', true);
$('#context_derived').parent().find('i').toggleClass('toggleEnabled', !!power_user.context_derived);

$('#console_log_prompts').prop('checked', power_user.console_log_prompts);
$('#request_token_probabilities').prop('checked', power_user.request_token_probabilities);
@@ -3062,6 +3066,16 @@ $(document).ready(() => {
saveSettingsDebounced();
});

$('#context_derived').on('input', function () {
const value = !!$(this).prop('checked');
power_user.context_derived = value;
saveSettingsDebounced();
});

$('#context_derived').on('change', function () {
$('#context_derived').parent().find('i').toggleClass('toggleEnabled', !!power_user.context_derived);
});

$('#always-force-name2-checkbox').change(function () {
power_user.always_force_name2 = !!$(this).prop('checked');
saveSettingsDebounced();
@@ -584,6 +584,7 @@ class PresetManager {
'openrouter_providers',
'openrouter_allow_fallbacks',
'tabby_model',
'derived',
];
const settings = Object.assign({}, getSettingsByApiId(this.apiId));
@@ -1827,6 +1827,30 @@ export function initDefaultSlashCommands() {
</div>
`,
}));
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'upper',
aliases: ['uppercase', 'to-upper'],
callback: (_, text) => typeof text === 'string' ? text.toUpperCase() : '',
returns: 'uppercase string',
unnamedArgumentList: [
new SlashCommandArgument(
'string', [ARGUMENT_TYPE.STRING], true, false,
),
],
helpString: 'Converts the provided string to uppercase.',
}));
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'lower',
aliases: ['lowercase', 'to-lower'],
callback: (_, text) => typeof text === 'string' ? text.toLowerCase() : '',
returns: 'lowercase string',
unnamedArgumentList: [
new SlashCommandArgument(
'string', [ARGUMENT_TYPE.STRING], true, false,
),
],
helpString: 'Converts the provided string to lowercase.',
}));

registerVariableCommands();
}
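For example, typing `/upper Hello there! | /echo {{pipe}}` in the chat input should pop a toast reading "HELLO THERE!", assuming the usual STscript behavior of piping the previous command's output into {{pipe}}; `/lower` works the same way in the other direction.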
@@ -1,5 +1,4 @@
import { escapeRegex } from '../utils.js';
import { SlashCommand } from './SlashCommand.js';
import { SlashCommandParser } from './SlashCommandParser.js';

export class SlashCommandBrowser {
@@ -30,7 +29,7 @@ export class SlashCommandBrowser {
this.details?.remove();
this.details = null;
let query = inp.value.trim();
if (query.slice(-1) == '"' && !/(?:^|\s+)"/.test(query)) {
if (query.slice(-1) === '"' && !/(?:^|\s+)"/.test(query)) {
query = `"${query}`;
}
let fuzzyList = [];
@@ -59,7 +58,7 @@ export class SlashCommandBrowser {
cmd.helpString,
];
const find = ()=>targets.find(t=>(fuzzyList.find(f=>f.test(t)) ?? quotedList.find(q=>t.includes(q))) !== undefined) !== undefined;
if (fuzzyList.length + quotedList.length == 0 || find()) {
if (fuzzyList.length + quotedList.length === 0 || find()) {
this.itemMap[cmd.name].classList.remove('isFiltered');
} else {
this.itemMap[cmd.name].classList.add('isFiltered');
@@ -78,7 +77,7 @@ export class SlashCommandBrowser {
list.classList.add('autoComplete');
this.cmdList = Object
.keys(SlashCommandParser.commands)
.filter(key => SlashCommandParser.commands[key].name == key) // exclude aliases
.filter(key => SlashCommandParser.commands[key].name === key) // exclude aliases
.sort((a, b) => a.toLowerCase().localeCompare(b.toLowerCase()))
.map(key => SlashCommandParser.commands[key])
;
@@ -97,7 +96,7 @@ export class SlashCommandBrowser {
}
}
}
if (this.details != details) {
if (this.details !== details) {
Array.from(list.querySelectorAll('.selected')).forEach(it=>it.classList.remove('selected'));
item.classList.add('selected');
this.details?.remove();
@@ -124,7 +123,7 @@ export class SlashCommandBrowser {
parent.append(this.dom);

this.mo = new MutationObserver(muts=>{
if (muts.find(mut=>Array.from(mut.removedNodes).find(it=>it == this.dom || it.contains(this.dom)))) {
if (muts.find(mut=>Array.from(mut.removedNodes).find(it=>it === this.dom || it.contains(this.dom)))) {
this.mo.disconnect();
window.removeEventListener('keydown', boundHandler);
}
@@ -136,7 +135,7 @@ export class SlashCommandBrowser {
}

handleKeyDown(evt) {
if (!evt.shiftKey && !evt.altKey && evt.ctrlKey && evt.key.toLowerCase() == 'f') {
if (!evt.shiftKey && !evt.altKey && evt.ctrlKey && evt.key.toLowerCase() === 'f') {
if (!this.dom.closest('body')) return;
if (this.dom.closest('.mes') && !this.dom.closest('.last_mes')) return;
evt.preventDefault();
@@ -882,6 +882,13 @@ function setSettingByName(setting, value, trigger) {
}
}

/**
* Sends a streaming request for textgenerationwebui.
* @param generate_data
* @param signal
* @returns {Promise<(function(): AsyncGenerator<{swipes: [], text: string, toolCalls: [], logprobs: {token: string, topLogprobs: Candidate[]}|null}, void, *>)|*>}
* @throws {Error} - If the response status is not OK, or from within the generator
*/
async function generateTextGenWithStreaming(generate_data, signal) {
generate_data.stream = true;

@@ -997,6 +1004,7 @@ export function parseTabbyLogprobs(data) {
* @param {Response} response - Response from the server.
* @param {string} decoded - Decoded response body.
* @returns {void} Nothing.
* @throws {Error} If the response contains an error message, throws Error with the message.
*/
function tryParseStreamingError(response, decoded) {
let data = {};
@@ -280,7 +280,7 @@ async function sendMakerSuiteRequest(request, response) {
delete generationConfig.stopSequences;
}

const should_use_system_prompt = (model.includes('gemini-1.5-flash') || model.includes('gemini-1.5-pro') || model.includes('gemini-exp-1114')) && request.body.use_makersuite_sysprompt;
const should_use_system_prompt = (model.includes('gemini-1.5-flash') || model.includes('gemini-1.5-pro') || model.includes('gemini-exp-1114') || model.includes('gemini-exp-1121')) && request.body.use_makersuite_sysprompt;
const prompt = convertGooglePrompt(request.body.messages, model, should_use_system_prompt, request.body.char_name, request.body.user_name);
let body = {
contents: prompt.contents,
@@ -1051,8 +1051,12 @@ router.post('/generate', jsonParser, function (request, response) {
}
} catch (error) {
console.log('Generation failed', error);
const message = error.code === 'ECONNREFUSED'
? `Connection refused: ${error.message}`
: error.message || 'Unknown error occurred';

if (!response.headersSent) {
response.send({ error: true });
response.status(502).send({ error: { message, ...error } });
} else {
response.end();
}
@@ -1068,7 +1072,7 @@ router.post('/generate', jsonParser, function (request, response) {

const message = errorResponse.statusText || 'Unknown error occurred';
const quota_error = errorResponse.status === 429 && errorData?.error?.type === 'insufficient_quota';
console.log(message, responseText);
console.log('Chat completion request error: ', message, responseText);

if (!response.headersSent) {
response.send({ error: { message }, quota_error: quota_error });
@@ -16,6 +16,7 @@ import {
} from '../../constants.js';
import { forwardFetchResponse, trimV1, getConfigValue } from '../../util.js';
import { setAdditionalHeaders } from '../../additional-headers.js';
import { createHash } from 'node:crypto';

export const router = express.Router();

@@ -227,6 +228,40 @@ router.post('/status', jsonParser, async function (request, response) {
}
});

router.post('/props', jsonParser, async function (request, response) {
if (!request.body.api_server) return response.sendStatus(400);

try {
const baseUrl = trimV1(request.body.api_server);
const args = {
headers: {},
};

setAdditionalHeaders(request, args, baseUrl);

const apiType = request.body.api_type;
const propsUrl = baseUrl + '/props';
const propsReply = await fetch(propsUrl, args);

if (!propsReply.ok) {
return response.status(400);
}

/** @type {any} */
const props = await propsReply.json();
// TEMPORARY: llama.cpp's /props endpoint has a bug which replaces the last newline with a \0
if (apiType === TEXTGEN_TYPES.LLAMACPP && props['chat_template'].endsWith('\u0000')) {
props['chat_template'] = props['chat_template'].slice(0, -1) + '\n';
}
props['chat_template_hash'] = createHash('sha256').update(props['chat_template']).digest('hex');
console.log(`Model properties: ${JSON.stringify(props)}`);
return response.send(props);
} catch (error) {
console.error(error);
return response.status(500);
}
});

router.post('/generate', jsonParser, async function (request, response) {
if (!request.body) return response.sendStatus(400);
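A hedged sketch of how client code might exercise the new /props route. The /api/backends/text-completions mount point, the getRequestHeaders() helper, and the 'llamacpp' api_type value are assumptions carried over from how the neighbouring /status route is typically called, not something this diff shows:

```js
// Assumed client-side call; adjust the path and helpers to match the real client code.
const response = await fetch('/api/backends/text-completions/props', {
    method: 'POST',
    headers: getRequestHeaders(),
    body: JSON.stringify({ api_server: 'http://127.0.0.1:8080', api_type: 'llamacpp' }),
});
if (response.ok) {
    const props = await response.json();
    console.log(props.chat_template, props.chat_template_hash); // template text plus its SHA-256
}
```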
@@ -24,6 +24,8 @@ const defaultAvatarPath = './public/img/ai4.png';

// KV-store for parsed character data
const characterDataCache = new Map();
// Some Android devices require tighter memory management
const isAndroid = process.platform === 'android';

/**
* Reads the character card from the specified image file.
@@ -39,7 +41,7 @@ async function readCharacterData(inputFile, inputFormat = 'png') {
}

const result = parse(inputFile, inputFormat);
characterDataCache.set(cacheKey, result);
!isAndroid && characterDataCache.set(cacheKey, result);
return result;
}
@@ -6,7 +6,7 @@ import { publicLibConfig } from '../../webpack.config.js';
export default function getWebpackServeMiddleware() {
const compiler = webpack(publicLibConfig);

if (process.env.NODE_ENV === 'production') {
if (process.env.NODE_ENV === 'production' || process.platform === 'android') {
compiler.hooks.done.tap('serve', () => {
if (compiler.watching) {
compiler.watching.close(() => { });
@@ -347,6 +347,7 @@ export function convertGooglePrompt(messages, model, useSysPrompt = false, charN
'gemini-1.5-flash-8b-exp-0827',
'gemini-1.5-flash-8b-exp-0924',
'gemini-exp-1114',
'gemini-exp-1121',
'gemini-1.5-pro',
'gemini-1.5-pro-latest',
'gemini-1.5-pro-001',
@@ -628,11 +629,30 @@ export function convertMistralMessages(messages, charName = '', userName = '') {
export function mergeMessages(messages, charName, userName, strict) {
let mergedMessages = [];

/** @type {Map<string,object>} */
const contentTokens = new Map();

// Remove names from the messages
messages.forEach((message) => {
if (!message.content) {
message.content = '';
}
// Flatten contents and replace image URLs with random tokens
if (Array.isArray(message.content)) {
const text = message.content.map((content) => {
if (content.type === 'text') {
return content.text;
}
// Could be extended with other non-text types
if (content.type === 'image_url') {
const token = crypto.randomBytes(32).toString('base64');
contentTokens.set(token, content);
return token;
}
return '';
}).join('\n\n');
message.content = text;
}
if (message.role === 'system' && message.name === 'example_assistant') {
if (charName && !message.content.startsWith(`${charName}: `)) {
message.content = `${charName}: ${message.content}`;
@@ -673,6 +693,32 @@ export function mergeMessages(messages, charName, userName, strict) {
});
}

// Check for content tokens and replace them with the actual content objects
if (contentTokens.size > 0) {
mergedMessages.forEach((message) => {
const hasValidToken = Array.from(contentTokens.keys()).some(token => message.content.includes(token));

if (hasValidToken) {
const splitContent = message.content.split('\n\n');
const mergedContent = [];

splitContent.forEach((content) => {
if (contentTokens.has(content)) {
mergedContent.push(contentTokens.get(content));
} else {
if (mergedContent.length > 0 && mergedContent[mergedContent.length - 1].type === 'text') {
mergedContent[mergedContent.length - 1].text += `\n\n${content}`;
} else {
mergedContent.push({ type: 'text', text: content });
}
}
});

message.content = mergedContent;
}
});
}

if (strict) {
for (let i = 0; i < mergedMessages.length; i++) {
// Force mid-prompt system messages to be user messages
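The mergeMessages change above flattens multimodal content into plain text by stashing non-text parts behind random placeholder tokens, then swaps the original objects back in after the merge. A standalone, simplified sketch of that round trip (not the actual implementation; it skips the adjacent-text merging the real code performs, and the image URL is a made-up placeholder):

```js
import crypto from 'node:crypto';

const stash = new Map();

// Replace non-text parts with unguessable tokens so the array can be joined as one string.
const flatten = (parts) => parts.map((part) => {
    if (part.type === 'text') return part.text;
    const token = crypto.randomBytes(32).toString('base64');
    stash.set(token, part);
    return token;
}).join('\n\n');

// Split the merged string back up and restore the stashed objects.
const restore = (text) => text.split('\n\n').map((chunk) => stash.get(chunk) ?? { type: 'text', text: chunk });

const flat = flatten([
    { type: 'text', text: 'Look at this:' },
    { type: 'image_url', image_url: { url: 'data:image/png;base64,...' } },
]);
console.log(restore(flat)); // [{ type: 'text', ... }, { type: 'image_url', ... }]
```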