Merge branch 'staging' into sysprompt-divorce

This commit is contained in:
Cohee
2024-09-18 23:40:46 +03:00
21 changed files with 195 additions and 50 deletions

View File

@@ -83,6 +83,8 @@ skipContentCheck: false
disableChatBackup: false disableChatBackup: false
# Number of backups to keep for each chat and settings file # Number of backups to keep for each chat and settings file
numberOfBackups: 50 numberOfBackups: 50
# Interval in milliseconds to throttle chat backups per user
chatBackupThrottleInterval: 10000
# Allowed hosts for card downloads # Allowed hosts for card downloads
whitelistImportDomains: whitelistImportDomains:
- localhost - localhost

BIN
public/img/user-default.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 51 KiB

View File

@@ -2896,6 +2896,7 @@
<option value="mistral-large-latest">mistral-large-latest</option> <option value="mistral-large-latest">mistral-large-latest</option>
<option value="codestral-latest">codestral-latest</option> <option value="codestral-latest">codestral-latest</option>
<option value="codestral-mamba-latest">codestral-mamba-latest</option> <option value="codestral-mamba-latest">codestral-mamba-latest</option>
<option value="pixtral-latest">pixtral-latest</option>
</optgroup> </optgroup>
<optgroup label="Sub-versions"> <optgroup label="Sub-versions">
<option value="open-mistral-nemo-2407">open-mistral-nemo-2407</option> <option value="open-mistral-nemo-2407">open-mistral-nemo-2407</option>
@@ -2903,11 +2904,13 @@
<option value="mistral-tiny-2312">mistral-tiny-2312</option> <option value="mistral-tiny-2312">mistral-tiny-2312</option>
<option value="mistral-small-2312">mistral-small-2312</option> <option value="mistral-small-2312">mistral-small-2312</option>
<option value="mistral-small-2402">mistral-small-2402</option> <option value="mistral-small-2402">mistral-small-2402</option>
<option value="mistral-small-2409">mistral-small-2409</option>
<option value="mistral-medium-2312">mistral-medium-2312</option> <option value="mistral-medium-2312">mistral-medium-2312</option>
<option value="mistral-large-2402">mistral-large-2402</option> <option value="mistral-large-2402">mistral-large-2402</option>
<option value="mistral-large-2407">mistral-large-2407</option> <option value="mistral-large-2407">mistral-large-2407</option>
<option value="codestral-2405">codestral-2405</option> <option value="codestral-2405">codestral-2405</option>
<option value="codestral-mamba-2407">codestral-mamba-2407</option> <option value="codestral-mamba-2407">codestral-mamba-2407</option>
<option value="pixtral-12b-2409">pixtral-12b-2409</option>
</optgroup> </optgroup>
</select> </select>
</div> </div>

View File

@@ -510,6 +510,7 @@ let saveCharactersPage = 0;
export const default_avatar = 'img/ai4.png'; export const default_avatar = 'img/ai4.png';
export const system_avatar = 'img/five.png'; export const system_avatar = 'img/five.png';
export const comment_avatar = 'img/quill.png'; export const comment_avatar = 'img/quill.png';
export const default_user_avatar = 'img/user-default.png';
export let CLIENT_VERSION = 'SillyTavern:UNKNOWN:Cohee#1207'; // For Horde header export let CLIENT_VERSION = 'SillyTavern:UNKNOWN:Cohee#1207'; // For Horde header
let optionsPopper = Popper.createPopper(document.getElementById('options_button'), document.getElementById('options'), { let optionsPopper = Popper.createPopper(document.getElementById('options_button'), document.getElementById('options'), {
placement: 'top-start', placement: 'top-start',
@@ -3335,7 +3336,6 @@ function removeLastMessage() {
*/ */
export async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, quietToLoud, skipWIAN, force_chid, signal, quietImage, quietName } = {}, dryRun = false) { export async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, quietToLoud, skipWIAN, force_chid, signal, quietImage, quietName } = {}, dryRun = false) {
console.log('Generate entered'); console.log('Generate entered');
await eventSource.emit(event_types.GENERATION_STARTED, type, { automatic_trigger, force_name2, quiet_prompt, quietToLoud, skipWIAN, force_chid, signal, quietImage }, dryRun);
setGenerationProgress(0); setGenerationProgress(0);
generation_started = new Date(); generation_started = new Date();
@@ -3358,6 +3358,8 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
} }
} }
await eventSource.emit(event_types.GENERATION_STARTED, type, { automatic_trigger, force_name2, quiet_prompt, quietToLoud, skipWIAN, force_chid, signal, quietImage }, dryRun);
if (main_api == 'kobold' && kai_settings.streaming_kobold && !kai_flags.can_use_streaming) { if (main_api == 'kobold' && kai_settings.streaming_kobold && !kai_flags.can_use_streaming) {
toastr.error('Streaming is enabled, but the version of Kobold used does not support token streaming.', undefined, { timeOut: 10000, preventDuplicates: true }); toastr.error('Streaming is enabled, but the version of Kobold used does not support token streaming.', undefined, { timeOut: 10000, preventDuplicates: true });
unblockGeneration(type); unblockGeneration(type);

View File

@@ -29,7 +29,6 @@ export function saveMetadataDebounced() {
const characterId = context.characterId; const characterId = context.characterId;
if (saveMetadataTimeout) { if (saveMetadataTimeout) {
console.debug('Clearing save metadata timeout');
clearTimeout(saveMetadataTimeout); clearTimeout(saveMetadataTimeout);
} }

View File

@@ -403,6 +403,7 @@ jQuery(async function () {
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openai' && (secret_state[SECRET_KEYS.OPENAI] || extension_settings.caption.allow_reverse_proxy)) || (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openai' && (secret_state[SECRET_KEYS.OPENAI] || extension_settings.caption.allow_reverse_proxy)) ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openrouter' && secret_state[SECRET_KEYS.OPENROUTER]) || (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'openrouter' && secret_state[SECRET_KEYS.OPENROUTER]) ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'zerooneai' && secret_state[SECRET_KEYS.ZEROONEAI]) || (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'zerooneai' && secret_state[SECRET_KEYS.ZEROONEAI]) ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'mistral' && (secret_state[SECRET_KEYS.MISTRALAI] || extension_settings.caption.allow_reverse_proxy)) ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'google' && (secret_state[SECRET_KEYS.MAKERSUITE] || extension_settings.caption.allow_reverse_proxy)) || (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'google' && (secret_state[SECRET_KEYS.MAKERSUITE] || extension_settings.caption.allow_reverse_proxy)) ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'anthropic' && (secret_state[SECRET_KEYS.CLAUDE] || extension_settings.caption.allow_reverse_proxy)) || (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'anthropic' && (secret_state[SECRET_KEYS.CLAUDE] || extension_settings.caption.allow_reverse_proxy)) ||
(extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'ollama' && textgenerationwebui_settings.server_urls[textgen_types.OLLAMA]) || (extension_settings.caption.source === 'multimodal' && extension_settings.caption.multimodal_api === 'ollama' && textgenerationwebui_settings.server_urls[textgen_types.OLLAMA]) ||

View File

@@ -23,6 +23,7 @@
<option value="google">Google AI Studio</option> <option value="google">Google AI Studio</option>
<option value="koboldcpp">KoboldCpp</option> <option value="koboldcpp">KoboldCpp</option>
<option value="llamacpp">llama.cpp</option> <option value="llamacpp">llama.cpp</option>
<option value="mistral">MistralAI</option>
<option value="ollama">Ollama</option> <option value="ollama">Ollama</option>
<option value="openai">OpenAI</option> <option value="openai">OpenAI</option>
<option value="openrouter">OpenRouter</option> <option value="openrouter">OpenRouter</option>
@@ -33,6 +34,8 @@
<div class="flex1 flex-container flexFlowColumn flexNoGap"> <div class="flex1 flex-container flexFlowColumn flexNoGap">
<label for="caption_multimodal_model" data-i18n="Model">Model</label> <label for="caption_multimodal_model" data-i18n="Model">Model</label>
<select id="caption_multimodal_model" class="flex1 text_pole"> <select id="caption_multimodal_model" class="flex1 text_pole">
<option data-type="mistral" value="pixtral-latest">pixtral-latest</option>
<option data-type="mistral" value="pixtral-12b-2409">pixtral-12b-2409</option>
<option data-type="zerooneai" value="yi-vision">yi-vision</option> <option data-type="zerooneai" value="yi-vision">yi-vision</option>
<option data-type="openai" value="gpt-4-vision-preview">gpt-4-vision-preview</option> <option data-type="openai" value="gpt-4-vision-preview">gpt-4-vision-preview</option>
<option data-type="openai" value="gpt-4-turbo">gpt-4-turbo</option> <option data-type="openai" value="gpt-4-turbo">gpt-4-turbo</option>
@@ -96,7 +99,7 @@
<div data-type="ollama"> <div data-type="ollama">
The model must be downloaded first! Do it with the <code>ollama pull</code> command or <a href="#" id="caption_ollama_pull">click here</a>. The model must be downloaded first! Do it with the <code>ollama pull</code> command or <a href="#" id="caption_ollama_pull">click here</a>.
</div> </div>
<label data-type="openai,anthropic,google" class="checkbox_label flexBasis100p" for="caption_allow_reverse_proxy" title="Allow using reverse proxy if defined and valid."> <label data-type="openai,anthropic,google,mistral" class="checkbox_label flexBasis100p" for="caption_allow_reverse_proxy" title="Allow using reverse proxy if defined and valid.">
<input id="caption_allow_reverse_proxy" type="checkbox" class="checkbox"> <input id="caption_allow_reverse_proxy" type="checkbox" class="checkbox">
<span data-i18n="Allow reverse proxy">Allow reverse proxy</span> <span data-i18n="Allow reverse proxy">Allow reverse proxy</span>
</label> </label>

View File

@@ -13,7 +13,7 @@ import { createThumbnail, isValidUrl } from '../utils.js';
*/ */
export async function getMultimodalCaption(base64Img, prompt) { export async function getMultimodalCaption(base64Img, prompt) {
const useReverseProxy = const useReverseProxy =
(['openai', 'anthropic', 'google'].includes(extension_settings.caption.multimodal_api)) (['openai', 'anthropic', 'google', 'mistral'].includes(extension_settings.caption.multimodal_api))
&& extension_settings.caption.allow_reverse_proxy && extension_settings.caption.allow_reverse_proxy
&& oai_settings.reverse_proxy && oai_settings.reverse_proxy
&& isValidUrl(oai_settings.reverse_proxy); && isValidUrl(oai_settings.reverse_proxy);
@@ -36,7 +36,7 @@ export async function getMultimodalCaption(base64Img, prompt) {
const isVllm = extension_settings.caption.multimodal_api === 'vllm'; const isVllm = extension_settings.caption.multimodal_api === 'vllm';
const base64Bytes = base64Img.length * 0.75; const base64Bytes = base64Img.length * 0.75;
const compressionLimit = 2 * 1024 * 1024; const compressionLimit = 2 * 1024 * 1024;
if ((['google', 'openrouter'].includes(extension_settings.caption.multimodal_api) && base64Bytes > compressionLimit) || isOoba || isKoboldCpp) { if ((['google', 'openrouter', 'mistral'].includes(extension_settings.caption.multimodal_api) && base64Bytes > compressionLimit) || isOoba || isKoboldCpp) {
const maxSide = 1024; const maxSide = 1024;
base64Img = await createThumbnail(base64Img, maxSide, maxSide, 'image/jpeg'); base64Img = await createThumbnail(base64Img, maxSide, maxSide, 'image/jpeg');
} }
@@ -139,6 +139,10 @@ function throwIfInvalidModel(useReverseProxy) {
throw new Error('Google AI Studio API key is not set.'); throw new Error('Google AI Studio API key is not set.');
} }
if (extension_settings.caption.multimodal_api === 'mistral' && !secret_state[SECRET_KEYS.MISTRALAI] && !useReverseProxy) {
throw new Error('Mistral AI API key is not set.');
}
if (extension_settings.caption.multimodal_api === 'ollama' && !textgenerationwebui_settings.server_urls[textgen_types.OLLAMA]) { if (extension_settings.caption.multimodal_api === 'ollama' && !textgenerationwebui_settings.server_urls[textgen_types.OLLAMA]) {
throw new Error('Ollama server URL is not set.'); throw new Error('Ollama server URL is not set.');
} }

View File

@@ -1,10 +1,10 @@
import { import {
saveSettingsDebounced, amount_gen,
callPopup, callPopup,
setGenerationProgress,
getRequestHeaders, getRequestHeaders,
max_context, max_context,
amount_gen, saveSettingsDebounced,
setGenerationProgress,
} from '../script.js'; } from '../script.js';
import { SECRET_KEYS, writeSecret } from './secrets.js'; import { SECRET_KEYS, writeSecret } from './secrets.js';
import { delay } from './utils.js'; import { delay } from './utils.js';
@@ -45,8 +45,7 @@ async function getWorkers(force) {
headers: getRequestHeaders(), headers: getRequestHeaders(),
body: JSON.stringify({ force }), body: JSON.stringify({ force }),
}); });
const data = await response.json(); return await response.json();
return data;
} }
/** /**
@@ -61,16 +60,18 @@ async function getModels(force) {
body: JSON.stringify({ force }), body: JSON.stringify({ force }),
}); });
const data = await response.json(); const data = await response.json();
console.log('getModels', data);
return data; return data;
} }
/** /**
* Gets the status of a Horde task. * Gets the status of a Horde task.
* @param {string} taskId Task ID * @param {string} taskId Task ID
* @returns {Promise<Object>} Task status * @returns {Promise<Object>} Task status
*/ */
async function getTaskStatus(taskId) { async function getTaskStatus(taskId) {
const response = await fetch('/api/horde/task-status', { const response = await fetch('/api/horde/task-status', {
method: 'POST', method: 'POST',
headers: getRequestHeaders(), headers: getRequestHeaders(),
body: JSON.stringify({ taskId }), body: JSON.stringify({ taskId }),
@@ -80,8 +81,7 @@ async function getTaskStatus(taskId) {
throw new Error(`Failed to get task status: ${response.statusText}`); throw new Error(`Failed to get task status: ${response.statusText}`);
} }
const data = await response.json(); return await response.json();
return data;
} }
/** /**
@@ -148,7 +148,7 @@ async function adjustHordeGenerationParams(max_context_length, max_length) {
for (const model of selectedModels) { for (const model of selectedModels) {
for (const worker of workers) { for (const worker of workers) {
if (model.cluster == worker.cluster && worker.models.includes(model.name)) { if (model.cluster === worker.cluster && worker.models.includes(model.name)) {
// Skip workers that are not trusted if the option is enabled // Skip workers that are not trusted if the option is enabled
if (horde_settings.trusted_workers_only && !worker.trusted) { if (horde_settings.trusted_workers_only && !worker.trusted) {
continue; continue;
@@ -250,12 +250,10 @@ async function generateHorde(prompt, params, signal, reportProgress) {
console.log(generatedText); console.log(generatedText);
console.log(`Generated by Horde Worker: ${WorkerName} [${WorkerModel}]`); console.log(`Generated by Horde Worker: ${WorkerName} [${WorkerModel}]`);
return { text: generatedText, workerName: `Generated by Horde worker: ${WorkerName} [${WorkerModel}]` }; return { text: generatedText, workerName: `Generated by Horde worker: ${WorkerName} [${WorkerModel}]` };
} } else if (!queue_position_first) {
else if (!queue_position_first) {
queue_position_first = statusCheckJson.queue_position; queue_position_first = statusCheckJson.queue_position;
reportProgress && setGenerationProgress(0); reportProgress && setGenerationProgress(0);
} } else if (statusCheckJson.queue_position >= 0) {
else if (statusCheckJson.queue_position >= 0) {
let queue_position = statusCheckJson.queue_position; let queue_position = statusCheckJson.queue_position;
const progress = Math.round(100 - (queue_position / queue_position_first * 100)); const progress = Math.round(100 - (queue_position / queue_position_first * 100));
reportProgress && setGenerationProgress(progress); reportProgress && setGenerationProgress(progress);
@@ -268,17 +266,24 @@ async function generateHorde(prompt, params, signal, reportProgress) {
throw new Error('Horde timeout'); throw new Error('Horde timeout');
} }
/** /**
* Displays the available models in the Horde model selection dropdown. * Displays the available models in the Horde model selection dropdown.
* @param {boolean} force Force refresh of the models * @param {boolean} force Force refresh of the models
*/ */
async function getHordeModels(force) { async function getHordeModels(force) {
const sortByPerformance = (a, b) => b.performance - a.performance;
const sortByWhitelisted = (a, b) => b.is_whitelisted - a.is_whitelisted;
const sortByPopular = (a, b) => b.tags?.includes('popular') - a.tags?.includes('popular');
$('#horde_model').empty(); $('#horde_model').empty();
models = (await getModels(force)).sort((a, b) => b.performance - a.performance); models = (await getModels(force)).sort((a, b) => {
return sortByWhitelisted(a, b) || sortByPopular(a, b) || sortByPerformance(a, b);
});
for (const model of models) { for (const model of models) {
const option = document.createElement('option'); const option = document.createElement('option');
option.value = model.name; option.value = model.name;
option.innerText = `${model.name} (ETA: ${model.eta}s, Speed: ${model.performance}, Queue: ${model.queued}, Workers: ${model.count})`; option.innerText = hordeModelTextString(model);
option.selected = horde_settings.models.includes(model.name); option.selected = horde_settings.models.includes(model.name);
$('#horde_model').append(option); $('#horde_model').append(option);
} }
@@ -323,8 +328,66 @@ async function showKudos() {
toastr.info(`Kudos: ${data.kudos}`, data.username); toastr.info(`Kudos: ${data.kudos}`, data.username);
} }
function hordeModelTextString(model) {
const q = hordeModelQueueStateString(model);
return `${model.name} (${q})`;
}
function hordeModelQueueStateString(model) {
return `ETA: ${model.eta}s, Speed: ${model.performance}, Queue: ${model.queued}, Workers: ${model.count}`;
}
function getHordeModelTemplate(option) {
const model = models.find(x => x.name === option?.element?.value);
if (!option.id || !model) {
console.debug('No model found for option', option, option?.element?.value);
console.debug('Models', models);
return option.text;
}
const strip = html => {
const tmp = document.createElement('DIV');
tmp.innerHTML = html || '';
return tmp.textContent || tmp.innerText || '';
};
// how much do we trust the metadata from the models repo? about this much
const displayName = strip(model.display_name || model.name).replace(/.*\//g, '');
const description = strip(model.description);
const tags = model.tags ? model.tags.map(strip) : [];
const url = strip(model.url);
const style = strip(model.style);
const workerInfo = hordeModelQueueStateString(model);
const isPopular = model.tags?.includes('popular');
const descriptionDiv = description ? `<div class="horde-model-description">${description}</div>` : '';
const tagSpans = tags.length > 0 &&
`${tags.map(tag => `<span class="tag tag_name">${tag}</span>`).join('')}</span>` || '';
const modelDetailsLink = url && `<a href="${url}" target="_blank" rel="noopener noreferrer" class="model-details-link fa-solid fa-circle-question"> </a>`;
const capitalize = s => s ? s[0].toUpperCase() + s.slice(1) : '';
const innerContent = [
`<strong>${displayName}</strong> ${modelDetailsLink}`,
style ? `${capitalize(style)}` : '',
tagSpans ? `<span class="tags tags_inline inline-flex margin-r2">${tagSpans}</span>` : '',
].filter(Boolean).join(' | ');
return $((`
<div class="flex-container flexFlowColumn">
<div>
${isPopular ? '<span class="fa-fw fa-solid fa-star" title="Popular"></span>' : ''}
${innerContent}
</div>
${descriptionDiv}
<div><small>${workerInfo}</small></div>
</div>
`));
}
jQuery(function () { jQuery(function () {
$('#horde_model').on('mousedown change', async function (e) { $('#horde_model').on('mousedown change', async function (e) {
console.log('Horde model change', e);
horde_settings.models = $('#horde_model').val(); horde_settings.models = $('#horde_model').val();
console.log('Updated Horde models', horde_settings.models); console.log('Updated Horde models', horde_settings.models);
@@ -374,10 +437,7 @@ jQuery(function () {
// Customize the pillbox text by shortening the full text // Customize the pillbox text by shortening the full text
return data.id; return data.id;
}, },
templateResult: function (data) { templateResult: getHordeModelTemplate,
// Return the full text for the dropdown
return data.text;
},
}); });
} }
}); });

View File

@@ -2490,7 +2490,7 @@ class Message {
* @returns {Promise<string>} Compressed image as a Data URL. * @returns {Promise<string>} Compressed image as a Data URL.
*/ */
async compressImage(image) { async compressImage(image) {
if ([chat_completion_sources.OPENROUTER, chat_completion_sources.MAKERSUITE].includes(oai_settings.chat_completion_source)) { if ([chat_completion_sources.OPENROUTER, chat_completion_sources.MAKERSUITE, chat_completion_sources.MISTRALAI].includes(oai_settings.chat_completion_source)) {
const sizeThreshold = 2 * 1024 * 1024; const sizeThreshold = 2 * 1024 * 1024;
const dataSize = image.length * 0.75; const dataSize = image.length * 0.75;
const maxSide = 1024; const maxSide = 1024;
@@ -4221,6 +4221,8 @@ async function onModelChange() {
$('#openai_max_context').attr('max', max_128k); $('#openai_max_context').attr('max', max_128k);
} else if (oai_settings.mistralai_model.includes('mixtral-8x22b')) { } else if (oai_settings.mistralai_model.includes('mixtral-8x22b')) {
$('#openai_max_context').attr('max', max_64k); $('#openai_max_context').attr('max', max_64k);
} else if (oai_settings.mistralai_model.includes('pixtral')) {
$('#openai_max_context').attr('max', max_128k);
} else { } else {
$('#openai_max_context').attr('max', max_32k); $('#openai_max_context').attr('max', max_32k);
} }
@@ -4770,6 +4772,8 @@ export function isImageInliningSupported() {
'gpt-4o-mini', 'gpt-4o-mini',
'chatgpt-4o-latest', 'chatgpt-4o-latest',
'yi-vision', 'yi-vision',
'pixtral-latest',
'pixtral-12b-2409',
]; ];
switch (oai_settings.chat_completion_source) { switch (oai_settings.chat_completion_source) {
@@ -4785,6 +4789,8 @@ export function isImageInliningSupported() {
return true; return true;
case chat_completion_sources.ZEROONEAI: case chat_completion_sources.ZEROONEAI:
return visionSupportedModels.some(model => oai_settings.zerooneai_model.includes(model)); return visionSupportedModels.some(model => oai_settings.zerooneai_model.includes(model));
case chat_completion_sources.MISTRALAI:
return visionSupportedModels.some(model => oai_settings.mistralai_model.includes(model));
default: default:
return false; return false;
} }

View File

@@ -2,7 +2,7 @@ import {
characters, characters,
chat, chat,
chat_metadata, chat_metadata,
default_avatar, default_user_avatar,
eventSource, eventSource,
event_types, event_types,
getRequestHeaders, getRequestHeaders,
@@ -357,7 +357,7 @@ async function createDummyPersona() {
// Date + name (only ASCII) to make it unique // Date + name (only ASCII) to make it unique
const avatarId = `${Date.now()}-${personaName.replace(/[^a-zA-Z0-9]/g, '')}.png`; const avatarId = `${Date.now()}-${personaName.replace(/[^a-zA-Z0-9]/g, '')}.png`;
initPersona(avatarId, personaName, ''); initPersona(avatarId, personaName, '');
await uploadUserAvatar(default_avatar, avatarId); await uploadUserAvatar(default_user_avatar, avatarId);
} }
/** /**
@@ -944,7 +944,7 @@ async function onPersonasRestoreInput(e) {
// If the avatar is missing, upload it // If the avatar is missing, upload it
if (!avatarsList.includes(key)) { if (!avatarsList.includes(key)) {
warnings.push(`Persona image "${key}" (${value}) is missing, uploading default avatar`); warnings.push(`Persona image "${key}" (${value}) is missing, uploading default avatar`);
await uploadUserAvatar(default_avatar, key); await uploadUserAvatar(default_user_avatar, key);
} }
} }

View File

@@ -15,7 +15,7 @@ import { BIAS_CACHE, createNewLogitBiasEntry, displayLogitBias, getLogitBiasList
import { power_user, registerDebugFunction } from './power-user.js'; import { power_user, registerDebugFunction } from './power-user.js';
import { getEventSourceStream } from './sse-stream.js'; import { getEventSourceStream } from './sse-stream.js';
import { getCurrentDreamGenModelTokenizer, getCurrentOpenRouterModelTokenizer } from './textgen-models.js'; import { getCurrentDreamGenModelTokenizer, getCurrentOpenRouterModelTokenizer } from './textgen-models.js';
import { SENTENCEPIECE_TOKENIZERS, TEXTGEN_TOKENIZERS, getTextTokens, tokenizers } from './tokenizers.js'; import { ENCODE_TOKENIZERS, TEXTGEN_TOKENIZERS, getTextTokens, tokenizers } from './tokenizers.js';
import { getSortableDelay, onlyUnique } from './utils.js'; import { getSortableDelay, onlyUnique } from './utils.js';
export { export {
@@ -353,7 +353,7 @@ function getTokenizerForTokenIds() {
return tokenizers.API_CURRENT; return tokenizers.API_CURRENT;
} }
if (SENTENCEPIECE_TOKENIZERS.includes(power_user.tokenizer)) { if (ENCODE_TOKENIZERS.includes(power_user.tokenizer)) {
return power_user.tokenizer; return power_user.tokenizer;
} }

View File

@@ -33,18 +33,22 @@ export const tokenizers = {
BEST_MATCH: 99, BEST_MATCH: 99,
}; };
export const SENTENCEPIECE_TOKENIZERS = [ // A list of local tokenizers that support encoding and decoding token ids.
export const ENCODE_TOKENIZERS = [
tokenizers.LLAMA, tokenizers.LLAMA,
tokenizers.MISTRAL, tokenizers.MISTRAL,
tokenizers.YI, tokenizers.YI,
tokenizers.LLAMA3, tokenizers.LLAMA3,
tokenizers.GEMMA, tokenizers.GEMMA,
tokenizers.JAMBA, tokenizers.JAMBA,
tokenizers.QWEN2,
tokenizers.COMMAND_R,
// uncomment when NovelAI releases Kayra and Clio weights, lol // uncomment when NovelAI releases Kayra and Clio weights, lol
//tokenizers.NERD, //tokenizers.NERD,
//tokenizers.NERD2, //tokenizers.NERD2,
]; ];
// A list of Text Completion sources that support remote tokenization.
export const TEXTGEN_TOKENIZERS = [OOBA, TABBY, KOBOLDCPP, LLAMACPP, VLLM, APHRODITE]; export const TEXTGEN_TOKENIZERS = [OOBA, TABBY, KOBOLDCPP, LLAMACPP, VLLM, APHRODITE];
const TOKENIZER_URLS = { const TOKENIZER_URLS = {

View File

@@ -1908,13 +1908,13 @@ export function select2ChoiceClickSubscribe(control, action, { buttonStyle = fal
* @returns {string} The html representation of the highlighted regex * @returns {string} The html representation of the highlighted regex
*/ */
export function highlightRegex(regexStr) { export function highlightRegex(regexStr) {
// Function to escape HTML special characters for safety // Function to escape special characters for safety or readability
const escapeHtml = (str) => str.replace(/[&<>"']/g, match => ({ const escape = (str) => str.replace(/[&<>"'\x01]/g, match => ({
'&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', '\'': '&#39;', '&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', '\'': '&#39;', '\x01': '\\x01',
})[match]); })[match]);
// Replace special characters with their HTML-escaped forms // Replace special characters with their escaped forms
regexStr = escapeHtml(regexStr); regexStr = escape(regexStr);
// Patterns that we want to highlight only if they are not escaped // Patterns that we want to highlight only if they are not escaped
function getPatterns() { function getPatterns() {

View File

@@ -3518,6 +3518,14 @@ grammarly-extension {
column-gap: 20px; column-gap: 20px;
} }
.horde-model-description {
-webkit-line-clamp: 3;
line-clamp: 3;
font-size: 0.9em;
overflow: hidden;
text-overflow: ellipsis;
}
.drag-handle { .drag-handle {
cursor: grab; cursor: grab;
/* Make the drag handle not selectable in most browsers */ /* Make the drag handle not selectable in most browsers */

View File

@@ -5,7 +5,6 @@ const PUBLIC_DIRECTORIES = {
extensions: 'public/scripts/extensions', extensions: 'public/scripts/extensions',
}; };
const DEFAULT_AVATAR = '/img/ai4.png';
const SETTINGS_FILE = 'settings.json'; const SETTINGS_FILE = 'settings.json';
/** /**
@@ -423,7 +422,6 @@ const VLLM_KEYS = [
module.exports = { module.exports = {
DEFAULT_USER, DEFAULT_USER,
DEFAULT_AVATAR,
SETTINGS_FILE, SETTINGS_FILE,
PUBLIC_DIRECTORIES, PUBLIC_DIRECTORIES,
USER_DIRECTORY_TEMPLATE, USER_DIRECTORY_TEMPLATE,

View File

@@ -726,13 +726,12 @@ router.post('/create', urlencodedParser, async function (request, response) {
const char = JSON.stringify(charaFormatData(request.body, request.user.directories)); const char = JSON.stringify(charaFormatData(request.body, request.user.directories));
const internalName = getPngName(request.body.ch_name, request.user.directories); const internalName = getPngName(request.body.ch_name, request.user.directories);
const avatarName = `${internalName}.png`; const avatarName = `${internalName}.png`;
const defaultAvatar = './public/img/ai4.png';
const chatsPath = path.join(request.user.directories.chats, internalName); const chatsPath = path.join(request.user.directories.chats, internalName);
if (!fs.existsSync(chatsPath)) fs.mkdirSync(chatsPath); if (!fs.existsSync(chatsPath)) fs.mkdirSync(chatsPath);
if (!request.file) { if (!request.file) {
await writeCharacterData(defaultAvatar, char, internalName, request); await writeCharacterData(defaultAvatarPath, char, internalName, request);
return response.send(avatarName); return response.send(avatarName);
} else { } else {
const crop = tryParse(request.query.crop); const crop = tryParse(request.query.crop);

View File

@@ -4,6 +4,7 @@ const readline = require('readline');
const express = require('express'); const express = require('express');
const sanitize = require('sanitize-filename'); const sanitize = require('sanitize-filename');
const writeFileAtomicSync = require('write-file-atomic').sync; const writeFileAtomicSync = require('write-file-atomic').sync;
const _ = require('lodash');
const { jsonParser, urlencodedParser } = require('../express-common'); const { jsonParser, urlencodedParser } = require('../express-common');
const { getConfigValue, humanizedISO8601DateTime, tryParse, generateTimestamp, removeOldBackups } = require('../util'); const { getConfigValue, humanizedISO8601DateTime, tryParse, generateTimestamp, removeOldBackups } = require('../util');
@@ -34,6 +35,27 @@ function backupChat(directory, name, chat) {
} }
} }
const backupFunctions = new Map();
/**
* Gets a backup function for a user.
* @param {string} handle User handle
* @returns {function(string, string, string): void} Backup function
*/
function getBackupFunction(handle) {
const throttleInterval = getConfigValue('chatBackupThrottleInterval', 10_000);
if (!backupFunctions.has(handle)) {
backupFunctions.set(handle, _.throttle(backupChat, throttleInterval, { leading: true, trailing: true }));
}
return backupFunctions.get(handle);
}
process.on('exit', () => {
for (const func of backupFunctions.values()) {
func.flush();
}
});
/** /**
* Imports a chat from Ooba's format. * Imports a chat from Ooba's format.
* @param {string} userName User name * @param {string} userName User name
@@ -147,7 +169,7 @@ router.post('/save', jsonParser, function (request, response) {
const fileName = `${String(request.body.file_name)}.jsonl`; const fileName = `${String(request.body.file_name)}.jsonl`;
const filePath = path.join(request.user.directories.chats, directoryName, sanitize(fileName)); const filePath = path.join(request.user.directories.chats, directoryName, sanitize(fileName));
writeFileAtomicSync(filePath, jsonlData, 'utf8'); writeFileAtomicSync(filePath, jsonlData, 'utf8');
backupChat(request.user.directories.backups, directoryName, jsonlData); getBackupFunction(request.user.profile.handle)(request.user.directories.backups, directoryName, jsonlData);
return response.send({ result: 'ok' }); return response.send({ result: 'ok' });
} catch (error) { } catch (error) {
response.send(error); response.send(error);
@@ -446,7 +468,7 @@ router.post('/group/save', jsonParser, (request, response) => {
let chat_data = request.body.chat; let chat_data = request.body.chat;
let jsonlData = chat_data.map(JSON.stringify).join('\n'); let jsonlData = chat_data.map(JSON.stringify).join('\n');
writeFileAtomicSync(pathToFile, jsonlData, 'utf8'); writeFileAtomicSync(pathToFile, jsonlData, 'utf8');
backupChat(request.user.directories.backups, String(id), jsonlData); getBackupFunction(request.user.profile.handle)(request.user.directories.backups, String(id), jsonlData);
return response.send({ ok: true }); return response.send({ ok: true });
}); });

View File

@@ -6,6 +6,7 @@ const { readSecret, SECRET_KEYS } = require('./secrets');
const { jsonParser } = require('../express-common'); const { jsonParser } = require('../express-common');
const ANONYMOUS_KEY = '0000000000'; const ANONYMOUS_KEY = '0000000000';
const HORDE_TEXT_MODEL_METADATA_URL = 'https://raw.githubusercontent.com/db0/AI-Horde-text-model-reference/main/db.json';
const cache = new Cache(60 * 1000); const cache = new Cache(60 * 1000);
const router = express.Router(); const router = express.Router();
@@ -23,10 +24,9 @@ async function getClientAgent() {
* @returns {Promise<AIHorde>} AIHorde client * @returns {Promise<AIHorde>} AIHorde client
*/ */
async function getHordeClient() { async function getHordeClient() {
const ai_horde = new AIHorde({ return new AIHorde({
client_agent: await getClientAgent(), client_agent: await getClientAgent(),
}); });
return ai_horde;
} }
/** /**
@@ -79,10 +79,24 @@ router.post('/text-workers', jsonParser, async (request, response) => {
} }
}); });
async function getHordeTextModelMetadata() {
const response = await fetch(HORDE_TEXT_MODEL_METADATA_URL);
return await response.json();
}
async function mergeModelsAndMetadata(models, metadata) {
return models.map(model => {
const metadataModel = metadata[model.name];
if (!metadataModel) {
return { ...model, is_whitelisted: false };
}
return { ...model, ...metadataModel, is_whitelisted: true };
});
}
router.post('/text-models', jsonParser, async (request, response) => { router.post('/text-models', jsonParser, async (request, response) => {
try { try {
const cachedModels = cache.get('models'); const cachedModels = cache.get('models');
if (cachedModels && !request.body.force) { if (cachedModels && !request.body.force) {
return response.send(cachedModels); return response.send(cachedModels);
} }
@@ -94,7 +108,17 @@ router.post('/text-models', jsonParser, async (request, response) => {
}, },
}); });
const data = await fetchResult.json(); let data = await fetchResult.json();
// attempt to fetch and merge models metadata
try {
const metadata = await getHordeTextModelMetadata();
data = await mergeModelsAndMetadata(data, metadata);
}
catch (error) {
console.error('Failed to fetch metadata:', error);
}
cache.set('models', data); cache.set('models', data);
return response.send(data); return response.send(data);
} catch (error) { } catch (error) {
@@ -310,6 +334,7 @@ router.post('/generate-image', jsonParser, async (request, response) => {
console.log('Stable Horde request:', request.body); console.log('Stable Horde request:', request.body);
const ai_horde = await getHordeClient(); const ai_horde = await getHordeClient();
// noinspection JSCheckFunctionSignatures -- see @ts-ignore - use_gfpgan
const generation = await ai_horde.postAsyncImageGenerate( const generation = await ai_horde.postAsyncImageGenerate(
{ {
prompt: `${request.body.prompt} ### ${request.body.negative_prompt}`, prompt: `${request.body.prompt} ### ${request.body.negative_prompt}`,

View File

@@ -51,6 +51,10 @@ router.post('/caption-image', jsonParser, async (request, response) => {
key = readSecret(request.user.directories, SECRET_KEYS.ZEROONEAI); key = readSecret(request.user.directories, SECRET_KEYS.ZEROONEAI);
} }
if (request.body.api === 'mistral') {
key = readSecret(request.user.directories, SECRET_KEYS.MISTRALAI);
}
if (!key && !request.body.reverse_proxy && ['custom', 'ooba', 'koboldcpp', 'vllm'].includes(request.body.api) === false) { if (!key && !request.body.reverse_proxy && ['custom', 'ooba', 'koboldcpp', 'vllm'].includes(request.body.api) === false) {
console.log('No key found for API', request.body.api); console.log('No key found for API', request.body.api);
return response.sendStatus(400); return response.sendStatus(400);
@@ -107,6 +111,10 @@ router.post('/caption-image', jsonParser, async (request, response) => {
apiUrl = 'https://api.01.ai/v1/chat/completions'; apiUrl = 'https://api.01.ai/v1/chat/completions';
} }
if (request.body.api === 'mistral') {
apiUrl = 'https://api.mistral.ai/v1/chat/completions';
}
if (request.body.api === 'ooba') { if (request.body.api === 'ooba') {
apiUrl = `${trimV1(request.body.server_url)}/v1/chat/completions`; apiUrl = `${trimV1(request.body.server_url)}/v1/chat/completions`;
const imgMessage = body.messages.pop(); const imgMessage = body.messages.pop();

View File

@@ -11,7 +11,7 @@ const mime = require('mime-types');
const archiver = require('archiver'); const archiver = require('archiver');
const writeFileAtomicSync = require('write-file-atomic').sync; const writeFileAtomicSync = require('write-file-atomic').sync;
const { USER_DIRECTORY_TEMPLATE, DEFAULT_USER, PUBLIC_DIRECTORIES, DEFAULT_AVATAR, SETTINGS_FILE } = require('./constants'); const { USER_DIRECTORY_TEMPLATE, DEFAULT_USER, PUBLIC_DIRECTORIES, SETTINGS_FILE } = require('./constants');
const { getConfigValue, color, delay, setConfigValue, generateTimestamp } = require('./util'); const { getConfigValue, color, delay, setConfigValue, generateTimestamp } = require('./util');
const { readSecret, writeSecret } = require('./endpoints/secrets'); const { readSecret, writeSecret } = require('./endpoints/secrets');
@@ -25,6 +25,7 @@ const ANON_CSRF_SECRET = crypto.randomBytes(64).toString('base64');
* @type {Map<string, UserDirectoryList>} * @type {Map<string, UserDirectoryList>}
*/ */
const DIRECTORIES_CACHE = new Map(); const DIRECTORIES_CACHE = new Map();
const PUBLIC_USER_AVATAR = '/img/default-user.png';
const STORAGE_KEYS = { const STORAGE_KEYS = {
csrfSecret: 'csrfSecret', csrfSecret: 'csrfSecret',
@@ -510,11 +511,11 @@ async function getUserAvatar(handle) {
const settings = fs.existsSync(pathToSettings) ? JSON.parse(fs.readFileSync(pathToSettings, 'utf8')) : {}; const settings = fs.existsSync(pathToSettings) ? JSON.parse(fs.readFileSync(pathToSettings, 'utf8')) : {};
const avatarFile = settings?.power_user?.default_persona || settings?.user_avatar; const avatarFile = settings?.power_user?.default_persona || settings?.user_avatar;
if (!avatarFile) { if (!avatarFile) {
return DEFAULT_AVATAR; return PUBLIC_USER_AVATAR;
} }
const avatarPath = path.join(directory.avatars, avatarFile); const avatarPath = path.join(directory.avatars, avatarFile);
if (!fs.existsSync(avatarPath)) { if (!fs.existsSync(avatarPath)) {
return DEFAULT_AVATAR; return PUBLIC_USER_AVATAR;
} }
const mimeType = mime.lookup(avatarPath); const mimeType = mime.lookup(avatarPath);
const base64Content = fs.readFileSync(avatarPath, 'base64'); const base64Content = fs.readFileSync(avatarPath, 'base64');
@@ -522,7 +523,7 @@ async function getUserAvatar(handle) {
} }
catch { catch {
// Ignore errors // Ignore errors
return DEFAULT_AVATAR; return PUBLIC_USER_AVATAR;
} }
} }