Merge branch 'staging' into neo-server
commit 16785ae005
@@ -6357,14 +6357,20 @@ async function saveSettings(type) {
     });
 }
 
-export function setGenerationParamsFromPreset(preset) {
+export function setGenerationParamsFromPreset(preset, isMancerChange = null) {
     const needsUnlock = (preset.max_length ?? max_context) > MAX_CONTEXT_DEFAULT || (preset.genamt ?? amount_gen) > MAX_RESPONSE_DEFAULT;
     $('#max_context_unlocked').prop('checked', needsUnlock).trigger('change');
 
     if (preset.genamt !== undefined) {
         amount_gen = preset.genamt;
-        $('#amount_gen').val(amount_gen);
-        $('#amount_gen_counter').val(amount_gen);
+        if (isMancerChange) {
+            $('#amount_gen').attr('max', amount_gen);
+            $('#amount_gen_counter').val($('#amount_gen').val());
+        }
+        else {
+            $('#amount_gen').val(amount_gen);
+            $('#amount_gen_counter').val(amount_gen);
+        }
     }
 
     if (preset.max_length !== undefined) {
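The only caller in this diff that passes the new isMancerChange flag is onMancerModelSelect (see the textgen-settings hunk near the end of this commit). A minimal sketch of the two call shapes, with illustrative numbers rather than values taken from the diff:

// Applying an ordinary preset: both sliders receive the preset values.
setGenerationParamsFromPreset({ max_length: 8192, genamt: 512 });

// Switching a Mancer model: the completion limit becomes the response slider's
// upper bound instead of overwriting its current value.
setGenerationParamsFromPreset({ max_length: 8192, genamt: 512 }, true);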
@@ -1,10 +1,11 @@
-import { getBase64Async, saveBase64AsFile } from '../../utils.js';
+import { getBase64Async, isTrueBoolean, saveBase64AsFile } from '../../utils.js';
 import { getContext, getApiUrl, doExtrasFetch, extension_settings, modules } from '../../extensions.js';
 import { callPopup, getRequestHeaders, saveSettingsDebounced, substituteParams } from '../../../script.js';
 import { getMessageTimeStamp } from '../../RossAscends-mods.js';
 import { SECRET_KEYS, secret_state } from '../../secrets.js';
 import { getMultimodalCaption } from '../shared.js';
 import { textgen_types, textgenerationwebui_settings } from '../../textgen-settings.js';
+import { registerSlashCommand } from '../../slash-commands.js';
 export { MODULE_NAME };
 
 const MODULE_NAME = 'caption';
@@ -124,9 +125,10 @@ async function sendCaptionedMessage(caption, image) {
  * Generates a caption for an image using a selected source.
  * @param {string} base64Img Base64 encoded image without the data:image/...;base64, prefix
  * @param {string} fileData Base64 encoded image with the data:image/...;base64, prefix
+ * @param {string} externalPrompt Caption prompt
  * @returns {Promise<{caption: string}>} Generated caption
  */
-async function doCaptionRequest(base64Img, fileData) {
+async function doCaptionRequest(base64Img, fileData, externalPrompt) {
     switch (extension_settings.caption.source) {
         case 'local':
             return await captionLocal(base64Img);
@@ -135,7 +137,7 @@ async function doCaptionRequest(base64Img, fileData) {
         case 'horde':
             return await captionHorde(base64Img);
         case 'multimodal':
-            return await captionMultimodal(fileData);
+            return await captionMultimodal(fileData, externalPrompt);
         default:
             throw new Error('Unknown caption source.');
     }
@@ -214,12 +216,13 @@ async function captionHorde(base64Img) {
 /**
  * Generates a caption for an image using a multimodal model.
  * @param {string} base64Img Base64 encoded image with the data:image/...;base64, prefix
+ * @param {string} externalPrompt Caption prompt
  * @returns {Promise<{caption: string}>} Generated caption
  */
-async function captionMultimodal(base64Img) {
-    let prompt = extension_settings.caption.prompt || PROMPT_DEFAULT;
+async function captionMultimodal(base64Img, externalPrompt) {
+    let prompt = externalPrompt || extension_settings.caption.prompt || PROMPT_DEFAULT;
 
-    if (extension_settings.caption.prompt_ask) {
+    if (!externalPrompt && extension_settings.caption.prompt_ask) {
         const customPrompt = await callPopup('<h3>Enter a comment or question:</h3>', 'input', prompt, { rows: 2 });
         if (!customPrompt) {
             throw new Error('User aborted the caption sending.');
@@ -231,29 +234,46 @@ async function captionMultimodal(base64Img) {
     return { caption };
 }
 
-async function onSelectImage(e) {
-    setSpinnerIcon();
+/**
+ * Handles the image selection event.
+ * @param {Event} e Input event
+ * @param {string} prompt Caption prompt
+ * @param {boolean} quiet Suppresses sending a message
+ * @returns {Promise<string>} Generated caption
+ */
+async function onSelectImage(e, prompt, quiet) {
+    if (!(e.target instanceof HTMLInputElement)) {
+        return '';
+    }
+
     const file = e.target.files[0];
+    const form = e.target.form;
 
     if (!file || !(file instanceof File)) {
-        return;
+        form && form.reset();
+        return '';
     }
 
     try {
+        setSpinnerIcon();
         const context = getContext();
         const fileData = await getBase64Async(file);
         const base64Format = fileData.split(',')[0].split(';')[0].split('/')[1];
         const base64Data = fileData.split(',')[1];
-        const { caption } = await doCaptionRequest(base64Data, fileData);
-        const imagePath = await saveBase64AsFile(base64Data, context.name2, '', base64Format);
-        await sendCaptionedMessage(caption, imagePath);
+        const { caption } = await doCaptionRequest(base64Data, fileData, prompt);
+        if (!quiet) {
+            const imagePath = await saveBase64AsFile(base64Data, context.name2, '', base64Format);
+            await sendCaptionedMessage(caption, imagePath);
+        }
+        return caption;
     }
     catch (error) {
         toastr.error('Failed to caption image.');
         console.log(error);
+        return '';
     }
     finally {
-        e.target.form.reset();
+        form && form.reset();
         setImageIcon();
     }
 }
@@ -263,6 +283,26 @@ function onRefineModeInput() {
     saveSettingsDebounced();
 }
 
+/**
+ * Callback for the /caption command.
+ * @param {object} args Named parameters
+ * @param {string} prompt Caption prompt
+ */
+function captionCommandCallback(args, prompt) {
+    return new Promise(resolve => {
+        const quiet = isTrueBoolean(args?.quiet);
+        const input = document.createElement('input');
+        input.type = 'file';
+        input.accept = 'image/*';
+        input.onchange = async (e) => {
+            const caption = await onSelectImage(e, prompt, quiet);
+            resolve(caption);
+        };
+        input.oncancel = () => resolve('');
+        input.click();
+    });
+}
+
 jQuery(function () {
     function addSendPictureButton() {
         const sendButton = $(`
@@ -302,7 +342,7 @@ jQuery(function () {
         $(imgForm).append(inputHtml);
         $(imgForm).hide();
         $('#form_sheld').append(imgForm);
-        $('#img_file').on('change', onSelectImage);
+        $('#img_file').on('change', (e) => onSelectImage(e.originalEvent, '', false));
     }
     function switchMultimodalBlocks() {
         const isMultimodal = extension_settings.caption.source === 'multimodal';
@@ -451,4 +491,6 @@ jQuery(function () {
         extension_settings.caption.prompt_ask = $('#caption_prompt_ask').prop('checked');
         saveSettingsDebounced();
     });
+
+    registerSlashCommand('caption', captionCommandCallback, [], '<span class="monospace">quiet=true/false [prompt]</span> - caption an image with an optional prompt and passes the caption down the pipe. Only multimodal sources support custom prompts. Set the "quiet" argument to true to suppress sending a captioned message, default: false.', true, true);
 });
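Taken together, the caption changes register a /caption slash command (e.g. "/caption quiet=true What is shown in this image?") that reuses the existing file-picker flow. A hedged sketch of the equivalent direct call, using only names introduced in this diff; the argument values are illustrative:

// quiet arrives as a named string argument and is parsed with isTrueBoolean();
// the unnamed argument becomes the custom multimodal prompt.
const caption = await captionCommandCallback({ quiet: 'true' }, 'What is shown in this image?');
// With quiet=false (the default), the captioned image is also posted to the chat
// via sendCaptionedMessage(); either way the caption is passed down the pipe.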
@@ -1,9 +1,28 @@
-import { eventSource, event_types, extension_prompt_roles, extension_prompt_types, getCurrentChatId, getRequestHeaders, is_send_press, saveSettingsDebounced, setExtensionPrompt, substituteParams } from '../../../script.js';
-import { getDataBankAttachments, getFileAttachment } from '../../chats.js';
-import { ModuleWorkerWrapper, extension_settings, getContext, modules, renderExtensionTemplateAsync } from '../../extensions.js';
+import {
+    eventSource,
+    event_types,
+    extension_prompt_types,
+    extension_prompt_roles,
+    getCurrentChatId,
+    getRequestHeaders,
+    is_send_press,
+    saveSettingsDebounced,
+    setExtensionPrompt,
+    substituteParams,
+    generateRaw,
+} from '../../../script.js';
+import {
+    ModuleWorkerWrapper,
+    extension_settings,
+    getContext,
+    modules,
+    renderExtensionTemplateAsync,
+    doExtrasFetch, getApiUrl,
+} from '../../extensions.js';
 import { collapseNewlines } from '../../power-user.js';
 import { SECRET_KEYS, secret_state, writeSecret } from '../../secrets.js';
-import { debounce, getStringHash as calculateHash, waitUntilCondition, onlyUnique, splitRecursive, getFileText } from '../../utils.js';
+import { getDataBankAttachments, getFileAttachment } from '../../chats.js';
+import { debounce, getStringHash as calculateHash, waitUntilCondition, onlyUnique, splitRecursive } from '../../utils.js';
 
 const MODULE_NAME = 'vectors';
 
@@ -16,6 +35,10 @@ const settings = {
     include_wi: false,
     togetherai_model: 'togethercomputer/m2-bert-80M-32k-retrieval',
     openai_model: 'text-embedding-ada-002',
+    summarize: false,
+    summarize_sent: false,
+    summary_source: 'main',
+    summary_prompt: 'Pause your roleplay. Summarize the most important parts of the message. Limit yourself to 250 words or less. Your response should include nothing but the summary.',
 
     // For chats
     enabled_chats: false,
@@ -124,6 +147,56 @@ function splitByChunks(items) {
     return chunkedItems;
 }
 
+async function summarizeExtra(hashedMessages) {
+    for (const element of hashedMessages) {
+        try {
+            const url = new URL(getApiUrl());
+            url.pathname = '/api/summarize';
+
+            const apiResult = await doExtrasFetch(url, {
+                method: 'POST',
+                headers: {
+                    'Content-Type': 'application/json',
+                    'Bypass-Tunnel-Reminder': 'bypass',
+                },
+                body: JSON.stringify({
+                    text: element.text,
+                    params: {},
+                }),
+            });
+
+            if (apiResult.ok) {
+                const data = await apiResult.json();
+                element.text = data.summary;
+            }
+        }
+        catch (error) {
+            console.log(error);
+        }
+    }
+
+    return hashedMessages;
+}
+
+async function summarizeMain(hashedMessages) {
+    for (const element of hashedMessages) {
+        element.text = await generateRaw(element.text, '', false, false, settings.summary_prompt);
+    }
+
+    return hashedMessages;
+}
+
+async function summarize(hashedMessages, endpoint = 'main') {
+    switch (endpoint) {
+        case 'main':
+            return await summarizeMain(hashedMessages);
+        case 'extras':
+            return await summarizeExtra(hashedMessages);
+        default:
+            console.error('Unsupported endpoint', endpoint);
+    }
+}
+
 async function synchronizeChat(batchSize = 5) {
     if (!settings.enabled_chats) {
         return -1;
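A hedged sketch of the data shape flowing through the new summarize() helpers; the property names come from the diff, the message text and hash are illustrative:

// synchronizeChat() builds one entry per non-system message before vectorizing.
let hashedMessages = [
    { text: 'A long roleplay message about the journey so far...', hash: 123456789, index: 0 },
];

// 'main' rewrites each element.text via generateRaw() with settings.summary_prompt;
// 'extras' POSTs each element.text to the Extras /api/summarize endpoint instead.
hashedMessages = await summarize(hashedMessages, settings.summary_source);
// element.text now holds the summary; hash and index are left untouched.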
@@ -146,14 +219,20 @@ async function synchronizeChat(batchSize = 5) {
         return -1;
     }
 
-    const hashedMessages = context.chat.filter(x => !x.is_system).map(x => ({ text: String(x.mes), hash: getStringHash(x.mes), index: context.chat.indexOf(x) }));
+    let hashedMessages = context.chat.filter(x => !x.is_system).map(x => ({ text: String(substituteParams(x.mes)), hash: getStringHash(substituteParams(x.mes)), index: context.chat.indexOf(x) }));
     const hashesInCollection = await getSavedHashes(chatId);
 
+    if (settings.summarize) {
+        hashedMessages = await summarize(hashedMessages, settings.summary_source);
+    }
+
     const newVectorItems = hashedMessages.filter(x => !hashesInCollection.includes(x.hash));
     const deletedHashes = hashesInCollection.filter(x => !hashedMessages.some(y => y.hash === x));
 
+
     if (newVectorItems.length > 0) {
         const chunkedBatch = splitByChunks(newVectorItems.slice(0, batchSize));
+
         console.log(`Vectors: Found ${newVectorItems.length} new items. Processing ${batchSize}...`);
         await insertVectorItems(chatId, chunkedBatch);
     }
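Note that both the stored hash and the embedded text are now computed from substituteParams(x.mes), so macros resolve the same way at indexing time and at query time in rearrangeChat(). A hedged illustration; the macro and its expansion are examples, not values from the diff:

const raw = 'Hi, {{user}}!';                        // example message text containing a macro
const before = getStringHash(raw);                  // old behaviour: hash of the raw macro text
const after = getStringHash(substituteParams(raw)); // new behaviour: hash of the expanded text, matching the query-side hash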
@@ -249,7 +328,7 @@ async function processFiles(chat) {
     }
 
     if (dataBankCollectionIds.length) {
-        const queryText = getQueryText(chat);
+        const queryText = await getQueryText(chat);
         await injectDataBankChunks(queryText, dataBankCollectionIds);
     }
 
@@ -282,7 +361,7 @@ async function processFiles(chat) {
             await vectorizeFile(fileText, fileName, collectionId, settings.chunk_size);
         }
 
-        const queryText = getQueryText(chat);
+        const queryText = await getQueryText(chat);
         const fileChunks = await retrieveFileChunks(queryText, collectionId);
 
         message.mes = `${fileChunks}\n\n${message.mes}`;
@@ -390,7 +469,7 @@ async function rearrangeChat(chat) {
         return;
     }
 
-    const queryText = getQueryText(chat);
+    const queryText = await getQueryText(chat);
 
     if (queryText.length === 0) {
         console.debug('Vectors: No text to query');
@@ -408,7 +487,7 @@ async function rearrangeChat(chat) {
         if (retainMessages.includes(message) || !message.mes) {
             continue;
         }
-        const hash = getStringHash(message.mes);
+        const hash = getStringHash(substituteParams(message.mes));
        if (queryHashes.includes(hash) && !insertedHashes.has(hash)) {
             queriedMessages.push(message);
             insertedHashes.add(hash);
@@ -417,7 +496,7 @@ async function rearrangeChat(chat) {
 
     // Rearrange queried messages to match query order
     // Order is reversed because more relevant are at the lower indices
-    queriedMessages.sort((a, b) => queryHashes.indexOf(getStringHash(b.mes)) - queryHashes.indexOf(getStringHash(a.mes)));
+    queriedMessages.sort((a, b) => queryHashes.indexOf(getStringHash(substituteParams(b.mes))) - queryHashes.indexOf(getStringHash(substituteParams(a.mes))));
 
     // Remove queried messages from the original chat array
     for (const message of chat) {
@@ -456,15 +535,21 @@ const onChatEvent = debounce(async () => await moduleWorker.update(), 500);
 /**
  * Gets the text to query from the chat
  * @param {object[]} chat Chat messages
- * @returns {string} Text to query
+ * @returns {Promise<string>} Text to query
  */
-function getQueryText(chat) {
+async function getQueryText(chat) {
     let queryText = '';
     let i = 0;
 
-    for (const message of chat.slice().reverse()) {
-        if (message.mes) {
-            queryText += message.mes + '\n';
+    let hashedMessages = chat.map(x => ({ text: String(substituteParams(x.mes)) }));
+
+    if (settings.summarize && settings.summarize_sent) {
+        hashedMessages = await summarize(hashedMessages, settings.summary_source);
+    }
+
+    for (const message of hashedMessages.slice().reverse()) {
+        if (message.text) {
+            queryText += message.text + '\n';
             i++;
         }
 
@@ -733,7 +818,7 @@ async function onViewStatsClick() {
 
     const chat = getContext().chat;
     for (const message of chat) {
-        if (hashesInCollection.includes(getStringHash(message.mes))) {
+        if (hashesInCollection.includes(getStringHash(substituteParams(message.mes)))) {
             const messageElement = $(`.mes[mesid="${chat.indexOf(message)}"]`);
             messageElement.addClass('vectorized');
         }
@@ -854,6 +939,30 @@ jQuery(async () => {
         saveSettingsDebounced();
     });
 
+    $('#vectors_summarize').prop('checked', settings.summarize).on('input', () => {
+        settings.summarize = !!$('#vectors_summarize').prop('checked');
+        Object.assign(extension_settings.vectors, settings);
+        saveSettingsDebounced();
+    });
+
+    $('#vectors_summarize_user').prop('checked', settings.summarize_sent).on('input', () => {
+        settings.summarize_sent = !!$('#vectors_summarize_user').prop('checked');
+        Object.assign(extension_settings.vectors, settings);
+        saveSettingsDebounced();
+    });
+
+    $('#vectors_summary_source').val(settings.summary_source).on('change', () => {
+        settings.summary_source = String($('#vectors_summary_source').val());
+        Object.assign(extension_settings.vectors, settings);
+        saveSettingsDebounced();
+    });
+
+    $('#vectors_summary_prompt').val(settings.summary_prompt).on('input', () => {
+        settings.summary_prompt = String($('#vectors_summary_prompt').val());
+        Object.assign(extension_settings.vectors, settings);
+        saveSettingsDebounced();
+    });
+
     $('#vectors_message_chunk_size').val(settings.message_chunk_size).on('input', () => {
         settings.message_chunk_size = Number($('#vectors_message_chunk_size').val());
         Object.assign(extension_settings.vectors, settings);
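The four new UI bindings mirror the existing ones: each writes its control's value into settings, copies the result into extension_settings.vectors, and debounces a save. A hedged sketch of the persisted shape with every new field at the default declared in the settings hunk above:

extension_settings.vectors = {
    ...extension_settings.vectors,
    summarize: false,
    summarize_sent: false,
    summary_source: 'main',
    summary_prompt: 'Pause your roleplay. Summarize the most important parts of the message. Limit yourself to 250 words or less. Your response should include nothing but the summary.',
};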
@@ -73,10 +73,12 @@
             <input type="number" id="vectors_query" class="text_pole widthUnset" min="1" max="99" />
         </div>
 
-        <label class="checkbox_label" for="vectors_include_wi" title="Query results can activate World Info entries.">
-            <input id="vectors_include_wi" type="checkbox" class="checkbox">
-            Include in World Info Scanning
-        </label>
+        <div class="flex-container">
+            <label class="checkbox_label expander" for="vectors_include_wi" title="Query results can activate World Info entries.">
+                <input id="vectors_include_wi" type="checkbox" class="checkbox">
+                Include in World Info Scanning
+            </label>
+        </div>
 
         <hr>
 
@@ -93,7 +95,7 @@
     <div class="flex justifyCenter" title="These settings apply to files attached directly to messages.">
         <span>Message attachments</span>
     </div>
-    <div class="flex-container marginBot5">
+    <div class="flex-container">
         <div class="flex1" title="Only files past this size will be vectorized.">
             <label for="vectors_size_threshold">
                 <small>Size threshold (KB)</small>
@@ -177,6 +179,8 @@
         Enabled for chat messages
     </label>
 
+    <hr>
+
     <div id="vectors_chats_settings">
         <div id="vectors_advanced_settings">
             <label for="vectors_template">
@@ -219,6 +223,34 @@
                 <input type="number" id="vectors_insert" class="text_pole widthUnset" min="1" max="9999" />
             </div>
         </div>
+        <hr class="m-b-1">
+        <div class="flex-container flexFlowColumn">
+            <div class="flex-container alignitemscenter justifyCenter">
+                <i class="fa-solid fa-flask" title="Summarization for vectors is an experimental feature that may improve vectors or may worsen them. Use at your own discretion."></i>
+                <span>Vector Summarization</span>
+            </div>
+            <label class="checkbox_label expander" for="vectors_summarize" title="Summarize chat messages before generating embeddings.">
+                <input id="vectors_summarize" type="checkbox" class="checkbox">
+                Summarize chat messages for vector generation
+            </label>
+            <i class="failure">Warning: This will slow down vector generation drastically, as all messages have to be summarized first.</i>
+
+            <label class="checkbox_label expander" for="vectors_summarize_user" title="Summarize sent chat messages before generating embeddings.">
+                <input id="vectors_summarize_user" type="checkbox" class="checkbox">
+                Summarize chat messages when sending
+            </label>
+            <i class="failure">Warning: This might cause your sent messages to take a bit to process and slow down response time.</i>
+
+            <label for="vectors_summary_source">Summarize with:</label>
+            <select id="vectors_summary_source" class="text_pole">
+                <option value="main">Main API</option>
+                <option value="extras">Extras API</option>
+            </select>
+
+            <label for="vectors_summary_prompt">Summary Prompt:</label>
+            <small>Only used when Main API is selected.</small>
+            <textarea id="vectors_summary_prompt" class="text_pole textarea_compact" rows="6" placeholder="This prompt will be sent to AI to request the summary generation."></textarea>
+        </div>
     </div>
     <small>
         Old messages are vectorized gradually as you chat.
@@ -2376,8 +2376,10 @@ async function doMesCut(_, text) {
 
     let totalMesToCut = (range.end - range.start) + 1;
     let mesIDToCut = range.start;
+    let cutText = '';
 
     for (let i = 0; i < totalMesToCut; i++) {
+        cutText += (chat[mesIDToCut]?.mes || '') + '\n';
         let done = false;
         let mesToCut = $('#chat').find(`.mes[mesid=${mesIDToCut}]`);
 
@@ -2398,6 +2400,8 @@ async function doMesCut(_, text) {
             await delay(1);
         }
     }
+
+    return cutText;
 }
 
 async function doDelMode(_, text) {
@@ -3520,7 +3524,7 @@ $(document).ready(() => {
     registerSlashCommand('newchat', doNewChat, [], '– start a new chat with current character', true, true);
     registerSlashCommand('random', doRandomChat, [], '– start a new chat with a random character', true, true);
     registerSlashCommand('delmode', doDelMode, ['del'], '<span class="monospace">(optional number)</span> – enter message deletion mode, and auto-deletes last N messages if numeric argument is provided', true, true);
-    registerSlashCommand('cut', doMesCut, [], '<span class="monospace">(number or range)</span> – cuts the specified message or continuous chunk from the chat, e.g. <tt>/cut 0-10</tt>. Ranges are inclusive!', true, true);
+    registerSlashCommand('cut', doMesCut, [], '<span class="monospace">(number or range)</span> – cuts the specified message or continuous chunk from the chat, e.g. <tt>/cut 0-10</tt>. Ranges are inclusive! Returns the text of cut messages separated by a newline.', true, true);
     registerSlashCommand('resetpanels', doResetPanels, ['resetui'], '– resets UI panels to original state.', true, true);
     registerSlashCommand('bgcol', setAvgBG, [], '– WIP test of auto-bg avg coloring', true, true);
     registerSlashCommand('theme', setThemeCallback, [], '<span class="monospace">(name)</span> – sets a UI theme by name', true, true);
@@ -184,7 +184,7 @@ function onMancerModelSelect() {
     $('#api_button_textgenerationwebui').trigger('click');
 
     const limits = mancerModels.find(x => x.id === modelId)?.limits;
-    setGenerationParamsFromPreset({ max_length: limits.context, genamt: limits.completion });
+    setGenerationParamsFromPreset({ max_length: limits.context, genamt: limits.completion }, true);
 }
 
 function onTogetherModelSelect() {
@@ -461,7 +461,7 @@ jQuery(function () {
         searchInputPlaceholder: 'Search models...',
         searchInputCssClass: 'text_pole',
         width: '100%',
-        templateResult: getDreamGenModelTemplate,
+        templateResult: getDreamGenModelTemplate,
     });
     $('#openrouter_model').select2({
         placeholder: 'Select a model',