Merge branch 'staging' into persona-improvements

Author: Wolfsblvt
Date: 2025-01-24 20:42:04 +01:00
53 changed files with 1784 additions and 619 deletions

View File

@@ -11,6 +11,7 @@ import { debounce_timeout } from './constants.js';
import { renderTemplateAsync } from './templates.js';
import { Popup } from './popup.js';
import { t } from './i18n.js';
import { isMobile } from './RossAscends-mods.js';
function debouncePromise(func, delay) {
let timeoutId;
@@ -1562,6 +1563,7 @@ class PromptManager {
listItemHtml += `
<li class="${prefix}prompt_manager_prompt ${draggableClass} ${enabledClass} ${markerClass} ${importantClass}" data-pm-identifier="${escapeHtml(prompt.identifier)}">
<span class="drag-handle">☰</span>
<span class="${prefix}prompt_manager_prompt_name" data-pm-name="${encodedName}">
${isMarkerPrompt ? '<span class="fa-fw fa-solid fa-thumb-tack" title="Marker"></span>' : ''}
${isSystemPrompt ? '<span class="fa-fw fa-solid fa-square-poll-horizontal" title="Global Prompt"></span>' : ''}
@@ -1741,6 +1743,7 @@ class PromptManager {
makeDraggable() {
$(`#${this.configuration.prefix}prompt_manager_list`).sortable({
delay: this.configuration.sortableDelay,
handle: isMobile() ? '.drag-handle' : null,
items: `.${this.configuration.prefix}prompt_manager_prompt_draggable`,
update: (event, ui) => {
const promptOrder = this.getPromptOrderForCharacter(this.activeCharacter);
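Editor's aside (not part of the commit): jQuery UI's sortable "handle" option restricts where a drag can start, which is why the mobile branch passes '.drag-handle' — touches elsewhere keep scrolling the list instead of reordering it. A minimal sketch with assumed selectors:
// Hedged sketch: dragging starts only from the ☰ handle element.
$('#example_list').sortable({
handle: '.drag-handle',
items: '> li',
});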

View File

@@ -59,6 +59,16 @@ const hash_derivations = {
// Tulu-3-8B
// Tulu-3-70B
'Tulu',
// DeepSeek V2.5
'54d400beedcd17f464e10063e0577f6f798fa896266a912d8a366f8a2fcc0bca': 'DeepSeek-V2.5',
// DeepSeek R1
'b6835114b7303ddd78919a82e4d9f7d8c26ed0d7dfc36beeb12d524f6144eab1': 'DeepSeek-V2.5',
};
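Editor's aside (assumption, not stated in the diff): the 64-hex-character keys look like SHA-256 digests of a model's chat template, so a lookup against this map would resemble the sketch below.
// Hedged sketch: derive a template name from a chat template string via SHA-256.
async function deriveTemplateName(chatTemplate, derivations) {
const bytes = new TextEncoder().encode(chatTemplate.trim());
const digest = await crypto.subtle.digest('SHA-256', bytes);
const hex = [...new Uint8Array(digest)].map(b => b.toString(16).padStart(2, '0')).join('');
return derivations[hex] ?? null; // e.g. 'DeepSeek-V2.5' for the hashes added above
}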
const substr_derivations = {

View File

@@ -11,6 +11,7 @@ import {
getCurrentChatId,
getRequestHeaders,
hideSwipeButtons,
name1,
name2,
reloadCurrentChat,
saveChatDebounced,
@@ -21,6 +22,7 @@ import {
chat_metadata,
neutralCharacterName,
updateChatMetadata,
system_message_types,
} from '../script.js';
import { selected_group } from './group-chats.js';
import { power_user } from './power-user.js';
@@ -34,6 +36,7 @@ import {
humanFileSize,
saveBase64AsFile,
extractTextFromOffice,
download,
} from './utils.js';
import { extension_settings, renderExtensionTemplateAsync, saveMetadataDebounced } from './extensions.js';
import { POPUP_RESULT, POPUP_TYPE, Popup, callGenericPopup } from './popup.js';
@@ -41,6 +44,7 @@ import { ScraperManager } from './scrapers.js';
import { DragAndDropHandler } from './dragdrop.js';
import { renderTemplateAsync } from './templates.js';
import { t } from './i18n.js';
import { humanizedDateTime } from './RossAscends-mods.js';
/**
* @typedef {Object} FileAttachment
@@ -1437,6 +1441,19 @@ jQuery(function () {
await viewMessageFile(messageId);
});
$(document).on('click', '.assistant_note_export', async function () {
const chatToSave = [
{
user_name: name1,
character_name: name2,
chat_metadata: chat_metadata,
},
...chat.filter(x => x?.extra?.type !== system_message_types.ASSISTANT_NOTE),
];
download(JSON.stringify(chatToSave, null, 4), `Assistant - ${humanizedDateTime()}.json`, 'application/json');
});
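Editor's illustration (field values assumed): the exported array starts with a header object and is followed by the chat messages, minus any assistant notes, so the saved file looks roughly like this.
// Hypothetical export produced by the handler above.
const exampleExport = [
{ user_name: 'User', character_name: 'Assistant', chat_metadata: {} },
{ name: 'User', is_user: true, mes: 'Hello!' },
{ name: 'Assistant', is_user: false, mes: 'Hi there.' },
];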
// Do not change. #attachFile is added by extension.
$(document).on('click', '#attachFile', function () {
$('#file_form_input').trigger('click');

View File

@@ -38,7 +38,8 @@ export let modules = [];
let activeExtensions = new Set();
const getApiUrl = () => extension_settings.apiUrl;
const sortManifests = (a, b) => parseInt(a.loading_order) - parseInt(b.loading_order) || String(a.display_name).localeCompare(String(b.display_name));
const sortManifestsByOrder = (a, b) => parseInt(a.loading_order) - parseInt(b.loading_order) || String(a.display_name).localeCompare(String(b.display_name));
const sortManifestsByName = (a, b) => String(a.display_name).localeCompare(String(b.display_name)) || parseInt(a.loading_order) - parseInt(b.loading_order);
let connectedToApi = false;
/**
@@ -355,7 +356,7 @@ async function getManifests(names) {
* @returns {Promise<void>}
*/
async function activateExtensions() {
const extensions = Object.entries(manifests).sort((a, b) => sortManifests(a[1], b[1]));
const extensions = Object.entries(manifests).sort((a, b) => sortManifestsByOrder(a[1], b[1]));
const promises = [];
for (let entry of extensions) {
@@ -712,7 +713,10 @@ async function showExtensionsDetails() {
htmlExternal.append(htmlLoading);
const extensions = Object.entries(manifests).sort((a, b) => sortManifests(a[1], b[1])).map(getExtensionData);
const sortOrderKey = 'extensions_sortByName';
const sortByName = localStorage.getItem(sortOrderKey) === 'true';
const sortFn = sortByName ? sortManifestsByName : sortManifestsByOrder;
const extensions = Object.entries(manifests).sort((a, b) => sortFn(a[1], b[1])).map(getExtensionData);
extensions.forEach(value => {
const { isExternal, extensionHtml } = value;
@@ -729,7 +733,6 @@ async function showExtensionsDetails() {
/** @type {import('./popup.js').CustomPopupButton} */
const updateAllButton = {
text: t`Update all`,
appendAtEnd: true,
action: async () => {
requiresReload = true;
await autoUpdateExtensions(true);
@@ -737,13 +740,23 @@ async function showExtensionsDetails() {
},
};
/** @type {import('./popup.js').CustomPopupButton} */
const sortOrderButton = {
text: sortByName ? t`Sort: Display Name` : t`Sort: Loading Order`,
action: async () => {
abortController.abort();
localStorage.setItem(sortOrderKey, sortByName ? 'false' : 'true');
await showExtensionsDetails();
},
};
let waitingForSave = false;
const popup = new Popup(html, POPUP_TYPE.TEXT, '', {
okButton: t`Close`,
wide: true,
large: true,
customButtons: [updateAllButton],
customButtons: [sortOrderButton, updateAllButton],
allowVerticalScrolling: true,
onClosing: async () => {
if (waitingForSave) {
@@ -762,7 +775,7 @@ async function showExtensionsDetails() {
});
popupPromise = popup.show();
popup.content.scrollTop = initialScrollTop;
checkForUpdatesManual(abortController.signal).finally(() => htmlLoading.remove());
checkForUpdatesManual(sortFn, abortController.signal).finally(() => htmlLoading.remove());
} catch (error) {
toastr.error(t`Error loading extensions. See browser console for details.`);
console.error(error);
@@ -1073,12 +1086,13 @@ function processVersionCheckQueue() {
/**
* Performs a manual check for updates on all 3rd-party extensions.
* @param {function} sortFn Sort function
* @param {AbortSignal} abortSignal Signal to abort the operation
* @returns {Promise<any[]>}
*/
async function checkForUpdatesManual(abortSignal) {
async function checkForUpdatesManual(sortFn, abortSignal) {
const promises = [];
for (const id of Object.keys(manifests).filter(x => x.startsWith('third-party')).sort((a, b) => sortManifests(manifests[a], manifests[b]))) {
for (const id of Object.keys(manifests).filter(x => x.startsWith('third-party')).sort((a, b) => sortFn(manifests[a], manifests[b]))) {
const externalId = id.replace('third-party', '');
const promise = enqueueVersionCheck(async () => {
try {
@@ -1223,7 +1237,7 @@ export async function runGenerationInterceptors(chat, contextSize, type) {
exitImmediately = immediately;
};
for (const manifest of Object.values(manifests).filter(x => x.generate_interceptor).sort((a, b) => sortManifests(a, b))) {
for (const manifest of Object.values(manifests).filter(x => x.generate_interceptor).sort((a, b) => sortManifestsByOrder(a, b))) {
const interceptorKey = manifest.generate_interceptor;
if (typeof globalThis[interceptorKey] === 'function') {
try {

View File

@@ -54,6 +54,8 @@
<option data-type="anthropic" value="claude-3-sonnet-20240229">claude-3-sonnet-20240229</option>
<option data-type="anthropic" value="claude-3-haiku-20240307">claude-3-haiku-20240307</option>
<option data-type="google" value="gemini-2.0-flash-exp">gemini-2.0-flash-exp</option>
<option data-type="google" value="gemini-2.0-flash-thinking-exp">gemini-2.0-flash-thinking-exp</option>
<option data-type="google" value="gemini-2.0-flash-thinking-exp-01-21">gemini-2.0-flash-thinking-exp-01-21</option>
<option data-type="google" value="gemini-2.0-flash-thinking-exp-1219">gemini-2.0-flash-thinking-exp-1219</option>
<option data-type="google" value="gemini-1.5-flash">gemini-1.5-flash</option>
<option data-type="google" value="gemini-1.5-flash-latest">gemini-1.5-flash-latest</option>

View File

@@ -81,6 +81,7 @@ import { t } from './i18n.js';
export {
selected_group,
openGroupId,
is_group_automode_enabled,
hideMutedSprites,
is_group_generating,

View File

@@ -1922,7 +1922,7 @@ async function sendOpenAIRequest(type, messages, signal) {
}
// Proxy is only supported for Claude, OpenAI, Mistral, Google MakerSuite, and DeepSeek
if (oai_settings.reverse_proxy && [chat_completion_sources.CLAUDE, chat_completion_sources.OPENAI, chat_completion_sources.MISTRALAI, chat_completion_sources.MAKERSUITE].includes(oai_settings.chat_completion_source)) {
if (oai_settings.reverse_proxy && [chat_completion_sources.CLAUDE, chat_completion_sources.OPENAI, chat_completion_sources.MISTRALAI, chat_completion_sources.MAKERSUITE, chat_completion_sources.DEEPSEEK].includes(oai_settings.chat_completion_source)) {
await validateReverseProxy();
generate_data['reverse_proxy'] = oai_settings.reverse_proxy;
generate_data['proxy_password'] = oai_settings.proxy_password;
@@ -2030,6 +2030,16 @@ async function sendOpenAIRequest(type, messages, signal) {
// https://api-docs.deepseek.com/api/create-chat-completion
if (isDeepSeek) {
generate_data.top_p = generate_data.top_p || Number.EPSILON;
if (generate_data.model.endsWith('-reasoner')) {
delete generate_data.top_p;
delete generate_data.temperature;
delete generate_data.frequency_penalty;
delete generate_data.presence_penalty;
delete generate_data.top_logprobs;
delete generate_data.logprobs;
delete generate_data.logit_bias;
}
}
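Editor's note (per DeepSeek's public API docs, as understood): the reasoner model ignores the usual sampling parameters and rejects logprobs, so after the deletions above a request for it carries only the essentials, roughly:
// Assumed resulting payload shape for deepseek-reasoner (sketch, not the actual generate_data).
const reasonerRequest = {
model: 'deepseek-reasoner',
messages: [{ role: 'user', content: 'Summarize the plot so far.' }],
max_tokens: 2048,
stream: true,
};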
if ((isOAI || isOpenRouter || isMistral || isCustom || isCohere || isNano) && oai_settings.seed >= 0) {
@@ -2085,6 +2095,7 @@ async function sendOpenAIRequest(type, messages, signal) {
let text = '';
const swipes = [];
const toolCalls = [];
const state = {};
while (true) {
const { done, value } = await reader.read();
if (done) return;
@@ -2095,9 +2106,9 @@ async function sendOpenAIRequest(type, messages, signal) {
if (Array.isArray(parsed?.choices) && parsed?.choices?.[0]?.index > 0) {
const swipeIndex = parsed.choices[0].index - 1;
swipes[swipeIndex] = (swipes[swipeIndex] || '') + getStreamingReply(parsed);
swipes[swipeIndex] = (swipes[swipeIndex] || '') + getStreamingReply(parsed, state);
} else {
text += getStreamingReply(parsed);
text += getStreamingReply(parsed, state);
}
ToolManager.parseToolCalls(toolCalls, parsed);
@@ -2129,14 +2140,27 @@ async function sendOpenAIRequest(type, messages, signal) {
}
}
function getStreamingReply(data) {
/**
* Extracts the reply from the response data from a chat completions-like source
* @param {object} data Response data from the chat completions-like source
* @param {object} state Additional state to keep track of
* @returns {string} The reply extracted from the response data
*/
function getStreamingReply(data, state) {
if (oai_settings.chat_completion_source === chat_completion_sources.CLAUDE) {
return data?.delta?.text || '';
} else if (oai_settings.chat_completion_source === chat_completion_sources.MAKERSUITE) {
return data?.candidates?.[0]?.content?.parts?.filter(x => oai_settings.show_thoughts || !x.thought)?.map(x => x.text)?.filter(x => x)?.join('\n\n') || '';
} else if (oai_settings.chat_completion_source === chat_completion_sources.COHERE) {
return data?.delta?.message?.content?.text || data?.delta?.message?.tool_plan || '';
} else {
} else if (oai_settings.chat_completion_source === chat_completion_sources.DEEPSEEK) {
const hadThoughts = state.hadThoughts;
const thoughts = data.choices?.filter(x => oai_settings.show_thoughts || !x?.delta?.reasoning_content)?.[0]?.delta?.reasoning_content || '';
const content = data.choices?.[0]?.delta?.content || '';
state.hadThoughts = !!thoughts;
const separator = hadThoughts && !thoughts ? '\n\n' : '';
return [thoughts, separator, content].filter(x => x).join('\n\n');
} else {
return data.choices?.[0]?.delta?.content ?? data.choices?.[0]?.message?.content ?? data.choices?.[0]?.text ?? '';
}
}
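Editor's illustration (chunk contents assumed): the DeepSeek branch distinguishes "thinking" deltas from answer deltas, and the state object remembers whether thoughts were seen so a separator is inserted when the first answer chunk follows them.
// Hypothetical streaming chunks from deepseek-reasoner:
const thinkingDelta = { choices: [{ delta: { reasoning_content: 'Considering the question…', content: null } }] };
const answerDelta = { choices: [{ delta: { reasoning_content: null, content: 'Here is the answer.' } }] };
// With show_thoughts enabled, getStreamingReply(thinkingDelta, state) returns the reasoning text and sets state.hadThoughts;
// getStreamingReply(answerDelta, state) then prepends the blank-line separator before the answer text.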
@@ -3346,7 +3370,7 @@ async function getStatusOpen() {
chat_completion_source: oai_settings.chat_completion_source,
};
if (oai_settings.reverse_proxy && [chat_completion_sources.CLAUDE, chat_completion_sources.OPENAI, chat_completion_sources.MISTRALAI, chat_completion_sources.MAKERSUITE].includes(oai_settings.chat_completion_source)) {
if (oai_settings.reverse_proxy && [chat_completion_sources.CLAUDE, chat_completion_sources.OPENAI, chat_completion_sources.MISTRALAI, chat_completion_sources.MAKERSUITE, chat_completion_sources.DEEPSEEK].includes(oai_settings.chat_completion_source)) {
await validateReverseProxy();
}
@@ -4204,7 +4228,7 @@ async function onModelChange() {
$('#openai_max_context').attr('max', max_32k);
} else if (value.includes('gemini-1.5-pro') || value.includes('gemini-exp-1206')) {
$('#openai_max_context').attr('max', max_2mil);
} else if (value.includes('gemini-1.5-flash') || value.includes('gemini-2.0-flash-exp')) {
} else if (value.includes('gemini-1.5-flash') || value.includes('gemini-2.0-flash-exp') || value.includes('gemini-2.0-flash-thinking-exp')) {
$('#openai_max_context').attr('max', max_1mil);
} else if (value.includes('gemini-1.0-pro') || value === 'gemini-pro') {
$('#openai_max_context').attr('max', max_32k);
@@ -4488,7 +4512,7 @@ async function onModelChange() {
if (oai_settings.chat_completion_source === chat_completion_sources.DEEPSEEK) {
if (oai_settings.max_context_unlocked) {
$('#openai_max_context').attr('max', unlocked_max);
} else if (oai_settings.deepseek_model == 'deepseek-chat') {
} else if (['deepseek-reasoner', 'deepseek-chat'].includes(oai_settings.deepseek_model)) {
$('#openai_max_context').attr('max', max_64k);
} else if (oai_settings.deepseek_model == 'deepseek-coder') {
$('#openai_max_context').attr('max', max_16k);
@@ -4725,7 +4749,7 @@ async function onConnectButtonClick(e) {
await writeSecret(SECRET_KEYS.DEEPSEEK, api_key_deepseek);
}
if (!secret_state[SECRET_KEYS.DEEPSEEK]) {
if (!secret_state[SECRET_KEYS.DEEPSEEK] && !oai_settings.reverse_proxy) {
console.log('No secret key saved for DeepSeek');
return;
}
@@ -4901,6 +4925,8 @@ export function isImageInliningSupported() {
const visionSupportedModels = [
'gpt-4-vision',
'gemini-2.0-flash-thinking-exp-1219',
'gemini-2.0-flash-thinking-exp-01-21',
'gemini-2.0-flash-thinking-exp',
'gemini-2.0-flash-exp',
'gemini-1.5-flash',
'gemini-1.5-flash-latest',

View File

@@ -2536,7 +2536,7 @@ async function loadUntilMesId(mesId) {
let target;
while (getFirstDisplayedMessageId() > mesId && getFirstDisplayedMessageId() !== 0) {
showMoreMessages();
await showMoreMessages();
await delay(1);
target = $('#chat').find(`.mes[mesid=${mesId}]`);

View File

@@ -1968,8 +1968,8 @@ export function initDefaultSlashCommands() {
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'chat-render',
helpString: 'Renders a specified number of messages into the chat window. Displays all messages if no argument is provided.',
callback: (args, number) => {
showMoreMessages(number && !isNaN(Number(number)) ? Number(number) : Number.MAX_SAFE_INTEGER);
callback: async (args, number) => {
await showMoreMessages(number && !isNaN(Number(number)) ? Number(number) : Number.MAX_SAFE_INTEGER);
if (isTrueBoolean(String(args?.scroll ?? ''))) {
$('#chat').scrollTop(0);
}

View File

@@ -220,6 +220,21 @@ async function* parseStreamData(json) {
}
return;
}
else if (typeof json.choices[0].delta.reasoning_content === 'string' && json.choices[0].delta.reasoning_content.length > 0) {
for (let j = 0; j < json.choices[0].delta.reasoning_content.length; j++) {
const str = json.choices[0].delta.reasoning_content[j];
const isLastSymbol = j === json.choices[0].delta.reasoning_content.length - 1;
const choiceClone = structuredClone(json.choices[0]);
choiceClone.delta.reasoning_content = str;
choiceClone.delta.content = isLastSymbol ? choiceClone.delta.content : '';
const choices = [choiceClone];
yield {
data: { ...json, choices },
chunk: str,
};
}
return;
}
else if (typeof json.choices[0].delta.content === 'string' && json.choices[0].delta.content.length > 0) {
for (let j = 0; j < json.choices[0].delta.content.length; j++) {
const str = json.choices[0].delta.content[j];
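Editor's trace of the new reasoning_content branch above (input values assumed): given a delta with reasoning_content 'Hi' and content '!', the loop emits one event per reasoning character and only passes the regular content through on the last one.
// Yielded events (sketch):
//   { chunk: 'H', data: { ...json, choices: [{ delta: { reasoning_content: 'H', content: '' } }] } }
//   { chunk: 'i', data: { ...json, choices: [{ delta: { reasoning_content: 'i', content: '!' } }] } }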

View File

@@ -12,6 +12,7 @@ import {
extension_prompts,
Generate,
generateQuietPrompt,
getCharacters,
getCurrentChatId,
getRequestHeaders,
getThumbnailUrl,
@@ -55,7 +56,7 @@ import { MacrosParser } from './macros.js';
import { oai_settings } from './openai.js';
import { callGenericPopup, Popup, POPUP_RESULT, POPUP_TYPE } from './popup.js';
import { power_user, registerDebugFunction } from './power-user.js';
import { isMobile, shouldSendOnEnter } from './RossAscends-mods.js';
import { humanizedDateTime, isMobile, shouldSendOnEnter } from './RossAscends-mods.js';
import { ScraperManager } from './scrapers.js';
import { executeSlashCommands, executeSlashCommandsWithOptions, registerSlashCommand } from './slash-commands.js';
import { SlashCommand } from './slash-commands/SlashCommand.js';
@@ -65,7 +66,7 @@ import { tag_map, tags } from './tags.js';
import { textgenerationwebui_settings } from './textgen-settings.js';
import { tokenizers, getTextTokens, getTokenCount, getTokenCountAsync, getTokenizerModel } from './tokenizers.js';
import { ToolManager } from './tool-calling.js';
import { timestampToMoment } from './utils.js';
import { timestampToMoment, uuidv4 } from './utils.js';
export function getContext() {
return {
@@ -167,6 +168,9 @@ export function getContext() {
chatCompletionSettings: oai_settings,
textCompletionSettings: textgenerationwebui_settings,
powerUserSettings: power_user,
getCharacters,
uuidv4,
humanizedDateTime,
};
}

View File

@@ -1,3 +1,9 @@
<div>
<b data-i18n="Note:">Note:</b> <span data-i18n="this chat is temporary and will be deleted as soon as you leave it.">this chat is temporary and will be deleted as soon as you leave it.</span>
<div data-type="assistant_note">
<div>
<b data-i18n="Note:">Note:</b> <span data-i18n="this chat is temporary and will be deleted as soon as you leave it.">this chat is temporary and will be deleted as soon as you leave it.</span>
<span>Click the button to save it as a file.</span>
</div>
<div class="assistant_note_export menu_button menu_button_icon" title="Export as JSONL">
<i class="fa-solid fa-file-export"></i>
</div>
</div>

View File

@@ -139,7 +139,7 @@
<div class="">{{finalPromptTokens}}</div>
</div>
<div class="flex-container wide100p">
<div class="flex1"><span data-i18n="Max Context">Max Context</span>&nbsp;<small data-i18n="(Context Size - Response Length)">(Context Size - Response Length)</small><span data-i18n=":">:</span></div>
<div class="flex1"><span data-i18n="Max Context">Max Context</span>&nbsp;<small data-i18n="(Context Size - Response Length)">(Context Size - Response Length)</small>:</div>
<div class="">{{thisPrompt_max_context}}</div>
</div>
</div>

View File

@@ -319,8 +319,6 @@ export async function loadFeatherlessModels(data) {
return;
}
// Sort the data by model id (default A-Z)
data.sort((a, b) => a.id.localeCompare(b.id));
originalModels = data; // Store the original data for search
featherlessModels = data;
@@ -334,10 +332,8 @@ export async function loadFeatherlessModels(data) {
// Retrieve the stored number of items per page or default to 10
const perPage = Number(localStorage.getItem(storageKey)) || 10;
// Initialize pagination with the full set of models
const currentModelIndex = data.findIndex(x => x.id === textgen_settings.featherless_model);
featherlessCurrentPage = currentModelIndex >= 0 ? (currentModelIndex / perPage) + 1 : 1;
setupPagination(originalModels, perPage);
// Initialize pagination
applyFiltersAndSort();
// Function to set up pagination (also used for filtered results)
function setupPagination(models, perPage, pageNumber = featherlessCurrentPage) {
@@ -383,7 +379,7 @@ export async function loadFeatherlessModels(data) {
const dateAddedDiv = document.createElement('div');
dateAddedDiv.classList.add('model-date-added');
dateAddedDiv.textContent = `Added On: ${new Date(model.updated_at).toLocaleDateString()}`;
dateAddedDiv.textContent = `Added On: ${new Date(model.created * 1000).toLocaleDateString()}`;
detailsContainer.appendChild(modelClassDiv);
detailsContainer.appendChild(contextLengthDiv);
@@ -472,6 +468,7 @@ export async function loadFeatherlessModels(data) {
featherlessTop = await fetchFeatherlessStats();
}
const featherlessIds = featherlessTop.map(stat => stat.id);
if (selectedCategory === 'New') {
featherlessNew = await fetchFeatherlessNew();
}
@@ -493,7 +490,7 @@ export async function loadFeatherlessModels(data) {
return matchesSearch && matchesClass && matchesNew;
}
else {
return matchesSearch;
return matchesSearch && matchesClass;
}
});
@@ -502,11 +499,14 @@ export async function loadFeatherlessModels(data) {
} else if (selectedSortOrder === 'desc') {
filteredModels.sort((a, b) => b.id.localeCompare(a.id));
} else if (selectedSortOrder === 'date_asc') {
filteredModels.sort((a, b) => a.updated_at.localeCompare(b.updated_at));
filteredModels.sort((a, b) => a.created - b.created);
} else if (selectedSortOrder === 'date_desc') {
filteredModels.sort((a, b) => b.updated_at.localeCompare(a.updated_at));
filteredModels.sort((a, b) => b.created - a.created);
}
const currentModelIndex = filteredModels.findIndex(x => x.id === textgen_settings.featherless_model);
featherlessCurrentPage = currentModelIndex >= 0 ? (currentModelIndex / perPage) + 1 : 1;
setupPagination(filteredModels, Number(localStorage.getItem(storageKey)) || perPage, featherlessCurrentPage);
}
@@ -528,7 +528,7 @@ async function fetchFeatherlessStats() {
}
async function fetchFeatherlessNew() {
const response = await fetch('https://api.featherless.ai/feather/models?sort=-created_at&perPage=10');
const response = await fetch('https://api.featherless.ai/feather/models?sort=-created_at&perPage=20');
const data = await response.json();
return data.items;
}

View File

@@ -1231,7 +1231,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
'top_p': settings.top_p,
'typical_p': settings.typical_p,
'typical': settings.typical_p,
'sampler_seed': settings.seed,
'sampler_seed': settings.seed >= 0 ? settings.seed : undefined,
'min_p': settings.min_p,
'repetition_penalty': settings.rep_pen,
'frequency_penalty': settings.freq_pen,
@@ -1294,7 +1294,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
'temperature_last': (settings.type === OOBA || settings.type === APHRODITE || settings.type == TABBY) ? settings.temperature_last : undefined,
'speculative_ngram': settings.type === TABBY ? settings.speculative_ngram : undefined,
'do_sample': settings.type === OOBA ? settings.do_sample : undefined,
'seed': settings.seed,
'seed': settings.seed >= 0 ? settings.seed : undefined,
'guidance_scale': cfgValues?.guidanceScale?.value ?? settings.guidance_scale ?? 1,
'negative_prompt': cfgValues?.negativePrompt ?? substituteParams(settings.negative_prompt) ?? '',
'grammar_string': settings.grammar_string,

View File

@@ -391,6 +391,26 @@ export function getStringHash(str, seed = 0) {
return 4294967296 * (2097151 & h2) + (h1 >>> 0);
}
/**
* Copy text to clipboard. Use navigator.clipboard.writeText if available, otherwise use document.execCommand.
* @param {string} text - The text to copy to the clipboard.
* @returns {Promise<void>} A promise that resolves when the text has been copied to the clipboard.
*/
export function copyText(text) {
if (navigator.clipboard) {
return navigator.clipboard.writeText(text);
}
const parent = document.querySelector('dialog[open]:last-of-type') ?? document.body;
const textArea = document.createElement('textarea');
textArea.value = text;
parent.appendChild(textArea);
textArea.focus();
textArea.select();
document.execCommand('copy');
parent.removeChild(textArea);
}
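A usage sketch for the new helper (editor's addition): awaiting works for both paths, since the execCommand fallback simply returns undefined.
async function onCopyClick() {
try {
await copyText('Hello from the clipboard helper');
} catch (err) {
console.error('Copy to clipboard failed', err);
}
}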
/**
* Map of debounced functions to their timers.
* Weak map is used to avoid memory leaks.