Merge remote-tracking branch 'upstream/staging' into staging
@@ -382,7 +382,9 @@ function RA_autoconnect(PrevApi) {
             }
             break;
         case 'textgenerationwebui':
-            if (textgen_settings.type === textgen_types.MANCER && secret_state[SECRET_KEYS.MANCER]) {
+            if ((textgen_settings.type === textgen_types.MANCER && secret_state[SECRET_KEYS.MANCER]) ||
+                (textgen_settings.type === textgen_types.TOGETHERAI && secret_state[SECRET_KEYS.TOGETHERAI])
+            ) {
                 $('#api_button_textgenerationwebui').trigger('click');
             }
             else if (api_server_textgenerationwebui && isValidUrl(api_server_textgenerationwebui)) {
@@ -798,6 +798,10 @@ async function qrDeleteCallback(args, label) {
     }

     const idx = preset.quickReplySlots.findIndex(x => x.label == label);
+    if (idx === -1) {
+        toastr.warning('Confirm you are using proper case sensitivity!', `QR with label '${label}' not found`);
+        return '';
+    };
     preset.quickReplySlots.splice(idx, 1);
     preset.numberOfSlots--;
     await fetch('/savequickreply', {
@@ -25,6 +25,12 @@
                 <a href="javascript:;" class="notes-link"><span class="note-link-span" title="Will generate a new random seed in SillyTavern that is then used in the ComfyUI workflow.">?</span></a>
             </li>
         </ul>
+        <div>Custom</div>
+        <div class="sd_comfy_workflow_editor_placeholder_actions">
+            <span id="sd_comfy_workflow_editor_placeholder_add" title="Add custom placeholder">+</span>
+        </div>
+        <ul class="sd_comfy_workflow_editor_placeholder_list" id="sd_comfy_workflow_editor_placeholder_list_custom">
+        </ul>
     </div>
 </div>
 </div>
@@ -16,6 +16,7 @@ import {
     user_avatar,
     getCharacterAvatar,
     formatCharacterAvatar,
+    substituteParams,
 } from '../../../script.js';
 import { getApiUrl, getContext, extension_settings, doExtrasFetch, modules, renderExtensionTemplate } from '../../extensions.js';
 import { selected_group } from '../../group-chats.js';
@@ -24,6 +25,7 @@ import { getMessageTimeStamp, humanizedDateTime } from '../../RossAscends-mods.j
 import { SECRET_KEYS, secret_state } from '../../secrets.js';
 import { getNovelUnlimitedImageGeneration, getNovelAnlas, loadNovelSubscriptionData } from '../../nai-settings.js';
 import { getMultimodalCaption } from '../shared.js';
+import { registerSlashCommand } from '../../slash-commands.js';
 export { MODULE_NAME };

 // Wraps a string into monospace font-face span
@@ -44,6 +46,7 @@ const sources = {
     vlad: 'vlad',
     openai: 'openai',
     comfy: 'comfy',
+    togetherai: 'togetherai',
 };

 const generationMode = {
@@ -830,6 +833,16 @@ function onComfyWorkflowChange() {
     extension_settings.sd.comfy_workflow = $('#sd_comfy_workflow').find(':selected').val();
     saveSettingsDebounced();
 }
+async function changeComfyWorkflow(_, name) {
+    name = name.replace(/(\.json)?$/i, '.json');
+    if ($(`#sd_comfy_workflow > [value="${name}"]`).length > 0) {
+        extension_settings.sd.comfy_workflow = name;
+        $('#sd_comfy_workflow').val(extension_settings.sd.comfy_workflow);
+        saveSettingsDebounced();
+    } else {
+        toastr.error(`ComfyUI Workflow "${name}" does not exist.`);
+    }
+}

 async function validateAutoUrl() {
     try {
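A note on the new changeComfyWorkflow() handler: the regex normalization means the /imagine-comfy-workflow (alias /icw) command registered further below accepts the workflow name with or without the .json extension. A minimal sketch of that normalization (the workflow name is made up for illustration):

    // Both spellings resolve to the same <option> value in #sd_comfy_workflow.
    const normalize = (name) => name.replace(/(\.json)?$/i, '.json');
    normalize('MyWorkflow');      // 'MyWorkflow.json'
    normalize('MyWorkflow.json'); // 'MyWorkflow.json'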
@@ -905,7 +918,7 @@ async function onModelChange() {
     extension_settings.sd.model = $('#sd_model').find(':selected').val();
     saveSettingsDebounced();

-    const cloudSources = [sources.horde, sources.novel, sources.openai];
+    const cloudSources = [sources.horde, sources.novel, sources.openai, sources.togetherai];

     if (cloudSources.includes(extension_settings.sd.source)) {
         return;
@@ -1038,11 +1051,14 @@ async function loadSamplers() {
             samplers = await loadVladSamplers();
             break;
         case sources.openai:
-            samplers = await loadOpenAiSamplers();
+            samplers = ['N/A'];
             break;
         case sources.comfy:
             samplers = await loadComfySamplers();
             break;
+        case sources.togetherai:
+            samplers = ['N/A'];
+            break;
     }

     for (const sampler of samplers) {
@@ -1052,6 +1068,11 @@ async function loadSamplers() {
         option.selected = sampler === extension_settings.sd.sampler;
         $('#sd_sampler').append(option);
     }
+
+    if (!extension_settings.sd.sampler && samplers.length > 0) {
+        extension_settings.sd.sampler = samplers[0];
+        $('#sd_sampler').val(extension_settings.sd.sampler).trigger('change');
+    }
 }

 async function loadHordeSamplers() {
@@ -1108,10 +1129,6 @@ async function loadAutoSamplers() {
     }
 }

-async function loadOpenAiSamplers() {
-    return ['N/A'];
-}
-
 async function loadVladSamplers() {
     if (!extension_settings.sd.vlad_url) {
         return [];
@@ -1200,6 +1217,9 @@ async function loadModels() {
         case sources.comfy:
             models = await loadComfyModels();
             break;
+        case sources.togetherai:
+            models = await loadTogetherAIModels();
+            break;
     }

     for (const model of models) {
@@ -1209,6 +1229,30 @@ async function loadModels() {
         option.selected = model.value === extension_settings.sd.model;
         $('#sd_model').append(option);
     }
+
+    if (!extension_settings.sd.model && models.length > 0) {
+        extension_settings.sd.model = models[0].value;
+        $('#sd_model').val(extension_settings.sd.model).trigger('change');
+    }
 }
+
+async function loadTogetherAIModels() {
+    if (!secret_state[SECRET_KEYS.TOGETHERAI]) {
+        console.debug('TogetherAI API key is not set.');
+        return [];
+    }
+
+    const result = await fetch('/api/sd/together/models', {
+        method: 'POST',
+        headers: getRequestHeaders(),
+    });
+
+    if (result.ok) {
+        const data = await result.json();
+        return data;
+    }
+
+    return [];
+}

 async function loadHordeModels() {
@@ -1422,6 +1466,9 @@ async function loadSchedulers() {
         case sources.openai:
             schedulers = ['N/A'];
             break;
+        case sources.togetherai:
+            schedulers = ['N/A'];
+            break;
         case sources.comfy:
             schedulers = await loadComfySchedulers();
             break;
@@ -1481,6 +1528,9 @@ async function loadVaes() {
         case sources.openai:
             vaes = ['N/A'];
             break;
+        case sources.togetherai:
+            vaes = ['N/A'];
+            break;
         case sources.comfy:
             vaes = await loadComfyVaes();
             break;
@@ -1861,6 +1911,9 @@ async function sendGenerationRequest(generationType, prompt, characterName = nul
         case sources.comfy:
             result = await generateComfyImage(prefixedPrompt);
             break;
+        case sources.togetherai:
+            result = await generateTogetherAIImage(prefixedPrompt);
+            break;
     }

     if (!result.data) {
@@ -1883,6 +1936,29 @@ async function sendGenerationRequest(generationType, prompt, characterName = nul
     callback ? callback(prompt, base64Image, generationType) : sendMessage(prompt, base64Image, generationType);
 }

+async function generateTogetherAIImage(prompt) {
+    const result = await fetch('/api/sd/together/generate', {
+        method: 'POST',
+        headers: getRequestHeaders(),
+        body: JSON.stringify({
+            prompt: prompt,
+            negative_prompt: extension_settings.sd.negative_prompt,
+            model: extension_settings.sd.model,
+            steps: extension_settings.sd.steps,
+            width: extension_settings.sd.width,
+            height: extension_settings.sd.height,
+        }),
+    });
+
+    if (result.ok) {
+        const data = await result.json();
+        return { format: 'jpg', data: data?.output?.choices?.[0]?.image_base64 };
+    } else {
+        const text = await result.text();
+        throw new Error(text);
+    }
+}
+
 /**
  * Generates an "extras" image using a provided prompt and other settings.
  *
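The success branch above only relies on the response shape data.output.choices[0].image_base64 coming back from the local /api/sd/together/generate route; the server-side handler is not part of this diff. A rough sketch of the payload it assumes (field names inferred from the optional-chaining path, values invented):

    // Hypothetical response body consumed by generateTogetherAIImage().
    const exampleResponse = {
        output: {
            choices: [
                { image_base64: '/9j/4AAQSkZJRg...' }, // base64-encoded JPEG, hence format: 'jpg'
            ],
        },
    };
    const base64Image = exampleResponse?.output?.choices?.[0]?.image_base64;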
@@ -2180,6 +2256,9 @@ async function generateComfyImage(prompt) {
     placeholders.forEach(ph => {
         workflow = workflow.replace(`"%${ph}%"`, JSON.stringify(extension_settings.sd[ph]));
     });
+    (extension_settings.sd.comfy_placeholders ?? []).forEach(ph => {
+        workflow = workflow.replace(`"%${ph.find}%"`, JSON.stringify(substituteParams(ph.replace)));
+    });
     console.log(`{
         "prompt": ${workflow}
     }`);
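Custom placeholders follow the same contract as the built-in ones: the workflow JSON must contain the literal quoted token "%<find>%", which is swapped for the JSON-stringified, macro-substituted replace value. A small illustration with a made-up placeholder, workflow fragment, and character name:

    // Hypothetical entry created in the workflow editor UI added below.
    const ph = { find: 'char_name', replace: '{{char}}' };

    let workflow = '{ "6": { "inputs": { "text": "%char_name%" } } }';
    // substituteParams() expands macros such as {{char}}; assume it yields 'Seraphina' here.
    workflow = workflow.replace(`"%${ph.find}%"`, JSON.stringify('Seraphina'));
    // -> '{ "6": { "inputs": { "text": "Seraphina" } } }'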
@@ -2216,6 +2295,50 @@ async function onComfyOpenWorkflowEditorClick() {
     };
     $('#sd_comfy_workflow_editor_name').text(extension_settings.sd.comfy_workflow);
     $('#sd_comfy_workflow_editor_workflow').val(workflow);
+    const addPlaceholderDom = (placeholder) => {
+        const el = $(`
+            <li class="sd_comfy_workflow_editor_not_found" data-placeholder="${placeholder.find}">
+                <span class="sd_comfy_workflow_editor_custom_remove" title="Remove custom placeholder">⊘</span>
+                <span class="sd_comfy_workflow_editor_custom_final">"%${placeholder.find}%"</span><br>
+                <input placeholder="find" title="find" type="text" class="text_pole sd_comfy_workflow_editor_custom_find" value=""><br>
+                <input placeholder="replace" title="replace" type="text" class="text_pole sd_comfy_workflow_editor_custom_replace">
+            </li>
+        `);
+        $('#sd_comfy_workflow_editor_placeholder_list_custom').append(el);
+        el.find('.sd_comfy_workflow_editor_custom_find').val(placeholder.find);
+        el.find('.sd_comfy_workflow_editor_custom_find').on('input', function() {
+            placeholder.find = this.value;
+            el.find('.sd_comfy_workflow_editor_custom_final').text(`"%${this.value}%"`);
+            el.attr('data-placeholder', `${this.value}`);
+            checkPlaceholders();
+            saveSettingsDebounced();
+        });
+        el.find('.sd_comfy_workflow_editor_custom_replace').val(placeholder.replace);
+        el.find('.sd_comfy_workflow_editor_custom_replace').on('input', function() {
+            placeholder.replace = this.value;
+            saveSettingsDebounced();
+        });
+        el.find('.sd_comfy_workflow_editor_custom_remove').on('click', () => {
+            el.remove();
+            extension_settings.sd.comfy_placeholders.splice(extension_settings.sd.comfy_placeholders.indexOf(placeholder));
+            saveSettingsDebounced();
+        });
+    };
+    $('#sd_comfy_workflow_editor_placeholder_add').on('click', () => {
+        if (!extension_settings.sd.comfy_placeholders) {
+            extension_settings.sd.comfy_placeholders = [];
+        }
+        const placeholder = {
+            find: '',
+            replace: '',
+        };
+        extension_settings.sd.comfy_placeholders.push(placeholder);
+        addPlaceholderDom(placeholder);
+        saveSettingsDebounced();
+    });
+    (extension_settings.sd.comfy_placeholders ?? []).forEach(placeholder=>{
+        addPlaceholderDom(placeholder);
+    });
     checkPlaceholders();
     $('#sd_comfy_workflow_editor_workflow').on('input', checkPlaceholders);
     if (await popupResult) {
@@ -2376,6 +2499,8 @@ function isValidState() {
             return secret_state[SECRET_KEYS.OPENAI];
         case sources.comfy:
             return true;
+        case sources.togetherai:
+            return secret_state[SECRET_KEYS.TOGETHERAI];
     }
 }

@@ -2481,7 +2606,8 @@ $('#sd_dropdown [id]').on('click', function () {
 });

 jQuery(async () => {
-    getContext().registerSlashCommand('imagine', generatePicture, ['sd', 'img', 'image'], helpString, true, true);
+    registerSlashCommand('imagine', generatePicture, ['sd', 'img', 'image'], helpString, true, true);
+    registerSlashCommand('imagine-comfy-workflow', changeComfyWorkflow, ['icw'], '(workflowName) - change the workflow to be used for image generation with ComfyUI, e.g. <tt>/imagine-comfy-workflow MyWorkflow</tt>')

     $('#extensions_settings').append(renderExtensionTemplate('stable-diffusion', 'settings', defaultSettings));
     $('#sd_source').on('change', onSourceChange);
@@ -35,6 +35,7 @@
             <option value="novel">NovelAI Diffusion</option>
             <option value="openai">OpenAI (DALL-E)</option>
             <option value="comfy">ComfyUI</option>
+            <option value="togetherai">TogetherAI</option>
         </select>
         <div data-sd-source="auto">
             <label for="sd_auto_url">SD Web UI URL</label>
@@ -82,3 +82,17 @@
 .sd_comfy_workflow_editor_placeholder_list>li>.notes-link {
     cursor: help;
 }
+
+.sd_comfy_workflow_editor_placeholder_list input {
+    font-size: inherit;
+    margin: 0;
+}
+.sd_comfy_workflow_editor_custom_remove, #sd_comfy_workflow_editor_placeholder_add {
+    cursor: pointer;
+    font-weight: bold;
+    width: 1em;
+    opacity: 0.5;
+    &:hover {
+        opacity: 1;
+    }
+}
public/scripts/logit-bias.js (new file, 126 lines)
@@ -0,0 +1,126 @@
import { saveSettingsDebounced } from '../script.js';
import { getTextTokens } from './tokenizers.js';
import { uuidv4 } from './utils.js';

export const BIAS_CACHE = new Map();

/**
 * Displays the logit bias list in the specified container.
 * @param {object} logitBias Logit bias object
 * @param {string} containerSelector Container element selector
 * @returns
 */
export function displayLogitBias(logitBias, containerSelector) {
    if (!Array.isArray(logitBias)) {
        console.log('Logit bias set not found');
        return;
    }

    $(containerSelector).find('.logit_bias_list').empty();

    for (const entry of logitBias) {
        if (entry) {
            createLogitBiasListItem(entry, logitBias, containerSelector);
        }
    }

    BIAS_CACHE.delete(containerSelector);
}

/**
 * Creates a new logit bias entry
 * @param {object[]} logitBias Array of logit bias objects
 * @param {string} containerSelector Container element ID
 */
export function createNewLogitBiasEntry(logitBias, containerSelector) {
    const entry = { id: uuidv4(), text: '', value: 0 };
    logitBias.push(entry);
    BIAS_CACHE.delete(containerSelector);
    createLogitBiasListItem(entry, logitBias, containerSelector);
    saveSettingsDebounced();
}

/**
 * Creates a logit bias list item.
 * @param {object} entry Logit bias entry
 * @param {object[]} logitBias Array of logit bias objects
 * @param {string} containerSelector Container element ID
 */
function createLogitBiasListItem(entry, logitBias, containerSelector) {
    const id = entry.id;
    const template = $('#logit_bias_template .logit_bias_form').clone();
    template.data('id', id);
    template.find('.logit_bias_text').val(entry.text).on('input', function () {
        entry.text = $(this).val();
        BIAS_CACHE.delete(containerSelector);
        saveSettingsDebounced();
    });
    template.find('.logit_bias_value').val(entry.value).on('input', function () {
        entry.value = Number($(this).val());
        BIAS_CACHE.delete(containerSelector);
        saveSettingsDebounced();
    });
    template.find('.logit_bias_remove').on('click', function () {
        $(this).closest('.logit_bias_form').remove();
        const index = logitBias.indexOf(entry);
        if (index > -1) {
            logitBias.splice(index, 1);
        }
        BIAS_CACHE.delete(containerSelector);
        saveSettingsDebounced();
    });
    $(containerSelector).find('.logit_bias_list').prepend(template);
}

/**
 * Populate logit bias list from preset.
 * @param {object[]} biasPreset Bias preset
 * @param {number} tokenizerType Tokenizer type (see tokenizers.js)
 * @param {(bias: number, sequence: number[]) => object} getBiasObject Transformer function to create bias object
 * @returns {object[]} Array of logit bias objects
 */
export function getLogitBiasListResult(biasPreset, tokenizerType, getBiasObject) {
    const result = [];

    for (const entry of biasPreset) {
        if (entry.text?.length > 0) {
            const text = entry.text.trim();

            // Skip empty lines
            if (text.length === 0) {
                continue;
            }

            // Verbatim text
            if (text.startsWith('{') && text.endsWith('}')) {
                const tokens = getTextTokens(tokenizerType, text.slice(1, -1));
                result.push(getBiasObject(entry.value, tokens));
            }


            // Raw token ids, JSON serialized
            else if (text.startsWith('[') && text.endsWith(']')) {
                try {
                    const tokens = JSON.parse(text);

                    if (Array.isArray(tokens) && tokens.every(t => Number.isInteger(t))) {
                        result.push(getBiasObject(entry.value, tokens));
                    } else {
                        throw new Error('Not an array of integers');
                    }
                } catch (err) {
                    console.log(`Failed to parse logit bias token list: ${text}`, err);
                }
            }


            // Text with a leading space
            else {
                const biasText = ` ${text}`;
                const tokens = getTextTokens(tokenizerType, biasText);
                result.push(getBiasObject(entry.value, tokens));
            }
        }
    }
    return result;
}
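The consumer pattern this module expects (and which nai-settings.js and textgen-settings.js adopt later in this commit): each API module picks a unique container selector that doubles as the BIAS_CACHE key, passes its own logit_bias array into the UI helpers, and supplies a transformer to getLogitBiasListResult() that shapes tokenized entries for its backend. A condensed sketch with hypothetical names (my_settings, '#my_api-settings', '#my_logit_bias_new_entry' and the bias-object shape are placeholders, not part of this commit; the tokenizers import mirrors the sibling modules):

    import { BIAS_CACHE, createNewLogitBiasEntry, displayLogitBias, getLogitBiasListResult } from './logit-bias.js';
    import { tokenizers } from './tokenizers.js';

    const BIAS_KEY = '#my_api-settings';    // container selector doubles as the cache key
    const my_settings = { logit_bias: [] }; // persisted elsewhere via saveSettingsDebounced()

    // Render the list into the container and hook up an "add entry" button.
    displayLogitBias(my_settings.logit_bias, BIAS_KEY);
    $('#my_logit_bias_new_entry').on('click', () => createNewLogitBiasEntry(my_settings.logit_bias, BIAS_KEY));

    // At generation time: tokenize the entries once, then reuse the cached result.
    function getBias() {
        const bias = BIAS_CACHE.get(BIAS_KEY) ||
            getLogitBiasListResult(my_settings.logit_bias, tokenizers.LLAMA,
                (value, sequence) => ({ sequence, bias: value })); // backend-specific shape
        BIAS_CACHE.set(BIAS_KEY, bias);
        return bias;
    }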
(deleted file, 64 lines; this Mancer-only module is superseded by the new public/scripts/textgen-models.js further below)
@@ -1,64 +0,0 @@
import { setGenerationParamsFromPreset } from '../script.js';
import { isMobile } from './RossAscends-mods.js';
import { textgenerationwebui_settings as textgen_settings } from './textgen-settings.js';

let models = [];

export async function loadMancerModels(data) {
    if (!Array.isArray(data)) {
        console.error('Invalid Mancer models data', data);
        return;
    }

    models = data;

    $('#mancer_model').empty();
    for (const model of data) {
        const option = document.createElement('option');
        option.value = model.id;
        option.text = model.name;
        option.selected = model.id === textgen_settings.mancer_model;
        $('#mancer_model').append(option);
    }
}

function onMancerModelSelect() {
    const modelId = String($('#mancer_model').val());
    textgen_settings.mancer_model = modelId;
    $('#api_button_textgenerationwebui').trigger('click');

    const limits = models.find(x => x.id === modelId)?.limits;
    setGenerationParamsFromPreset({ max_length: limits.context, genamt: limits.completion });
}

function getMancerModelTemplate(option) {
    const model = models.find(x => x.id === option?.element?.value);

    if (!option.id || !model) {
        return option.text;
    }

    const creditsPerPrompt = (model.limits?.context - model.limits?.completion) * model.pricing?.prompt;
    const creditsPerCompletion = model.limits?.completion * model.pricing?.completion;
    const creditsTotal = Math.round(creditsPerPrompt + creditsPerCompletion).toFixed(0);

    return $((`
        <div class="flex-container flexFlowColumn">
            <div><strong>${DOMPurify.sanitize(model.name)}</strong> | <span>${model.limits?.context} ctx</span> / <span>${model.limits?.completion} res</span> | <small>Credits per request (max): ${creditsTotal}</small></div>
        </div>
    `));
}

jQuery(function () {
    $('#mancer_model').on('change', onMancerModelSelect);

    if (!isMobile()) {
        $('#mancer_model').select2({
            placeholder: 'Select a model',
            searchInputPlaceholder: 'Search models...',
            searchInputCssClass: 'text_pole',
            width: '100%',
            templateResult: getMancerModelTemplate,
        });
    }
});
@@ -15,8 +15,8 @@ import {
     getSortableDelay,
     getStringHash,
     onlyUnique,
-    uuidv4,
 } from './utils.js';
+import { BIAS_CACHE, createNewLogitBiasEntry, displayLogitBias, getLogitBiasListResult } from './logit-bias.js';

 const default_preamble = '[ Style: chat, complex, sensory, visceral ]';
 const default_order = [1, 5, 0, 2, 3, 4];
@@ -59,7 +59,7 @@ const nai_tiers = {

 let novel_data = null;
 let badWordsCache = {};
-let biasCache = undefined;
+const BIAS_KEY = '#novel_api-settings';

 export function setNovelData(data) {
     novel_data = data;
@@ -145,7 +145,7 @@ export function loadNovelSettings(settings) {
     //load the rest of the Novel settings without any checks
     nai_settings.model_novel = settings.model_novel;
     $('#model_novel_select').val(nai_settings.model_novel);
-    $(`#model_novel_select option[value=${nai_settings.model_novel}]`).attr('selected', true);
+    $(`#model_novel_select option[value=${nai_settings.model_novel}]`).prop('selected', true);

     if (settings.nai_preamble !== undefined) {
         nai_settings.preamble = settings.nai_preamble;
@@ -217,7 +217,7 @@ function loadNovelSettingsUi(ui_settings) {

     $('#streaming_novel').prop('checked', ui_settings.streaming_novel);
     sortItemsByOrder(ui_settings.order);
-    displayLogitBias(ui_settings.logit_bias);
+    displayLogitBias(ui_settings.logit_bias, BIAS_KEY);
 }

 const sliders = [
@@ -433,8 +433,8 @@ export function getNovelGenerationData(finalPrompt, settings, maxLength, isImper

     let logitBias = [];
     if (tokenizerType !== tokenizers.NONE && Array.isArray(nai_settings.logit_bias) && nai_settings.logit_bias.length) {
-        logitBias = biasCache || calculateLogitBias();
-        biasCache = logitBias;
+        logitBias = BIAS_CACHE.get(BIAS_KEY) || calculateLogitBias();
+        BIAS_CACHE.set(BIAS_KEY, logitBias);
     }

     return {
@@ -525,65 +525,14 @@ function saveSamplingOrder() {
     saveSettingsDebounced();
 }

-function displayLogitBias(logit_bias) {
-    if (!Array.isArray(logit_bias)) {
-        console.log('Logit bias set not found');
-        return;
-    }
-
-    $('.novelai_logit_bias_list').empty();
-
-    for (const entry of logit_bias) {
-        if (entry) {
-            createLogitBiasListItem(entry);
-        }
-    }
-
-    biasCache = undefined;
-}
-
-function createNewLogitBiasEntry() {
-    const entry = { id: uuidv4(), text: '', value: 0 };
-    nai_settings.logit_bias.push(entry);
-    biasCache = undefined;
-    createLogitBiasListItem(entry);
-    saveSettingsDebounced();
-}
-
-function createLogitBiasListItem(entry) {
-    const id = entry.id;
-    const template = $('#novelai_logit_bias_template .novelai_logit_bias_form').clone();
-    template.data('id', id);
-    template.find('.novelai_logit_bias_text').val(entry.text).on('input', function () {
-        entry.text = $(this).val();
-        biasCache = undefined;
-        saveSettingsDebounced();
-    });
-    template.find('.novelai_logit_bias_value').val(entry.value).on('input', function () {
-        entry.value = Number($(this).val());
-        biasCache = undefined;
-        saveSettingsDebounced();
-    });
-    template.find('.novelai_logit_bias_remove').on('click', function () {
-        $(this).closest('.novelai_logit_bias_form').remove();
-        const index = nai_settings.logit_bias.indexOf(entry);
-        if (index > -1) {
-            nai_settings.logit_bias.splice(index, 1);
-        }
-        biasCache = undefined;
-        saveSettingsDebounced();
-    });
-    $('.novelai_logit_bias_list').prepend(template);
-}
-
 /**
  * Calculates logit bias for Novel AI
  * @returns {object[]} Array of logit bias objects
  */
 function calculateLogitBias() {
-    const bias_preset = nai_settings.logit_bias;
+    const biasPreset = nai_settings.logit_bias;

-    if (!Array.isArray(bias_preset) || bias_preset.length === 0) {
+    if (!Array.isArray(biasPreset) || biasPreset.length === 0) {
         return [];
     }

@@ -605,47 +554,7 @@ function calculateLogitBias() {
         };
     }

-    const result = [];
-
-    for (const entry of bias_preset) {
-        if (entry.text?.length > 0) {
-            const text = entry.text.trim();
-
-            // Skip empty lines
-            if (text.length === 0) {
-                continue;
-            }
-
-            // Verbatim text
-            if (text.startsWith('{') && text.endsWith('}')) {
-                const tokens = getTextTokens(tokenizerType, text.slice(1, -1));
-                result.push(getBiasObject(entry.value, tokens));
-            }
-
-            // Raw token ids, JSON serialized
-            else if (text.startsWith('[') && text.endsWith(']')) {
-                try {
-                    const tokens = JSON.parse(text);
-
-                    if (Array.isArray(tokens) && tokens.every(t => Number.isInteger(t))) {
-                        result.push(getBiasObject(entry.value, tokens));
-                    } else {
-                        throw new Error('Not an array of integers');
-                    }
-                } catch (err) {
-                    console.log(`Failed to parse logit bias token list: ${text}`, err);
-                }
-            }
-
-            // Text with a leading space
-            else {
-                const biasText = ` ${text}`;
-                const tokens = getTextTokens(tokenizerType, biasText);
-                result.push(getBiasObject(entry.value, tokens));
-            }
-        }
-    }
+    const result = getLogitBiasListResult(biasPreset, tokenizerType, getBiasObject);
     return result;
 }

@@ -778,5 +687,5 @@ jQuery(function () {
         saveSamplingOrder();
     });

-    $('#novelai_logit_bias_new_entry').on('click', createNewLogitBiasEntry);
+    $('#novelai_logit_bias_new_entry').on('click', () => createNewLogitBiasEntry(nai_settings.logit_bias, BIAS_KEY));
 });
@@ -15,6 +15,7 @@ export const SECRET_KEYS = {
     MAKERSUITE: 'api_key_makersuite',
     SERPAPI: 'api_key_serpapi',
     MISTRALAI: 'api_key_mistralai',
+    TOGETHERAI: 'api_key_togetherai',
 };

 const INPUT_MAP = {
@@ -31,6 +32,7 @@ const INPUT_MAP = {
     [SECRET_KEYS.APHRODITE]: '#api_key_aphrodite',
     [SECRET_KEYS.TABBY]: '#api_key_tabby',
     [SECRET_KEYS.MISTRALAI]: '#api_key_mistralai',
+    [SECRET_KEYS.TOGETHERAI]: '#api_key_togetherai',
 };

 async function clearSecret() {
public/scripts/textgen-models.js (new file, 120 lines)
@@ -0,0 +1,120 @@
import { setGenerationParamsFromPreset } from '../script.js';
import { isMobile } from './RossAscends-mods.js';
import { textgenerationwebui_settings as textgen_settings } from './textgen-settings.js';

let mancerModels = [];
let togetherModels = [];

export async function loadTogetherAIModels(data) {
    if (!Array.isArray(data)) {
        console.error('Invalid Together AI models data', data);
        return;
    }

    togetherModels = data;

    $('#model_togetherai_select').empty();
    for (const model of data) {
        // Hey buddy, I think you've got the wrong door.
        if (model.display_type === 'image') {
            continue;
        }

        const option = document.createElement('option');
        option.value = model.name;
        option.text = model.display_name;
        option.selected = model.name === textgen_settings.togetherai_model;
        $('#model_togetherai_select').append(option);
    }
}

export async function loadMancerModels(data) {
    if (!Array.isArray(data)) {
        console.error('Invalid Mancer models data', data);
        return;
    }

    mancerModels = data;

    $('#mancer_model').empty();
    for (const model of data) {
        const option = document.createElement('option');
        option.value = model.id;
        option.text = model.name;
        option.selected = model.id === textgen_settings.mancer_model;
        $('#mancer_model').append(option);
    }
}

function onMancerModelSelect() {
    const modelId = String($('#mancer_model').val());
    textgen_settings.mancer_model = modelId;
    $('#api_button_textgenerationwebui').trigger('click');

    const limits = mancerModels.find(x => x.id === modelId)?.limits;
    setGenerationParamsFromPreset({ max_length: limits.context, genamt: limits.completion });
}


function onTogetherModelSelect() {
    const modelName = String($('#model_togetherai_select').val());
    textgen_settings.togetherai_model = modelName;
    $('#api_button_textgenerationwebui').trigger('click');
    const model = togetherModels.find(x => x.name === modelName);
    setGenerationParamsFromPreset({ max_length: model.context_length });
}

function getMancerModelTemplate(option) {
    const model = mancerModels.find(x => x.id === option?.element?.value);

    if (!option.id || !model) {
        return option.text;
    }

    const creditsPerPrompt = (model.limits?.context - model.limits?.completion) * model.pricing?.prompt;
    const creditsPerCompletion = model.limits?.completion * model.pricing?.completion;
    const creditsTotal = Math.round(creditsPerPrompt + creditsPerCompletion).toFixed(0);

    return $((`
        <div class="flex-container flexFlowColumn">
            <div><strong>${DOMPurify.sanitize(model.name)}</strong> | <span>${model.limits?.context} ctx</span> / <span>${model.limits?.completion} res</span> | <small>Credits per request (max): ${creditsTotal}</small></div>
        </div>
    `));
}

function getTogetherModelTemplate(option) {
    const model = togetherModels.find(x => x.name === option?.element?.value);

    if (!option.id || !model) {
        return option.text;
    }

    return $((`
        <div class="flex-container flexFlowColumn">
            <div><strong>${DOMPurify.sanitize(model.name)}</strong> | <span>${model.context_length || '???'} tokens</span></div>
            <div><small>${DOMPurify.sanitize(model.description)}</small></div>
        </div>
    `));
}

jQuery(function () {
    $('#mancer_model').on('change', onMancerModelSelect);
    $('#model_togetherai_select').on('change', onTogetherModelSelect);

    if (!isMobile()) {
        $('#mancer_model').select2({
            placeholder: 'Select a model',
            searchInputPlaceholder: 'Search models...',
            searchInputCssClass: 'text_pole',
            width: '100%',
            templateResult: getMancerModelTemplate,
        });
        $('#model_togetherai_select').select2({
            placeholder: 'Select a model',
            searchInputPlaceholder: 'Search models...',
            searchInputCssClass: 'text_pole',
            width: '100%',
            templateResult: getTogetherModelTemplate,
        });
    }
});
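loadTogetherAIModels() and getTogetherModelTemplate() only read a handful of fields from each record (name, display_name, display_type, context_length, description), and image models are filtered out of the text-completion dropdown. An illustrative payload matching those reads — only the field names are taken from the code above; the values are invented, apart from the default model name that appears in textgen-settings.js:

    // Hypothetical data passed to loadTogetherAIModels(data).
    const exampleModels = [
        {
            name: 'Gryphe/MythoMax-L2-13b',   // becomes the <option> value / togetherai_model
            display_name: 'MythoMax 13B',     // shown in the dropdown
            display_type: 'chat',             // anything except 'image' is kept
            context_length: 4096,             // fed to setGenerationParamsFromPreset() as max_length
            description: 'Roleplay-oriented Llama 2 merge.',
        },
        { name: 'some/image-model', display_type: 'image' }, // skipped ("wrong door")
    ];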
@@ -9,6 +9,7 @@ import {
     setOnlineStatus,
     substituteParams,
 } from '../script.js';
+import { BIAS_CACHE, createNewLogitBiasEntry, displayLogitBias, getLogitBiasListResult } from './logit-bias.js';

 import {
     power_user,
@@ -31,15 +32,18 @@ export const textgen_types = {
     APHRODITE: 'aphrodite',
     TABBY: 'tabby',
     KOBOLDCPP: 'koboldcpp',
+    TOGETHERAI: 'togetherai',
 };

-const { MANCER, APHRODITE } = textgen_types;
+const { MANCER, APHRODITE, TOGETHERAI } = textgen_types;
+const BIAS_KEY = '#textgenerationwebui_api-settings';

 // Maybe let it be configurable in the future?
 // (7 days later) The future has come.
 const MANCER_SERVER_KEY = 'mancer_server';
 const MANCER_SERVER_DEFAULT = 'https://neuro.mancer.tech';
-export let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEFAULT;
+let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEFAULT;
+let TOGETHERAI_SERVER = 'https://api.together.xyz';

 const KOBOLDCPP_ORDER = [6, 0, 1, 3, 4, 2, 5];
 const settings = {
@@ -89,8 +93,10 @@ const settings = {
     //prompt_log_probs_aphrodite: 0,
     type: textgen_types.OOBA,
     mancer_model: 'mytholite',
+    togetherai_model: 'Gryphe/MythoMax-L2-13b',
     legacy_api: false,
     sampler_order: KOBOLDCPP_ORDER,
+    logit_bias: [],
     n: 1,
 };

@@ -144,6 +150,7 @@ const setting_names = [
     //'prompt_log_probs_aphrodite'
     'sampler_order',
     'n',
+    'logit_bias',
 ];

 async function selectPreset(name) {
@@ -159,13 +166,14 @@ async function selectPreset(name) {
         setSettingByName(name, value, true);
     }
     setGenerationParamsFromPreset(preset);
+    displayLogitBias(preset.logit_bias, BIAS_KEY);
     saveSettingsDebounced();
 }

 function formatTextGenURL(value) {
     try {
-        // Mancer doesn't need any formatting (it's hardcoded)
-        if (settings.type === MANCER) {
+        // Mancer/Together doesn't need any formatting (it's hardcoded)
+        if (settings.type === MANCER || settings.type === TOGETHERAI) {
             return value;
         }

@@ -240,6 +248,42 @@ function getCustomTokenBans() {
     return result.filter(onlyUnique).map(x => String(x)).join(',');
 }

+/**
+ * Calculates logit bias object from the logit bias list.
+ * @returns {object} Logit bias object
+ */
+function calculateLogitBias() {
+    if (!Array.isArray(settings.logit_bias) || settings.logit_bias.length === 0) {
+        return {};
+    }
+
+    const tokenizer = SENTENCEPIECE_TOKENIZERS.includes(power_user.tokenizer) ? power_user.tokenizer : tokenizers.LLAMA;
+    const result = {};
+
+    /**
+     * Adds bias to the logit bias object.
+     * @param {number} bias
+     * @param {number[]} sequence
+     * @returns {object} Accumulated logit bias object
+     */
+    function addBias(bias, sequence) {
+        if (sequence.length === 0) {
+            return;
+        }
+
+        for (const logit of sequence) {
+            const key = String(logit);
+            result[key] = bias;
+        }
+
+        return result;
+    }
+
+    getLogitBiasListResult(settings.logit_bias, tokenizer, addBias);
+
+    return result;
+}
+
 function loadTextGenSettings(data, loadedSettings) {
     textgenerationwebui_presets = convertPresets(data.textgenerationwebui_presets);
     textgenerationwebui_preset_names = data.textgenerationwebui_preset_names ?? [];
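Unlike the NovelAI version, this calculateLogitBias() accumulates a flat token-id to bias map: addBias() writes every token of a sequence into the shared result object, and the array returned by getLogitBiasListResult() itself is ignored. A quick worked example (entry values and token ids are made up):

    // Illustrative settings.logit_bias entries:
    const entries = [
        { text: 'moist', value: -100 },  // plain text: tokenized with a leading space
        { text: '[420, 69]', value: 5 }, // raw token ids, JSON-serialized
    ];
    // Assuming ' moist' tokenizes to [14435, 396], the map sent as APIflags.logit_bias is:
    // { '14435': -100, '396': -100, '420': 5, '69': 5 }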
@@ -267,6 +311,7 @@ function loadTextGenSettings(data, loadedSettings) {

     $('#textgen_type').val(settings.type);
     showTypeSpecificControls(settings.type);
+    displayLogitBias(settings.logit_bias, BIAS_KEY);
     //this is needed because showTypeSpecificControls() does not handle NOT declarations
     if (settings.type === textgen_types.APHRODITE) {
         $('[data-forAphro=False]').each(function () {
@@ -412,6 +457,8 @@ jQuery(function () {
             saveSettingsDebounced();
         });
     }
+
+    $('#textgen_logit_bias_new_entry').on('click', () => createNewLogitBiasEntry(settings.logit_bias, BIAS_KEY));
 });

 function showTypeSpecificControls(type) {
@@ -437,6 +484,11 @@ function setSettingByName(setting, value, trigger) {
         return;
     }

+    if ('logit_bias' === setting) {
+        settings.logit_bias = Array.isArray(value) ? value : [];
+        return;
+    }
+
     const isCheckbox = $(`#${setting}_textgenerationwebui`).attr('type') == 'checkbox';
     const isText = $(`#${setting}_textgenerationwebui`).attr('type') == 'text' || $(`#${setting}_textgenerationwebui`).is('textarea');
     if (isCheckbox) {
@@ -546,6 +598,10 @@ function getModel() {
         return settings.mancer_model;
     }

+    if (settings.type === TOGETHERAI) {
+        return settings.togetherai_model;
+    }
+
     if (settings.type === APHRODITE) {
         return online_status;
     }
@@ -553,6 +609,18 @@ function getModel() {
     return undefined;
 }

+export function getTextGenServer() {
+    if (settings.type === MANCER) {
+        return MANCER_SERVER;
+    }
+
+    if (settings.type === TOGETHERAI) {
+        return TOGETHERAI_SERVER;
+    }
+
+    return api_server_textgenerationwebui;
+}
+
 export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, isContinue, cfgValues, type) {
     const canMultiSwipe = !isContinue && !isImpersonate && type !== 'quiet';
     let APIflags = {
@@ -590,10 +658,8 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
             toIntArray(getCustomTokenBans()) :
             getCustomTokenBans(),
         'api_type': settings.type,
-        'api_server': settings.type === MANCER ?
-            MANCER_SERVER :
-            api_server_textgenerationwebui,
-        'legacy_api': settings.legacy_api && settings.type !== MANCER,
+        'api_server': getTextGenServer(),
+        'legacy_api': settings.legacy_api && settings.type !== MANCER && settings.type !== TOGETHERAI,
         'sampler_order': settings.type === textgen_types.KOBOLDCPP ?
             settings.sampler_order :
             undefined,
@@ -625,6 +691,12 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
         APIflags = Object.assign(APIflags, aphroditeExclusionFlags);
     }

+    if (Array.isArray(settings.logit_bias) && settings.logit_bias.length) {
+        const logitBias = BIAS_CACHE.get(BIAS_KEY) || calculateLogitBias();
+        BIAS_CACHE.set(BIAS_KEY, logitBias);
+        APIflags.logit_bias = logitBias;
+    }
+
     return APIflags;
 }

@@ -6,7 +6,7 @@ import { getStringHash } from './utils.js';
 import { kai_flags } from './kai-settings.js';
 import { textgen_types, textgenerationwebui_settings as textgen_settings } from './textgen-settings.js';

-const { OOBA, TABBY, KOBOLDCPP, MANCER } = textgen_types;
+const { OOBA, TABBY, KOBOLDCPP, MANCER, TOGETHERAI } = textgen_types;

 export const CHARACTERS_PER_TOKEN_RATIO = 3.35;
 const TOKENIZER_WARNING_KEY = 'tokenizationWarningShown';
@@ -540,7 +540,8 @@ function getTextgenAPITokenizationParams(str) {
         url: api_server_textgenerationwebui,
         legacy_api:
             textgen_settings.legacy_api &&
-            textgen_settings.type !== MANCER,
+            textgen_settings.type !== MANCER &&
+            textgen_settings.type !== TOGETHERAI,
     };
 }
