Merge remote-tracking branch 'upstream/staging' into staging

DonMoralez 2023-12-18 22:01:38 +02:00
commit 37807acc60
25 changed files with 788 additions and 213 deletions

View File

@ -3,7 +3,6 @@
"username": "User",
"api_server": "http://127.0.0.1:5000/api",
"api_server_textgenerationwebui": "http://127.0.0.1:5000/api",
"api_use_mancer_webui": false,
"preset_settings": "RecoveredRuins",
"user_avatar": "user-default.png",
"amount_gen": 250,

public/img/togetherai.svg (new file, 55 lines, 2.4 KiB)
View File

@ -0,0 +1,55 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
width="32"
height="32"
viewBox="0 0 32 32"
version="1.1"
id="svg4"
sodipodi:docname="togetherai.svg"
inkscape:version="1.3 (0e150ed, 2023-07-21)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview4"
pagecolor="#ffffff"
bordercolor="#000000"
borderopacity="0.25"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:zoom="7.375"
inkscape:cx="15.932203"
inkscape:cy="15.932203"
inkscape:window-width="1280"
inkscape:window-height="688"
inkscape:window-x="0"
inkscape:window-y="25"
inkscape:window-maximized="1"
inkscape:current-layer="g4" />
<g
clip-path="url(#clip0_542_18748)"
id="g4">
<path
id="rect1"
d="M 5.6464844 0 C 2.5180256 -1.1842331e-15 3.5527101e-15 2.5180256 0 5.6464844 L 0 26.353516 C -1.1842331e-15 29.481971 2.5180256 32 5.6464844 32 L 26.353516 32 C 29.481971 32 32 29.481971 32 26.353516 L 32 5.6464844 C 32 2.5180256 29.481971 3.5527101e-15 26.353516 0 L 5.6464844 0 z M 9.6464844 4 A 5.6470599 5.6470599 0 0 1 15.294922 9.6464844 A 5.6470599 5.6470599 0 0 1 9.6464844 15.294922 A 5.6470599 5.6470599 0 0 1 4 9.6464844 A 5.6470599 5.6470599 0 0 1 9.6464844 4 z M 22.824219 4 A 5.6470599 5.6470599 0 0 1 28.470703 9.6464844 A 5.6470599 5.6470599 0 0 1 22.824219 15.294922 A 5.6470599 5.6470599 0 0 1 17.175781 9.6464844 A 5.6470599 5.6470599 0 0 1 22.824219 4 z M 9.6464844 17.175781 A 5.6470599 5.6470599 0 0 1 15.294922 22.824219 A 5.6470599 5.6470599 0 0 1 9.6464844 28.470703 A 5.6470599 5.6470599 0 0 1 4 22.824219 A 5.6470599 5.6470599 0 0 1 9.6464844 17.175781 z M 22.824219 17.175781 A 5.6470599 5.6470599 0 0 1 28.470703 22.824219 A 5.6470599 5.6470599 0 0 1 22.824219 28.470703 A 5.6470599 5.6470599 0 0 1 17.175781 22.824219 A 5.6470599 5.6470599 0 0 1 22.824219 17.175781 z " />
<circle
cx="9.64706"
cy="9.64706"
r="5.64706"
opacity="0.45"
id="circle9" />
</g>
<defs
id="defs4">
<clipPath
id="clip0_542_18748">
<rect
width="32"
height="32"
id="rect4" />
</clipPath>
</defs>
</svg>


View File

@ -69,7 +69,7 @@
<script type="module" src="scripts/group-chats.js"></script>
<script type="module" src="scripts/kai-settings.js"></script>
<script type="module" src="scripts/textgen-settings.js"></script>
<script type="module" src="scripts/mancer-settings.js"></script>
<script type="module" src="scripts/textgen-models.js"></script>
<script type="module" src="scripts/bookmarks.js"></script>
<script type="module" src="scripts/horde.js"></script>
<script type="module" src="scripts/RossAscends-mods.js"></script>
@ -974,7 +974,7 @@
Helps to ban or reinforce the usage of certain tokens.
</div>
<div class="flex-container flexFlowColumn wide100p">
<div class="novelai_logit_bias_list"></div>
<div class="logit_bias_list"></div>
</div>
</div>
<div class="range-block">
@ -1379,6 +1379,21 @@
<textarea id="banned_tokens_textgenerationwebui" class="text_pole textarea_compact" name="banned_tokens_textgenerationwebui" rows="3" placeholder="Example:&#10;some text&#10;[42, 69, 1337]"></textarea>
</div>
</div>
<div class="range-block wide100p">
<div class="range-block-title title_restorable">
<span data-i18n="Logit Bias">Logit Bias</span>
<div id="textgen_logit_bias_new_entry" class="menu_button menu_button_icon">
<i class="fa-xs fa-solid fa-plus"></i>
<small data-i18n="Add">Add</small>
</div>
</div>
<div class="toggle-description justifyLeft" data-i18n="Helps to ban or reenforce the usage of certain words">
Helps to ban or reinforce the usage of certain tokens.
</div>
<div class="flex-container flexFlowColumn wide100p">
<div class="logit_bias_list"></div>
</div>
</div>
<div data-newbie-hidden data-forAphro=False class="wide100p">
<hr class="width100p">
<h4 data-i18n="CFG" class="textAlignCenter">CFG
@ -1607,8 +1622,8 @@
<option value="kobold"><span data-i18n="KoboldAI">KoboldAI Classic</span></option>
<option value="koboldhorde"><span data-i18n="KoboldAI Horde">KoboldAI Horde</span></option>
<option value="novel"><span data-i18n="NovelAI">NovelAI</span></option>
<option value="textgenerationwebui"><span data-i18n="Text Completion">Text Completion (ooba, Mancer, Aphrodite, TabbyAPI, KoboldCpp)</span></option>
<option value="openai"><span data-i18n="Chat Completion (OpenAI, Claude, Window/OpenRouter, Scale, AI21, Google MakerSuite, MistralAI)">Chat Completion (OpenAI, Claude, Window, OpenRouter, Scale, AI21, Google MakerSuite, MistralAI)</span></option>
<option value="textgenerationwebui"><span data-i18n="Text Completion">Text Completion</span></option>
<option value="openai"><span data-i18n="Chat Completion">Chat Completion</span></option>
</select>
</div>
<div id="kobold_horde" style="position: relative;"> <!-- shows the kobold settings -->
@ -1738,8 +1753,27 @@
<option value="aphrodite">Aphrodite</option>
<option value="tabby">TabbyAPI</option>
<option value="koboldcpp">KoboldCpp</option>
<option value="togetherai">TogetherAI</option>
</select>
</div>
<div data-tg-type="togetherai" class="flex-container flexFlowColumn">
<h4 data-i18n="TogetherAI API Key">TogetherAI API Key</h4>
<div class="flex-container">
<input id="api_key_togetherai" name="api_key_togetherai" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
<div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_togetherai"></div>
</div>
<div data-for="api_key_togetherai" class="neutral_warning">
For privacy reasons, your API key will be hidden after you reload the page.
</div>
<div>
<h4 data-i18n="TogetherAI Model">TogetherAI Model</h4>
<select id="model_togetherai_select">
<option>
-- Connect to the API --
</option>
</select>
</div>
</div>
<div data-tg-type="mancer" class="flex-container flexFlowColumn">
<div class="flex-container flexFlowColumn">
</div>
@ -4268,11 +4302,11 @@
</div>
</div>
</div>
<div id="novelai_logit_bias_template" class="template_element">
<div class="novelai_logit_bias_form">
<input class="novelai_logit_bias_text text_pole" data-i18n="[placeholder]Type here..." placeholder="type here..." />
<input class="novelai_logit_bias_value text_pole" type="number" min="-2" value="0" max="2" step="0.01" />
<i class="menu_button fa-solid fa-xmark novelai_logit_bias_remove"></i>
<div id="logit_bias_template" class="template_element">
<div class="logit_bias_form">
<input class="logit_bias_text text_pole" data-i18n="[placeholder]Type here..." placeholder="type here..." />
<input class="logit_bias_value text_pole" type="number" min="-2" value="0" max="2" step="0.01" />
<i class="menu_button fa-solid fa-xmark logit_bias_remove"></i>
</div>
</div>
<div id="completion_prompt_manager_popup" class="drawer-content" style="display:none;">

View File

@ -19,10 +19,10 @@ import {
getTextGenUrlSourceId,
textgen_types,
textgenerationwebui_banned_in_macros,
MANCER_SERVER,
getTextGenServer,
} from './scripts/textgen-settings.js';
const { MANCER } = textgen_types;
const { MANCER, TOGETHERAI } = textgen_types;
import {
world_info,
@ -189,7 +189,7 @@ import { createPersona, initPersonas, selectCurrentPersona, setPersonaDescriptio
import { getBackgrounds, initBackgrounds, loadBackgroundSettings, background_settings } from './scripts/backgrounds.js';
import { hideLoader, showLoader } from './scripts/loader.js';
import { BulkEditOverlay, CharacterContextMenu } from './scripts/BulkEditOverlay.js';
import { loadMancerModels } from './scripts/mancer-settings.js';
import { loadMancerModels, loadTogetherAIModels } from './scripts/textgen-models.js';
import { appendFileContent, hasPendingFileAttachment, populateFileAttachment } from './scripts/chats.js';
import { replaceVariableMacros } from './scripts/variables.js';
import { initPresetManager } from './scripts/preset-manager.js';
@ -931,9 +931,7 @@ async function getStatusKobold() {
async function getStatusTextgen() {
const url = '/api/backends/text-completions/status';
let endpoint = textgen_settings.type === MANCER ?
MANCER_SERVER :
api_server_textgenerationwebui;
let endpoint = getTextGenServer();
if (!endpoint) {
console.warn('No endpoint for status check');
@ -949,7 +947,8 @@ async function getStatusTextgen() {
api_type: textgen_settings.type,
legacy_api:
textgen_settings.legacy_api &&
textgen_settings.type !== MANCER,
textgen_settings.type !== MANCER &&
textgen_settings.type !== TOGETHERAI,
}),
signal: abortStatusCheck.signal,
});
@ -959,6 +958,9 @@ async function getStatusTextgen() {
if (textgen_settings.type === MANCER) {
online_status = textgen_settings.mancer_model;
loadMancerModels(data?.data);
} else if (textgen_settings.type === TOGETHERAI) {
online_status = textgen_settings.togetherai_model;
loadTogetherAIModels(data?.data);
} else {
online_status = data?.result;
}
@ -2958,7 +2960,8 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
if (main_api === 'textgenerationwebui' &&
textgen_settings.streaming &&
textgen_settings.legacy_api &&
textgen_settings.type !== MANCER) {
textgen_settings.type !== MANCER &&
textgen_settings.type !== TOGETHERAI) {
toastr.error('Streaming is not supported for the Legacy API. Update Ooba and use --extensions openai to enable streaming.', undefined, { timeOut: 10000, preventDuplicates: true });
unblockGeneration();
return Promise.resolve();
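
Note: the "legacy API never applies to hosted backends" condition introduced above is repeated across several hunks of this diff (status check, Generate, request payload, tokenizer params). A minimal sketch of the intent, using a hypothetical usesLegacyApi helper that is not part of this commit:

// Hypothetical helper (not in this commit): the legacy Ooba API flag is only
// honored for self-hosted backends, never for hosted ones like Mancer or TogetherAI.
import { textgen_types, textgenerationwebui_settings as textgen_settings } from './scripts/textgen-settings.js';

function usesLegacyApi(settings = textgen_settings) {
    const { MANCER, TOGETHERAI } = textgen_types;
    return Boolean(settings.legacy_api) &&
        settings.type !== MANCER &&
        settings.type !== TOGETHERAI;
}
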
@ -7514,6 +7517,11 @@ async function connectAPISlash(_, text) {
button: '#api_button_textgenerationwebui',
type: textgen_types.KOBOLDCPP,
},
'togetherai': {
selected: 'textgenerationwebui',
button: '#api_button_textgenerationwebui',
type: textgen_types.TOGETHERAI,
},
'oai': {
selected: 'openai',
source: 'openai',
@ -7840,7 +7848,7 @@ jQuery(async function () {
}
registerSlashCommand('dupe', DupeChar, [], ' duplicates the currently selected character', true, true);
registerSlashCommand('api', connectAPISlash, [], '<span class="monospace">(kobold, horde, novel, ooba, tabby, mancer, aphrodite, kcpp, oai, claude, windowai, openrouter, scale, ai21, makersuite, mistralai)</span> connect to an API', true, true);
registerSlashCommand('api', connectAPISlash, [], '<span class="monospace">(kobold, horde, novel, ooba, tabby, mancer, aphrodite, kcpp, oai, claude, windowai, openrouter, scale, ai21, makersuite, mistralai, togetherai)</span> connect to an API', true, true);
registerSlashCommand('impersonate', doImpersonate, ['imp'], ' calls an impersonation response', true, true);
registerSlashCommand('delchat', doDeleteChat, [], ' deletes the current chat', true, true);
registerSlashCommand('closechat', doCloseChat, [], ' closes the current chat', true, true);
@ -8394,6 +8402,11 @@ jQuery(async function () {
await writeSecret(SECRET_KEYS.TABBY, tabbyKey);
}
const togetherKey = String($('#api_key_togetherai').val()).trim();
if (togetherKey.length) {
await writeSecret(SECRET_KEYS.TOGETHERAI, togetherKey);
}
const urlSourceId = getTextGenUrlSourceId();
if (urlSourceId && $(urlSourceId).val() !== '') {

View File

@ -382,7 +382,9 @@ function RA_autoconnect(PrevApi) {
}
break;
case 'textgenerationwebui':
if (textgen_settings.type === textgen_types.MANCER && secret_state[SECRET_KEYS.MANCER]) {
if ((textgen_settings.type === textgen_types.MANCER && secret_state[SECRET_KEYS.MANCER]) ||
(textgen_settings.type === textgen_types.TOGETHERAI && secret_state[SECRET_KEYS.TOGETHERAI])
) {
$('#api_button_textgenerationwebui').trigger('click');
}
else if (api_server_textgenerationwebui && isValidUrl(api_server_textgenerationwebui)) {

View File

@ -798,6 +798,10 @@ async function qrDeleteCallback(args, label) {
}
const idx = preset.quickReplySlots.findIndex(x => x.label == label);
if (idx === -1) {
toastr.warning('Confirm you are using proper case sensitivity!', `QR with label '${label}' not found`);
return '';
};
preset.quickReplySlots.splice(idx, 1);
preset.numberOfSlots--;
await fetch('/savequickreply', {

View File

@ -25,6 +25,12 @@
<a href="javascript:;" class="notes-link"><span class="note-link-span" title="Will generate a new random seed in SillyTavern that is then used in the ComfyUI workflow.">?</span></a>
</li>
</ul>
<div>Custom</div>
<div class="sd_comfy_workflow_editor_placeholder_actions">
<span id="sd_comfy_workflow_editor_placeholder_add" title="Add custom placeholder">+</span>
</div>
<ul class="sd_comfy_workflow_editor_placeholder_list" id="sd_comfy_workflow_editor_placeholder_list_custom">
</ul>
</div>
</div>
</div>

View File

@ -16,6 +16,7 @@ import {
user_avatar,
getCharacterAvatar,
formatCharacterAvatar,
substituteParams,
} from '../../../script.js';
import { getApiUrl, getContext, extension_settings, doExtrasFetch, modules, renderExtensionTemplate } from '../../extensions.js';
import { selected_group } from '../../group-chats.js';
@ -24,6 +25,7 @@ import { getMessageTimeStamp, humanizedDateTime } from '../../RossAscends-mods.j
import { SECRET_KEYS, secret_state } from '../../secrets.js';
import { getNovelUnlimitedImageGeneration, getNovelAnlas, loadNovelSubscriptionData } from '../../nai-settings.js';
import { getMultimodalCaption } from '../shared.js';
import { registerSlashCommand } from '../../slash-commands.js';
export { MODULE_NAME };
// Wraps a string into monospace font-face span
@ -44,6 +46,7 @@ const sources = {
vlad: 'vlad',
openai: 'openai',
comfy: 'comfy',
togetherai: 'togetherai',
};
const generationMode = {
@ -830,6 +833,16 @@ function onComfyWorkflowChange() {
extension_settings.sd.comfy_workflow = $('#sd_comfy_workflow').find(':selected').val();
saveSettingsDebounced();
}
async function changeComfyWorkflow(_, name) {
name = name.replace(/(\.json)?$/i, '.json');
if ($(`#sd_comfy_workflow > [value="${name}"]`).length > 0) {
extension_settings.sd.comfy_workflow = name;
$('#sd_comfy_workflow').val(extension_settings.sd.comfy_workflow);
saveSettingsDebounced();
} else {
toastr.error(`ComfyUI Workflow "${name}" does not exist.`);
}
}
async function validateAutoUrl() {
try {
@ -905,7 +918,7 @@ async function onModelChange() {
extension_settings.sd.model = $('#sd_model').find(':selected').val();
saveSettingsDebounced();
const cloudSources = [sources.horde, sources.novel, sources.openai];
const cloudSources = [sources.horde, sources.novel, sources.openai, sources.togetherai];
if (cloudSources.includes(extension_settings.sd.source)) {
return;
@ -1038,11 +1051,14 @@ async function loadSamplers() {
samplers = await loadVladSamplers();
break;
case sources.openai:
samplers = await loadOpenAiSamplers();
samplers = ['N/A'];
break;
case sources.comfy:
samplers = await loadComfySamplers();
break;
case sources.togetherai:
samplers = ['N/A'];
break;
}
for (const sampler of samplers) {
@ -1052,6 +1068,11 @@ async function loadSamplers() {
option.selected = sampler === extension_settings.sd.sampler;
$('#sd_sampler').append(option);
}
if (!extension_settings.sd.sampler && samplers.length > 0) {
extension_settings.sd.sampler = samplers[0];
$('#sd_sampler').val(extension_settings.sd.sampler).trigger('change');
}
}
async function loadHordeSamplers() {
@ -1108,10 +1129,6 @@ async function loadAutoSamplers() {
}
}
async function loadOpenAiSamplers() {
return ['N/A'];
}
async function loadVladSamplers() {
if (!extension_settings.sd.vlad_url) {
return [];
@ -1200,6 +1217,9 @@ async function loadModels() {
case sources.comfy:
models = await loadComfyModels();
break;
case sources.togetherai:
models = await loadTogetherAIModels();
break;
}
for (const model of models) {
@ -1209,6 +1229,30 @@ async function loadModels() {
option.selected = model.value === extension_settings.sd.model;
$('#sd_model').append(option);
}
if (!extension_settings.sd.model && models.length > 0) {
extension_settings.sd.model = models[0].value;
$('#sd_model').val(extension_settings.sd.model).trigger('change');
}
}
async function loadTogetherAIModels() {
if (!secret_state[SECRET_KEYS.TOGETHERAI]) {
console.debug('TogetherAI API key is not set.');
return [];
}
const result = await fetch('/api/sd/together/models', {
method: 'POST',
headers: getRequestHeaders(),
});
if (result.ok) {
const data = await result.json();
return data;
}
return [];
}
async function loadHordeModels() {
@ -1422,6 +1466,9 @@ async function loadSchedulers() {
case sources.openai:
schedulers = ['N/A'];
break;
case sources.togetherai:
schedulers = ['N/A'];
break;
case sources.comfy:
schedulers = await loadComfySchedulers();
break;
@ -1481,6 +1528,9 @@ async function loadVaes() {
case sources.openai:
vaes = ['N/A'];
break;
case sources.togetherai:
vaes = ['N/A'];
break;
case sources.comfy:
vaes = await loadComfyVaes();
break;
@ -1861,6 +1911,9 @@ async function sendGenerationRequest(generationType, prompt, characterName = nul
case sources.comfy:
result = await generateComfyImage(prefixedPrompt);
break;
case sources.togetherai:
result = await generateTogetherAIImage(prefixedPrompt);
break;
}
if (!result.data) {
@ -1883,6 +1936,29 @@ async function sendGenerationRequest(generationType, prompt, characterName = nul
callback ? callback(prompt, base64Image, generationType) : sendMessage(prompt, base64Image, generationType);
}
async function generateTogetherAIImage(prompt) {
const result = await fetch('/api/sd/together/generate', {
method: 'POST',
headers: getRequestHeaders(),
body: JSON.stringify({
prompt: prompt,
negative_prompt: extension_settings.sd.negative_prompt,
model: extension_settings.sd.model,
steps: extension_settings.sd.steps,
width: extension_settings.sd.width,
height: extension_settings.sd.height,
}),
});
if (result.ok) {
const data = await result.json();
return { format: 'jpg', data: data?.output?.choices?.[0]?.image_base64 };
} else {
const text = await result.text();
throw new Error(text);
}
}
/**
* Generates an "extras" image using a provided prompt and other settings.
*
@ -2180,6 +2256,9 @@ async function generateComfyImage(prompt) {
placeholders.forEach(ph => {
workflow = workflow.replace(`"%${ph}%"`, JSON.stringify(extension_settings.sd[ph]));
});
(extension_settings.sd.comfy_placeholders ?? []).forEach(ph => {
workflow = workflow.replace(`"%${ph.find}%"`, JSON.stringify(substituteParams(ph.replace)));
});
console.log(`{
"prompt": ${workflow}
}`);
@ -2216,6 +2295,50 @@ async function onComfyOpenWorkflowEditorClick() {
};
$('#sd_comfy_workflow_editor_name').text(extension_settings.sd.comfy_workflow);
$('#sd_comfy_workflow_editor_workflow').val(workflow);
const addPlaceholderDom = (placeholder) => {
const el = $(`
<li class="sd_comfy_workflow_editor_not_found" data-placeholder="${placeholder.find}">
<span class="sd_comfy_workflow_editor_custom_remove" title="Remove custom placeholder"></span>
<span class="sd_comfy_workflow_editor_custom_final">"%${placeholder.find}%"</span><br>
<input placeholder="find" title="find" type="text" class="text_pole sd_comfy_workflow_editor_custom_find" value=""><br>
<input placeholder="replace" title="replace" type="text" class="text_pole sd_comfy_workflow_editor_custom_replace">
</li>
`);
$('#sd_comfy_workflow_editor_placeholder_list_custom').append(el);
el.find('.sd_comfy_workflow_editor_custom_find').val(placeholder.find);
el.find('.sd_comfy_workflow_editor_custom_find').on('input', function() {
placeholder.find = this.value;
el.find('.sd_comfy_workflow_editor_custom_final').text(`"%${this.value}%"`);
el.attr('data-placeholder', `${this.value}`);
checkPlaceholders();
saveSettingsDebounced();
});
el.find('.sd_comfy_workflow_editor_custom_replace').val(placeholder.replace);
el.find('.sd_comfy_workflow_editor_custom_replace').on('input', function() {
placeholder.replace = this.value;
saveSettingsDebounced();
});
el.find('.sd_comfy_workflow_editor_custom_remove').on('click', () => {
el.remove();
extension_settings.sd.comfy_placeholders.splice(extension_settings.sd.comfy_placeholders.indexOf(placeholder));
saveSettingsDebounced();
});
};
$('#sd_comfy_workflow_editor_placeholder_add').on('click', () => {
if (!extension_settings.sd.comfy_placeholders) {
extension_settings.sd.comfy_placeholders = [];
}
const placeholder = {
find: '',
replace: '',
};
extension_settings.sd.comfy_placeholders.push(placeholder);
addPlaceholderDom(placeholder);
saveSettingsDebounced();
});
(extension_settings.sd.comfy_placeholders ?? []).forEach(placeholder=>{
addPlaceholderDom(placeholder);
});
checkPlaceholders();
$('#sd_comfy_workflow_editor_workflow').on('input', checkPlaceholders);
if (await popupResult) {
@ -2376,6 +2499,8 @@ function isValidState() {
return secret_state[SECRET_KEYS.OPENAI];
case sources.comfy:
return true;
case sources.togetherai:
return secret_state[SECRET_KEYS.TOGETHERAI];
}
}
@ -2481,7 +2606,8 @@ $('#sd_dropdown [id]').on('click', function () {
});
jQuery(async () => {
getContext().registerSlashCommand('imagine', generatePicture, ['sd', 'img', 'image'], helpString, true, true);
registerSlashCommand('imagine', generatePicture, ['sd', 'img', 'image'], helpString, true, true);
registerSlashCommand('imagine-comfy-workflow', changeComfyWorkflow, ['icw'], '(workflowName) - change the workflow to be used for image generation with ComfyUI, e.g. <tt>/imagine-comfy-workflow MyWorkflow</tt>')
$('#extensions_settings').append(renderExtensionTemplate('stable-diffusion', 'settings', defaultSettings));
$('#sd_source').on('change', onSourceChange);
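
For clarity, a standalone sketch of the custom placeholder substitution added to generateComfyImage() above. The applyCustomPlaceholders helper name and the sample workflow are illustrative only; the committed code iterates extension_settings.sd.comfy_placeholders and runs the replacement value through substituteParams() first.

// Illustrative only: mirrors the replacement loop added to generateComfyImage().
// Each { find, replace } pair turns the literal token "%find%" (quotes included)
// in the workflow JSON into a JSON-encoded replacement string.
function applyCustomPlaceholders(workflowJson, customPlaceholders) {
    let workflow = workflowJson;
    for (const ph of customPlaceholders ?? []) {
        workflow = workflow.replace(`"%${ph.find}%"`, JSON.stringify(ph.replace));
    }
    return workflow;
}

// Example: a ComfyUI node referencing a custom "%artist%" placeholder.
const exampleWorkflow = '{ "6": { "inputs": { "text": "%artist%" } } }';
console.log(applyCustomPlaceholders(exampleWorkflow, [{ find: 'artist', replace: 'watercolor, detailed' }]));
// -> { "6": { "inputs": { "text": "watercolor, detailed" } } }
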

View File

@ -35,6 +35,7 @@
<option value="novel">NovelAI Diffusion</option>
<option value="openai">OpenAI (DALL-E)</option>
<option value="comfy">ComfyUI</option>
<option value="togetherai">TogetherAI</option>
</select>
<div data-sd-source="auto">
<label for="sd_auto_url">SD Web UI URL</label>

View File

@ -82,3 +82,17 @@
.sd_comfy_workflow_editor_placeholder_list>li>.notes-link {
cursor: help;
}
.sd_comfy_workflow_editor_placeholder_list input {
font-size: inherit;
margin: 0;
}
.sd_comfy_workflow_editor_custom_remove, #sd_comfy_workflow_editor_placeholder_add {
cursor: pointer;
font-weight: bold;
width: 1em;
opacity: 0.5;
&:hover {
opacity: 1;
}
}

View File

@ -0,0 +1,126 @@
import { saveSettingsDebounced } from '../script.js';
import { getTextTokens } from './tokenizers.js';
import { uuidv4 } from './utils.js';
export const BIAS_CACHE = new Map();
/**
* Displays the logit bias list in the specified container.
* @param {object} logitBias Logit bias object
* @param {string} containerSelector Container element selector
* @returns
*/
export function displayLogitBias(logitBias, containerSelector) {
if (!Array.isArray(logitBias)) {
console.log('Logit bias set not found');
return;
}
$(containerSelector).find('.logit_bias_list').empty();
for (const entry of logitBias) {
if (entry) {
createLogitBiasListItem(entry, logitBias, containerSelector);
}
}
BIAS_CACHE.delete(containerSelector);
}
/**
* Creates a new logit bias entry
* @param {object[]} logitBias Array of logit bias objects
* @param {string} containerSelector Container element ID
*/
export function createNewLogitBiasEntry(logitBias, containerSelector) {
const entry = { id: uuidv4(), text: '', value: 0 };
logitBias.push(entry);
BIAS_CACHE.delete(containerSelector);
createLogitBiasListItem(entry, logitBias, containerSelector);
saveSettingsDebounced();
}
/**
* Creates a logit bias list item.
* @param {object} entry Logit bias entry
* @param {object[]} logitBias Array of logit bias objects
* @param {string} containerSelector Container element ID
*/
function createLogitBiasListItem(entry, logitBias, containerSelector) {
const id = entry.id;
const template = $('#logit_bias_template .logit_bias_form').clone();
template.data('id', id);
template.find('.logit_bias_text').val(entry.text).on('input', function () {
entry.text = $(this).val();
BIAS_CACHE.delete(containerSelector);
saveSettingsDebounced();
});
template.find('.logit_bias_value').val(entry.value).on('input', function () {
entry.value = Number($(this).val());
BIAS_CACHE.delete(containerSelector);
saveSettingsDebounced();
});
template.find('.logit_bias_remove').on('click', function () {
$(this).closest('.logit_bias_form').remove();
const index = logitBias.indexOf(entry);
if (index > -1) {
logitBias.splice(index, 1);
}
BIAS_CACHE.delete(containerSelector);
saveSettingsDebounced();
});
$(containerSelector).find('.logit_bias_list').prepend(template);
}
/**
* Populate logit bias list from preset.
* @param {object[]} biasPreset Bias preset
* @param {number} tokenizerType Tokenizer type (see tokenizers.js)
* @param {(bias: number, sequence: number[]) => object} getBiasObject Transformer function to create bias object
* @returns {object[]} Array of logit bias objects
*/
export function getLogitBiasListResult(biasPreset, tokenizerType, getBiasObject) {
const result = [];
for (const entry of biasPreset) {
if (entry.text?.length > 0) {
const text = entry.text.trim();
// Skip empty lines
if (text.length === 0) {
continue;
}
// Verbatim text
if (text.startsWith('{') && text.endsWith('}')) {
const tokens = getTextTokens(tokenizerType, text.slice(1, -1));
result.push(getBiasObject(entry.value, tokens));
}
// Raw token ids, JSON serialized
else if (text.startsWith('[') && text.endsWith(']')) {
try {
const tokens = JSON.parse(text);
if (Array.isArray(tokens) && tokens.every(t => Number.isInteger(t))) {
result.push(getBiasObject(entry.value, tokens));
} else {
throw new Error('Not an array of integers');
}
} catch (err) {
console.log(`Failed to parse logit bias token list: ${text}`, err);
}
}
// Text with a leading space
else {
const biasText = ` ${text}`;
const tokens = getTextTokens(tokenizerType, biasText);
result.push(getBiasObject(entry.value, tokens));
}
}
}
return result;
}
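
A minimal usage sketch of the new shared module, assuming the page-global jQuery used throughout the client code; the MY_BIAS_KEY selector, mySettings object, and #my_logit_bias_new_entry button are illustrative stand-ins (the real call sites are in the nai-settings.js and textgen-settings.js diffs further down).

// Illustrative consumer of the shared logit bias helpers.
import { BIAS_CACHE, createNewLogitBiasEntry, displayLogitBias, getLogitBiasListResult } from './logit-bias.js';

const MY_BIAS_KEY = '#my_api-settings';  // container selector, also used as the cache key
const mySettings = { logit_bias: [] };   // persisted settings object holding the entries

// Render saved entries into the container and wire up an "Add" button.
displayLogitBias(mySettings.logit_bias, MY_BIAS_KEY);
$('#my_logit_bias_new_entry').on('click', () => createNewLogitBiasEntry(mySettings.logit_bias, MY_BIAS_KEY));

// At generation time, convert the entries into API-specific bias objects,
// caching the result until the user edits the list again (edits clear the cache).
function getMyLogitBias(tokenizerType) {
    if (!BIAS_CACHE.has(MY_BIAS_KEY)) {
        const biasList = getLogitBiasListResult(mySettings.logit_bias, tokenizerType,
            (bias, sequence) => ({ bias, sequence }));
        BIAS_CACHE.set(MY_BIAS_KEY, biasList);
    }
    return BIAS_CACHE.get(MY_BIAS_KEY);
}
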

View File

@ -1,64 +0,0 @@
import { setGenerationParamsFromPreset } from '../script.js';
import { isMobile } from './RossAscends-mods.js';
import { textgenerationwebui_settings as textgen_settings } from './textgen-settings.js';
let models = [];
export async function loadMancerModels(data) {
if (!Array.isArray(data)) {
console.error('Invalid Mancer models data', data);
return;
}
models = data;
$('#mancer_model').empty();
for (const model of data) {
const option = document.createElement('option');
option.value = model.id;
option.text = model.name;
option.selected = model.id === textgen_settings.mancer_model;
$('#mancer_model').append(option);
}
}
function onMancerModelSelect() {
const modelId = String($('#mancer_model').val());
textgen_settings.mancer_model = modelId;
$('#api_button_textgenerationwebui').trigger('click');
const limits = models.find(x => x.id === modelId)?.limits;
setGenerationParamsFromPreset({ max_length: limits.context, genamt: limits.completion });
}
function getMancerModelTemplate(option) {
const model = models.find(x => x.id === option?.element?.value);
if (!option.id || !model) {
return option.text;
}
const creditsPerPrompt = (model.limits?.context - model.limits?.completion) * model.pricing?.prompt;
const creditsPerCompletion = model.limits?.completion * model.pricing?.completion;
const creditsTotal = Math.round(creditsPerPrompt + creditsPerCompletion).toFixed(0);
return $((`
<div class="flex-container flexFlowColumn">
<div><strong>${DOMPurify.sanitize(model.name)}</strong> | <span>${model.limits?.context} ctx</span> / <span>${model.limits?.completion} res</span> | <small>Credits per request (max): ${creditsTotal}</small></div>
</div>
`));
}
jQuery(function () {
$('#mancer_model').on('change', onMancerModelSelect);
if (!isMobile()) {
$('#mancer_model').select2({
placeholder: 'Select a model',
searchInputPlaceholder: 'Search models...',
searchInputCssClass: 'text_pole',
width: '100%',
templateResult: getMancerModelTemplate,
});
}
});

View File

@ -15,8 +15,8 @@ import {
getSortableDelay,
getStringHash,
onlyUnique,
uuidv4,
} from './utils.js';
import { BIAS_CACHE, createNewLogitBiasEntry, displayLogitBias, getLogitBiasListResult } from './logit-bias.js';
const default_preamble = '[ Style: chat, complex, sensory, visceral ]';
const default_order = [1, 5, 0, 2, 3, 4];
@ -59,7 +59,7 @@ const nai_tiers = {
let novel_data = null;
let badWordsCache = {};
let biasCache = undefined;
const BIAS_KEY = '#novel_api-settings';
export function setNovelData(data) {
novel_data = data;
@ -145,7 +145,7 @@ export function loadNovelSettings(settings) {
//load the rest of the Novel settings without any checks
nai_settings.model_novel = settings.model_novel;
$('#model_novel_select').val(nai_settings.model_novel);
$(`#model_novel_select option[value=${nai_settings.model_novel}]`).attr('selected', true);
$(`#model_novel_select option[value=${nai_settings.model_novel}]`).prop('selected', true);
if (settings.nai_preamble !== undefined) {
nai_settings.preamble = settings.nai_preamble;
@ -217,7 +217,7 @@ function loadNovelSettingsUi(ui_settings) {
$('#streaming_novel').prop('checked', ui_settings.streaming_novel);
sortItemsByOrder(ui_settings.order);
displayLogitBias(ui_settings.logit_bias);
displayLogitBias(ui_settings.logit_bias, BIAS_KEY);
}
const sliders = [
@ -433,8 +433,8 @@ export function getNovelGenerationData(finalPrompt, settings, maxLength, isImper
let logitBias = [];
if (tokenizerType !== tokenizers.NONE && Array.isArray(nai_settings.logit_bias) && nai_settings.logit_bias.length) {
logitBias = biasCache || calculateLogitBias();
biasCache = logitBias;
logitBias = BIAS_CACHE.get(BIAS_KEY) || calculateLogitBias();
BIAS_CACHE.set(BIAS_KEY, logitBias);
}
return {
@ -525,65 +525,14 @@ function saveSamplingOrder() {
saveSettingsDebounced();
}
function displayLogitBias(logit_bias) {
if (!Array.isArray(logit_bias)) {
console.log('Logit bias set not found');
return;
}
$('.novelai_logit_bias_list').empty();
for (const entry of logit_bias) {
if (entry) {
createLogitBiasListItem(entry);
}
}
biasCache = undefined;
}
function createNewLogitBiasEntry() {
const entry = { id: uuidv4(), text: '', value: 0 };
nai_settings.logit_bias.push(entry);
biasCache = undefined;
createLogitBiasListItem(entry);
saveSettingsDebounced();
}
function createLogitBiasListItem(entry) {
const id = entry.id;
const template = $('#novelai_logit_bias_template .novelai_logit_bias_form').clone();
template.data('id', id);
template.find('.novelai_logit_bias_text').val(entry.text).on('input', function () {
entry.text = $(this).val();
biasCache = undefined;
saveSettingsDebounced();
});
template.find('.novelai_logit_bias_value').val(entry.value).on('input', function () {
entry.value = Number($(this).val());
biasCache = undefined;
saveSettingsDebounced();
});
template.find('.novelai_logit_bias_remove').on('click', function () {
$(this).closest('.novelai_logit_bias_form').remove();
const index = nai_settings.logit_bias.indexOf(entry);
if (index > -1) {
nai_settings.logit_bias.splice(index, 1);
}
biasCache = undefined;
saveSettingsDebounced();
});
$('.novelai_logit_bias_list').prepend(template);
}
/**
* Calculates logit bias for Novel AI
* @returns {object[]} Array of logit bias objects
*/
function calculateLogitBias() {
const bias_preset = nai_settings.logit_bias;
const biasPreset = nai_settings.logit_bias;
if (!Array.isArray(bias_preset) || bias_preset.length === 0) {
if (!Array.isArray(biasPreset) || biasPreset.length === 0) {
return [];
}
@ -605,47 +554,7 @@ function calculateLogitBias() {
};
}
const result = [];
for (const entry of bias_preset) {
if (entry.text?.length > 0) {
const text = entry.text.trim();
// Skip empty lines
if (text.length === 0) {
continue;
}
// Verbatim text
if (text.startsWith('{') && text.endsWith('}')) {
const tokens = getTextTokens(tokenizerType, text.slice(1, -1));
result.push(getBiasObject(entry.value, tokens));
}
// Raw token ids, JSON serialized
else if (text.startsWith('[') && text.endsWith(']')) {
try {
const tokens = JSON.parse(text);
if (Array.isArray(tokens) && tokens.every(t => Number.isInteger(t))) {
result.push(getBiasObject(entry.value, tokens));
} else {
throw new Error('Not an array of integers');
}
} catch (err) {
console.log(`Failed to parse logit bias token list: ${text}`, err);
}
}
// Text with a leading space
else {
const biasText = ` ${text}`;
const tokens = getTextTokens(tokenizerType, biasText);
result.push(getBiasObject(entry.value, tokens));
}
}
}
const result = getLogitBiasListResult(biasPreset, tokenizerType, getBiasObject);
return result;
}
@ -778,5 +687,5 @@ jQuery(function () {
saveSamplingOrder();
});
$('#novelai_logit_bias_new_entry').on('click', createNewLogitBiasEntry);
$('#novelai_logit_bias_new_entry').on('click', () => createNewLogitBiasEntry(nai_settings.logit_bias, BIAS_KEY));
});

View File

@ -15,6 +15,7 @@ export const SECRET_KEYS = {
MAKERSUITE: 'api_key_makersuite',
SERPAPI: 'api_key_serpapi',
MISTRALAI: 'api_key_mistralai',
TOGETHERAI: 'api_key_togetherai',
};
const INPUT_MAP = {
@ -31,6 +32,7 @@ const INPUT_MAP = {
[SECRET_KEYS.APHRODITE]: '#api_key_aphrodite',
[SECRET_KEYS.TABBY]: '#api_key_tabby',
[SECRET_KEYS.MISTRALAI]: '#api_key_mistralai',
[SECRET_KEYS.TOGETHERAI]: '#api_key_togetherai',
};
async function clearSecret() {

View File

@ -0,0 +1,120 @@
import { setGenerationParamsFromPreset } from '../script.js';
import { isMobile } from './RossAscends-mods.js';
import { textgenerationwebui_settings as textgen_settings } from './textgen-settings.js';
let mancerModels = [];
let togetherModels = [];
export async function loadTogetherAIModels(data) {
if (!Array.isArray(data)) {
console.error('Invalid Together AI models data', data);
return;
}
togetherModels = data;
$('#model_togetherai_select').empty();
for (const model of data) {
// Hey buddy, I think you've got the wrong door.
if (model.display_type === 'image') {
continue;
}
const option = document.createElement('option');
option.value = model.name;
option.text = model.display_name;
option.selected = model.name === textgen_settings.togetherai_model;
$('#model_togetherai_select').append(option);
}
}
export async function loadMancerModels(data) {
if (!Array.isArray(data)) {
console.error('Invalid Mancer models data', data);
return;
}
mancerModels = data;
$('#mancer_model').empty();
for (const model of data) {
const option = document.createElement('option');
option.value = model.id;
option.text = model.name;
option.selected = model.id === textgen_settings.mancer_model;
$('#mancer_model').append(option);
}
}
function onMancerModelSelect() {
const modelId = String($('#mancer_model').val());
textgen_settings.mancer_model = modelId;
$('#api_button_textgenerationwebui').trigger('click');
const limits = mancerModels.find(x => x.id === modelId)?.limits;
setGenerationParamsFromPreset({ max_length: limits.context, genamt: limits.completion });
}
function onTogetherModelSelect() {
const modelName = String($('#model_togetherai_select').val());
textgen_settings.togetherai_model = modelName;
$('#api_button_textgenerationwebui').trigger('click');
const model = togetherModels.find(x => x.name === modelName);
setGenerationParamsFromPreset({ max_length: model.context_length });
}
function getMancerModelTemplate(option) {
const model = mancerModels.find(x => x.id === option?.element?.value);
if (!option.id || !model) {
return option.text;
}
const creditsPerPrompt = (model.limits?.context - model.limits?.completion) * model.pricing?.prompt;
const creditsPerCompletion = model.limits?.completion * model.pricing?.completion;
const creditsTotal = Math.round(creditsPerPrompt + creditsPerCompletion).toFixed(0);
return $((`
<div class="flex-container flexFlowColumn">
<div><strong>${DOMPurify.sanitize(model.name)}</strong> | <span>${model.limits?.context} ctx</span> / <span>${model.limits?.completion} res</span> | <small>Credits per request (max): ${creditsTotal}</small></div>
</div>
`));
}
function getTogetherModelTemplate(option) {
const model = togetherModels.find(x => x.name === option?.element?.value);
if (!option.id || !model) {
return option.text;
}
return $((`
<div class="flex-container flexFlowColumn">
<div><strong>${DOMPurify.sanitize(model.name)}</strong> | <span>${model.context_length || '???'} tokens</span></div>
<div><small>${DOMPurify.sanitize(model.description)}</small></div>
</div>
`));
}
jQuery(function () {
$('#mancer_model').on('change', onMancerModelSelect);
$('#model_togetherai_select').on('change', onTogetherModelSelect);
if (!isMobile()) {
$('#mancer_model').select2({
placeholder: 'Select a model',
searchInputPlaceholder: 'Search models...',
searchInputCssClass: 'text_pole',
width: '100%',
templateResult: getMancerModelTemplate,
});
$('#model_togetherai_select').select2({
placeholder: 'Select a model',
searchInputPlaceholder: 'Search models...',
searchInputCssClass: 'text_pole',
width: '100%',
templateResult: getTogetherModelTemplate,
});
}
});
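
For reference, the model objects these loaders read look roughly like the following; the shapes are inferred from the fields accessed above, and the sample values are placeholders rather than actual API responses.

// Shape inferred from loadTogetherAIModels()/getTogetherModelTemplate().
const exampleTogetherModel = {
    name: 'Gryphe/MythoMax-L2-13b',  // <option> value, stored as textgen_settings.togetherai_model
    display_name: 'MythoMax 13B',    // label shown in the dropdown
    display_type: 'chat',            // entries with display_type === 'image' are skipped here
    context_length: 4096,            // fed to setGenerationParamsFromPreset({ max_length })
    description: 'Sample description text.',
};

// Shape inferred from loadMancerModels()/getMancerModelTemplate().
const exampleMancerModel = {
    id: 'mytholite',
    name: 'MythoLite',
    limits: { context: 8192, completion: 512 },
    pricing: { prompt: 0.0, completion: 0.0 },
};
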

View File

@ -9,6 +9,7 @@ import {
setOnlineStatus,
substituteParams,
} from '../script.js';
import { BIAS_CACHE, createNewLogitBiasEntry, displayLogitBias, getLogitBiasListResult } from './logit-bias.js';
import {
power_user,
@ -31,15 +32,18 @@ export const textgen_types = {
APHRODITE: 'aphrodite',
TABBY: 'tabby',
KOBOLDCPP: 'koboldcpp',
TOGETHERAI: 'togetherai',
};
const { MANCER, APHRODITE } = textgen_types;
const { MANCER, APHRODITE, TOGETHERAI } = textgen_types;
const BIAS_KEY = '#textgenerationwebui_api-settings';
// Maybe let it be configurable in the future?
// (7 days later) The future has come.
const MANCER_SERVER_KEY = 'mancer_server';
const MANCER_SERVER_DEFAULT = 'https://neuro.mancer.tech';
export let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEFAULT;
let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEFAULT;
let TOGETHERAI_SERVER = 'https://api.together.xyz';
const KOBOLDCPP_ORDER = [6, 0, 1, 3, 4, 2, 5];
const settings = {
@ -89,8 +93,10 @@ const settings = {
//prompt_log_probs_aphrodite: 0,
type: textgen_types.OOBA,
mancer_model: 'mytholite',
togetherai_model: 'Gryphe/MythoMax-L2-13b',
legacy_api: false,
sampler_order: KOBOLDCPP_ORDER,
logit_bias: [],
n: 1,
};
@ -144,6 +150,7 @@ const setting_names = [
//'prompt_log_probs_aphrodite'
'sampler_order',
'n',
'logit_bias',
];
async function selectPreset(name) {
@ -159,13 +166,14 @@ async function selectPreset(name) {
setSettingByName(name, value, true);
}
setGenerationParamsFromPreset(preset);
displayLogitBias(preset.logit_bias, BIAS_KEY);
saveSettingsDebounced();
}
function formatTextGenURL(value) {
try {
// Mancer doesn't need any formatting (it's hardcoded)
if (settings.type === MANCER) {
// Mancer/Together doesn't need any formatting (it's hardcoded)
if (settings.type === MANCER || settings.type === TOGETHERAI) {
return value;
}
@ -240,6 +248,42 @@ function getCustomTokenBans() {
return result.filter(onlyUnique).map(x => String(x)).join(',');
}
/**
* Calculates logit bias object from the logit bias list.
* @returns {object} Logit bias object
*/
function calculateLogitBias() {
if (!Array.isArray(settings.logit_bias) || settings.logit_bias.length === 0) {
return {};
}
const tokenizer = SENTENCEPIECE_TOKENIZERS.includes(power_user.tokenizer) ? power_user.tokenizer : tokenizers.LLAMA;
const result = {};
/**
* Adds bias to the logit bias object.
* @param {number} bias
* @param {number[]} sequence
* @returns {object} Accumulated logit bias object
*/
function addBias(bias, sequence) {
if (sequence.length === 0) {
return;
}
for (const logit of sequence) {
const key = String(logit);
result[key] = bias;
}
return result;
}
getLogitBiasListResult(settings.logit_bias, tokenizer, addBias);
return result;
}
function loadTextGenSettings(data, loadedSettings) {
textgenerationwebui_presets = convertPresets(data.textgenerationwebui_presets);
textgenerationwebui_preset_names = data.textgenerationwebui_preset_names ?? [];
@ -267,6 +311,7 @@ function loadTextGenSettings(data, loadedSettings) {
$('#textgen_type').val(settings.type);
showTypeSpecificControls(settings.type);
displayLogitBias(settings.logit_bias, BIAS_KEY);
//this is needed because showTypeSpecificControls() does not handle NOT declarations
if (settings.type === textgen_types.APHRODITE) {
$('[data-forAphro=False]').each(function () {
@ -412,6 +457,8 @@ jQuery(function () {
saveSettingsDebounced();
});
}
$('#textgen_logit_bias_new_entry').on('click', () => createNewLogitBiasEntry(settings.logit_bias, BIAS_KEY));
});
function showTypeSpecificControls(type) {
@ -437,6 +484,11 @@ function setSettingByName(setting, value, trigger) {
return;
}
if ('logit_bias' === setting) {
settings.logit_bias = Array.isArray(value) ? value : [];
return;
}
const isCheckbox = $(`#${setting}_textgenerationwebui`).attr('type') == 'checkbox';
const isText = $(`#${setting}_textgenerationwebui`).attr('type') == 'text' || $(`#${setting}_textgenerationwebui`).is('textarea');
if (isCheckbox) {
@ -546,6 +598,10 @@ function getModel() {
return settings.mancer_model;
}
if (settings.type === TOGETHERAI) {
return settings.togetherai_model;
}
if (settings.type === APHRODITE) {
return online_status;
}
@ -553,6 +609,18 @@ function getModel() {
return undefined;
}
export function getTextGenServer() {
if (settings.type === MANCER) {
return MANCER_SERVER;
}
if (settings.type === TOGETHERAI) {
return TOGETHERAI_SERVER;
}
return api_server_textgenerationwebui;
}
export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, isContinue, cfgValues, type) {
const canMultiSwipe = !isContinue && !isImpersonate && type !== 'quiet';
let APIflags = {
@ -590,10 +658,8 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
toIntArray(getCustomTokenBans()) :
getCustomTokenBans(),
'api_type': settings.type,
'api_server': settings.type === MANCER ?
MANCER_SERVER :
api_server_textgenerationwebui,
'legacy_api': settings.legacy_api && settings.type !== MANCER,
'api_server': getTextGenServer(),
'legacy_api': settings.legacy_api && settings.type !== MANCER && settings.type !== TOGETHERAI,
'sampler_order': settings.type === textgen_types.KOBOLDCPP ?
settings.sampler_order :
undefined,
@ -625,6 +691,12 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
APIflags = Object.assign(APIflags, aphroditeExclusionFlags);
}
if (Array.isArray(settings.logit_bias) && settings.logit_bias.length) {
const logitBias = BIAS_CACHE.get(BIAS_KEY) || calculateLogitBias();
BIAS_CACHE.set(BIAS_KEY, logitBias);
APIflags.logit_bias = logitBias;
}
return APIflags;
}
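
To make the new logit_bias plumbing concrete: a UI entry is tokenized by getLogitBiasListResult() and flattened by the addBias() accumulator above into a token-id-to-bias map, which is cached and attached to APIflags. The token ids below are hypothetical, for illustration only.

// One UI entry as stored in settings.logit_bias:
const uiEntry = { id: 'uuid-placeholder', text: 'dragon', value: -2 };

// calculateLogitBias() tokenizes ' dragon' (leading space added for plain text entries)
// and writes one key per token id. With hypothetical token ids [8539, 263] the result is:
const apiLogitBias = {
    '8539': -2,
    '263': -2,
};
// Cached in BIAS_CACHE under '#textgenerationwebui_api-settings' and sent as APIflags.logit_bias.
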

View File

@ -6,7 +6,7 @@ import { getStringHash } from './utils.js';
import { kai_flags } from './kai-settings.js';
import { textgen_types, textgenerationwebui_settings as textgen_settings } from './textgen-settings.js';
const { OOBA, TABBY, KOBOLDCPP, MANCER } = textgen_types;
const { OOBA, TABBY, KOBOLDCPP, MANCER, TOGETHERAI } = textgen_types;
export const CHARACTERS_PER_TOKEN_RATIO = 3.35;
const TOKENIZER_WARNING_KEY = 'tokenizationWarningShown';
@ -540,7 +540,8 @@ function getTextgenAPITokenizationParams(str) {
url: api_server_textgenerationwebui,
legacy_api:
textgen_settings.legacy_api &&
textgen_settings.type !== MANCER,
textgen_settings.type !== MANCER &&
textgen_settings.type !== TOGETHERAI,
};
}

View File

@ -3448,30 +3448,30 @@ a {
height: 100%;
}
.novelai_logit_bias_form {
.logit_bias_form {
display: flex;
flex-direction: row;
column-gap: 10px;
align-items: center;
}
.novelai_logit_bias_text,
.novelai_logit_bias_value {
.logit_bias_text,
.logit_bias_value {
flex: 1;
}
.novelai_logit_bias_list {
.logit_bias_list {
display: flex;
flex-direction: column;
gap: 10px;
}
.novelai_logit_bias_list:empty {
.logit_bias_list:empty {
width: 100%;
height: 100%;
}
.novelai_logit_bias_list:empty::before {
.logit_bias_list:empty::before {
display: flex;
align-items: center;
justify-content: center;
@ -3480,7 +3480,7 @@ a {
width: 100%;
height: 100%;
opacity: 0.8;
min-height: 2.5rem;
min-height: 2.5em;
}
.openai_logit_bias_preset_form {

View File

@ -11,6 +11,14 @@ function getMancerHeaders() {
}) : {};
}
function getTogetherAIHeaders() {
const apiKey = readSecret(SECRET_KEYS.TOGETHERAI);
return apiKey ? ({
'Authorization': `Bearer ${apiKey}`,
}) : {};
}
function getAphroditeHeaders() {
const apiKey = readSecret(SECRET_KEYS.APHRODITE);
@ -58,6 +66,9 @@ function setAdditionalHeaders(request, args, server) {
case TEXTGEN_TYPES.TABBY:
headers = getTabbyHeaders();
break;
case TEXTGEN_TYPES.TOGETHERAI:
headers = getTogetherAIHeaders();
break;
default:
headers = server ? getOverrideHeaders((new URL(server))?.host) : {};
break;

View File

@ -171,8 +171,21 @@ const TEXTGEN_TYPES = {
APHRODITE: 'aphrodite',
TABBY: 'tabby',
KOBOLDCPP: 'koboldcpp',
TOGETHERAI: 'togetherai',
};
// https://docs.together.ai/reference/completions
const TOGETHERAI_KEYS = [
'model',
'prompt',
'max_tokens',
'temperature',
'top_p',
'top_k',
'repetition_penalty',
'stream',
];
const AVATAR_WIDTH = 400;
const AVATAR_HEIGHT = 600;
@ -186,4 +199,5 @@ module.exports = {
CHAT_COMPLETION_SOURCES,
AVATAR_WIDTH,
AVATAR_HEIGHT,
TOGETHERAI_KEYS,
};
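
The whitelist above is consumed in the text-completions route further below: request bodies bound for TogetherAI are stripped down to these keys with lodash. A minimal sketch with an illustrative request body:

// Illustrative only: unsupported sampler fields are dropped before forwarding to TogetherAI.
const _ = require('lodash');
const { TOGETHERAI_KEYS } = require('./constants');

const incoming = {
    model: 'Gryphe/MythoMax-L2-13b',
    prompt: 'Once upon a time',
    max_tokens: 250,
    temperature: 0.7,
    mirostat_mode: 2,          // not in TOGETHERAI_KEYS, dropped
    sampler_order: [6, 0, 1],  // not in TOGETHERAI_KEYS, dropped
};

const outgoing = _.pickBy(incoming, (_value, key) => TOGETHERAI_KEYS.includes(key));
// outgoing now contains only { model, prompt, max_tokens, temperature }
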

View File

@ -442,12 +442,12 @@ async function sendMistralAIRequest(request, response) {
const messages = Array.isArray(request.body.messages) ? request.body.messages : [];
const lastMsg = messages[messages.length - 1];
if (messages.length > 0 && lastMsg && (lastMsg.role === 'system' || lastMsg.role === 'assistant')) {
lastMsg.role = 'user';
if (lastMsg.role === 'assistant') {
lastMsg.content = lastMsg.name + ': ' + lastMsg.content;
} else if (lastMsg.role === 'system') {
lastMsg.content = '[INST] ' + lastMsg.content + ' [/INST]';
}
lastMsg.role = 'user';
}
//system prompts can be stacked at the start, but any further sys prompts after the first user/assistant message will break the model

View File

@ -1,8 +1,9 @@
const express = require('express');
const fetch = require('node-fetch').default;
const _ = require('lodash');
const { jsonParser } = require('../../express-common');
const { TEXTGEN_TYPES } = require('../../constants');
const { TEXTGEN_TYPES, TOGETHERAI_KEYS } = require('../../constants');
const { forwardFetchResponse } = require('../../util');
const { setAdditionalHeaders } = require('../../additional-headers');
@ -46,6 +47,9 @@ router.post('/status', jsonParser, async function (request, response) {
case TEXTGEN_TYPES.TABBY:
url += '/v1/model/list';
break;
case TEXTGEN_TYPES.TOGETHERAI:
url += '/api/models?&info';
break;
}
}
@ -56,13 +60,18 @@ router.post('/status', jsonParser, async function (request, response) {
return response.status(400);
}
const data = await modelsReply.json();
let data = await modelsReply.json();
if (request.body.legacy_api) {
console.log('Legacy API response:', data);
return response.send({ result: data?.result });
}
// Rewrap to OAI-like response
if (request.body.api_type === TEXTGEN_TYPES.TOGETHERAI && Array.isArray(data)) {
data = { data: data.map(x => ({ id: x.name, ...x })) };
}
if (!Array.isArray(data.data)) {
console.log('Models response is not an array.');
return response.status(400);
@ -145,6 +154,7 @@ router.post('/generate', jsonParser, async function (request, response_generate)
case TEXTGEN_TYPES.OOBA:
case TEXTGEN_TYPES.TABBY:
case TEXTGEN_TYPES.KOBOLDCPP:
case TEXTGEN_TYPES.TOGETHERAI:
url += '/v1/completions';
break;
case TEXTGEN_TYPES.MANCER:
@ -163,6 +173,15 @@ router.post('/generate', jsonParser, async function (request, response_generate)
setAdditionalHeaders(request, args, baseUrl);
if (request.body.api_type === TEXTGEN_TYPES.TOGETHERAI) {
const stop = Array.isArray(request.body.stop) ? request.body.stop[0] : '';
request.body = _.pickBy(request.body, (_, key) => TOGETHERAI_KEYS.includes(key));
if (typeof stop === 'string' && stop.length > 0) {
request.body.stop = stop;
}
args.body = JSON.stringify(request.body);
}
if (request.body.stream) {
const completionsStream = await fetch(url, args);
// Pipe remote SSE stream to Express response
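
To make the status-route rewrap above concrete: TogetherAI's /api/models endpoint returns a bare array of model objects, which is reshaped into the OpenAI-style { data: [...] } list the rest of the status pipeline expects. The sample entry is illustrative.

// Illustrative input from GET /api/models?&info (bare array):
let data = [
    { name: 'Gryphe/MythoMax-L2-13b', display_name: 'MythoMax 13B', context_length: 4096 },
];

// Rewrap to the OAI-like shape consumed by the client model loaders:
data = { data: data.map(x => ({ id: x.name, ...x })) };
// -> { data: [ { id: 'Gryphe/MythoMax-L2-13b', name: 'Gryphe/MythoMax-L2-13b', display_name: 'MythoMax 13B', context_length: 4096 } ] }
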

View File

@ -25,6 +25,7 @@ const SECRET_KEYS = {
DEEPLX_URL: 'deeplx_url',
MAKERSUITE: 'api_key_makersuite',
SERPAPI: 'api_key_serpapi',
TOGETHERAI: 'api_key_togetherai',
MISTRALAI: 'api_key_mistralai',
};

View File

@ -1,11 +1,12 @@
const express = require('express');
const fetch = require('node-fetch').default;
const sanitize = require('sanitize-filename');
const { getBasicAuthHeader, delay } = require('../util.js');
const { getBasicAuthHeader, delay, getHexString } = require('../util.js');
const fs = require('fs');
const { DIRECTORIES } = require('../constants.js');
const writeFileAtomicSync = require('write-file-atomic').sync;
const { jsonParser } = require('../express-common');
const { readSecret, SECRET_KEYS } = require('./secrets.js');
/**
* Sanitizes a string.
@ -545,6 +546,99 @@ comfy.post('/generate', jsonParser, async (request, response) => {
}
});
const together = express.Router();
together.post('/models', jsonParser, async (_, response) => {
try {
const key = readSecret(SECRET_KEYS.TOGETHERAI);
if (!key) {
console.log('TogetherAI key not found.');
return response.sendStatus(400);
}
const modelsResponse = await fetch('https://api.together.xyz/api/models', {
method: 'GET',
headers: {
'Authorization': `Bearer ${key}`,
},
});
if (!modelsResponse.ok) {
console.log('TogetherAI returned an error.');
return response.sendStatus(500);
}
const data = await modelsResponse.json();
if (!Array.isArray(data)) {
console.log('TogetherAI returned invalid data.');
return response.sendStatus(500);
}
const models = data
.filter(x => x.display_type === 'image')
.map(x => ({ value: x.name, text: x.display_name }));
return response.send(models);
} catch (error) {
console.log(error);
return response.sendStatus(500);
}
});
together.post('/generate', jsonParser, async (request, response) => {
try {
const key = readSecret(SECRET_KEYS.TOGETHERAI);
if (!key) {
console.log('TogetherAI key not found.');
return response.sendStatus(400);
}
console.log('TogetherAI request:', request.body);
const result = await fetch('https://api.together.xyz/api/inference', {
method: 'POST',
body: JSON.stringify({
request_type: 'image-model-inference',
prompt: request.body.prompt,
negative_prompt: request.body.negative_prompt,
height: request.body.height,
width: request.body.width,
model: request.body.model,
steps: request.body.steps,
n: 1,
seed: Math.floor(Math.random() * 10_000_000), // Limited to 10000 on playground, works fine with more.
sessionKey: getHexString(40), // Don't know if that's supposed to be random or not. It works either way.
}),
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${key}`,
},
});
if (!result.ok) {
console.log('TogetherAI returned an error.');
return response.sendStatus(500);
}
const data = await result.json();
console.log('TogetherAI response:', data);
if (data.status !== 'finished') {
console.log('TogetherAI job failed.');
return response.sendStatus(500);
}
return response.send(data);
} catch (error) {
console.log(error);
return response.sendStatus(500);
}
});
router.use('/comfy', comfy);
router.use('/together', together);
module.exports = { router };

View File

@ -105,6 +105,21 @@ function delay(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
/**
* Generates a random hex string of the given length.
* @param {number} length String length
* @returns {string} Random hex string
* @example getHexString(8) // 'a1b2c3d4'
*/
function getHexString(length) {
const chars = '0123456789abcdef';
let result = '';
for (let i = 0; i < length; i++) {
result += chars[Math.floor(Math.random() * chars.length)];
}
return result;
}
/**
* Extracts a file with given extension from an ArrayBuffer containing a ZIP archive.
* @param {ArrayBuffer} archiveBuffer Buffer containing a ZIP archive
@ -404,4 +419,5 @@ module.exports = {
removeOldBackups,
getImages,
forwardFetchResponse,
getHexString,
};