Re-add Together as a text completion source
parent 180061337e
commit b0d9f14534
@@ -3,7 +3,6 @@
"username": "User",
"api_server": "http://127.0.0.1:5000/api",
"api_server_textgenerationwebui": "http://127.0.0.1:5000/api",
"api_use_mancer_webui": false,
"preset_settings": "RecoveredRuins",
"user_avatar": "user-default.png",
"amount_gen": 250,
public/img/togetherai.svg (new file, 55 lines)
@@ -0,0 +1,55 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
width="32"
height="32"
viewBox="0 0 32 32"
version="1.1"
id="svg4"
sodipodi:docname="togetherai.svg"
inkscape:version="1.3 (0e150ed, 2023-07-21)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview4"
pagecolor="#ffffff"
bordercolor="#000000"
borderopacity="0.25"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:zoom="7.375"
inkscape:cx="15.932203"
inkscape:cy="15.932203"
inkscape:window-width="1280"
inkscape:window-height="688"
inkscape:window-x="0"
inkscape:window-y="25"
inkscape:window-maximized="1"
inkscape:current-layer="g4" />
<g
clip-path="url(#clip0_542_18748)"
id="g4">
<path
id="rect1"
d="M 5.6464844 0 C 2.5180256 -1.1842331e-15 3.5527101e-15 2.5180256 0 5.6464844 L 0 26.353516 C -1.1842331e-15 29.481971 2.5180256 32 5.6464844 32 L 26.353516 32 C 29.481971 32 32 29.481971 32 26.353516 L 32 5.6464844 C 32 2.5180256 29.481971 3.5527101e-15 26.353516 0 L 5.6464844 0 z M 9.6464844 4 A 5.6470599 5.6470599 0 0 1 15.294922 9.6464844 A 5.6470599 5.6470599 0 0 1 9.6464844 15.294922 A 5.6470599 5.6470599 0 0 1 4 9.6464844 A 5.6470599 5.6470599 0 0 1 9.6464844 4 z M 22.824219 4 A 5.6470599 5.6470599 0 0 1 28.470703 9.6464844 A 5.6470599 5.6470599 0 0 1 22.824219 15.294922 A 5.6470599 5.6470599 0 0 1 17.175781 9.6464844 A 5.6470599 5.6470599 0 0 1 22.824219 4 z M 9.6464844 17.175781 A 5.6470599 5.6470599 0 0 1 15.294922 22.824219 A 5.6470599 5.6470599 0 0 1 9.6464844 28.470703 A 5.6470599 5.6470599 0 0 1 4 22.824219 A 5.6470599 5.6470599 0 0 1 9.6464844 17.175781 z M 22.824219 17.175781 A 5.6470599 5.6470599 0 0 1 28.470703 22.824219 A 5.6470599 5.6470599 0 0 1 22.824219 28.470703 A 5.6470599 5.6470599 0 0 1 17.175781 22.824219 A 5.6470599 5.6470599 0 0 1 22.824219 17.175781 z " />
<circle
cx="9.64706"
cy="9.64706"
r="5.64706"
opacity="0.45"
id="circle9" />
</g>
<defs
id="defs4">
<clipPath
id="clip0_542_18748">
<rect
width="32"
height="32"
id="rect4" />
</clipPath>
</defs>
</svg>
After: Size 2.4 KiB
@@ -69,7 +69,7 @@
<script type="module" src="scripts/group-chats.js"></script>
<script type="module" src="scripts/kai-settings.js"></script>
<script type="module" src="scripts/textgen-settings.js"></script>
<script type="module" src="scripts/mancer-settings.js"></script>
<script type="module" src="scripts/textgen-models.js"></script>
<script type="module" src="scripts/bookmarks.js"></script>
<script type="module" src="scripts/horde.js"></script>
<script type="module" src="scripts/RossAscends-mods.js"></script>
@@ -509,7 +509,7 @@
</div>
</div>
</div>
<div data-newbie-hidden class="range-block" data-source="openai,claude,openrouter,ai21,scale,makersuite,mistralai,togetherai">
<div data-newbie-hidden class="range-block" data-source="openai,claude,openrouter,ai21,scale,makersuite,mistralai">
<div class="range-block-title" data-i18n="Top-p">
Top P
</div>
@@ -749,7 +749,7 @@
<div id="openai_proxy_password_show" title="Peek a password" class="menu_button fa-solid fa-eye-slash fa-fw"></div>
</div>
</div>
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,mistralai,togetherai">
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,mistralai">
<div class="range-block-title justifyLeft" data-i18n="Seed">
Seed
</div>
@@ -1592,8 +1592,8 @@
<option value="kobold"><span data-i18n="KoboldAI">KoboldAI Classic</span></option>
<option value="koboldhorde"><span data-i18n="KoboldAI Horde">KoboldAI Horde</span></option>
<option value="novel"><span data-i18n="NovelAI">NovelAI</span></option>
<option value="textgenerationwebui"><span data-i18n="Text Completion">Text Completion (ooba, Mancer, Aphrodite, TabbyAPI, KoboldCpp)</span></option>
<option value="openai"><span data-i18n="Chat Completion (OpenAI, Claude, Window/OpenRouter, Scale, AI21, Google MakerSuite, MistralAI, TogetherAI)">Chat Completion (OpenAI, Claude, Window, OpenRouter, Scale, AI21, Google MakerSuite, MistralAI)</span></option>
<option value="textgenerationwebui"><span data-i18n="Text Completion">Text Completion</span></option>
<option value="openai"><span data-i18n="Chat Completion">Chat Completion</span></option>
</select>
</div>
<div id="kobold_horde" style="position: relative;"> <!-- shows the kobold settings -->
@@ -1723,8 +1723,27 @@
<option value="aphrodite">Aphrodite</option>
<option value="tabby">TabbyAPI</option>
<option value="koboldcpp">KoboldCpp</option>
<option value="togetherai">TogetherAI</option>
</select>
</div>
<div data-tg-type="togetherai" class="flex-container flexFlowColumn">
<h4 data-i18n="TogetherAI API Key">TogetherAI API Key</h4>
<div class="flex-container">
<input id="api_key_togetherai" name="api_key_togetherai" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
<div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_togetherai"></div>
</div>
<div data-for="api_key_togetherai" class="neutral_warning">
For privacy reasons, your API key will be hidden after you reload the page.
</div>
<div>
<h4 data-i18n="TogetherAI Model">TogetherAI Model</h4>
<select id="model_togetherai_select">
<option>
-- Connect to the API --
</option>
</select>
</div>
</div>
<div data-tg-type="mancer" class="flex-container flexFlowColumn">
<div class="flex-container flexFlowColumn">
</div>
@@ -1843,7 +1862,6 @@
<option value="ai21">AI21</option>
<option value="makersuite">Google MakerSuite</option>
<option value="mistralai">MistralAI</option>
<option value="togetherai">TogetherAI</option>
</select>
<form id="openai_form" data-source="openai" action="javascript:void(null);" method="post" enctype="multipart/form-data">
<h4><span data-i18n="OpenAI API key">OpenAI API key</span></h4>
@@ -2148,23 +2166,6 @@
</select>
</div>
</form>
<form id="togetherai_form" data-source="togetherai" action="javascript:void(null);" method="post" enctype="multipart/form-data">
<h4 data-i18n="TogetherAI API Key">TogetherAI API Key</h4>
<div class="flex-container">
<input id="api_key_togetherai" name="api_key_togetherai" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
<div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_togetherai"></div>
</div>
<div data-for="api_key_togetherai" class="neutral_warning">
For privacy reasons, your API key will be hidden after you reload the page.
</div>
<div>
<h4 data-i18n="TogetherAI Model">TogetherAI Model</h4>
<select id="model_togetherai_select">
<optgroup label="Latest">
<option value="GPT-NeoXT-Chat-Base (20B)">togethercomputer/GPT-NeoXT-Chat-Base-20B</option> <!-- unsure about this one honestly -->
</optgroup>
</select>
</div>
<div class="flex-container flex">
<div id="api_button_openai" class="api_button menu_button menu_button_icon" type="submit" data-i18n="Connect">Connect</div>
<div class="api_loading menu_button" data-i18n="Cancel">Cancel</div>
@@ -19,10 +19,10 @@ import {
getTextGenUrlSourceId,
textgen_types,
textgenerationwebui_banned_in_macros,
MANCER_SERVER,
getTextGenServer,
} from './scripts/textgen-settings.js';

const { MANCER } = textgen_types;
const { MANCER, TOGETHERAI } = textgen_types;

import {
world_info,
@@ -189,7 +189,7 @@ import { createPersona, initPersonas, selectCurrentPersona, setPersonaDescriptio
import { getBackgrounds, initBackgrounds, loadBackgroundSettings, background_settings } from './scripts/backgrounds.js';
import { hideLoader, showLoader } from './scripts/loader.js';
import { BulkEditOverlay, CharacterContextMenu } from './scripts/BulkEditOverlay.js';
import { loadMancerModels } from './scripts/mancer-settings.js';
import { loadMancerModels, loadTogetherAIModels } from './scripts/textgen-models.js';
import { appendFileContent, hasPendingFileAttachment, populateFileAttachment } from './scripts/chats.js';
import { replaceVariableMacros } from './scripts/variables.js';
import { initPresetManager } from './scripts/preset-manager.js';
@@ -931,9 +931,7 @@ async function getStatusKobold() {
async function getStatusTextgen() {
const url = '/api/backends/text-completions/status';

let endpoint = textgen_settings.type === MANCER ?
MANCER_SERVER :
api_server_textgenerationwebui;
let endpoint = getTextGenServer();

if (!endpoint) {
console.warn('No endpoint for status check');
@@ -949,7 +947,8 @@ async function getStatusTextgen() {
api_type: textgen_settings.type,
legacy_api:
textgen_settings.legacy_api &&
textgen_settings.type !== MANCER,
textgen_settings.type !== MANCER &&
textgen_settings.type !== TOGETHERAI,
}),
signal: abortStatusCheck.signal,
});
@@ -959,6 +958,9 @@ async function getStatusTextgen() {
if (textgen_settings.type === MANCER) {
online_status = textgen_settings.mancer_model;
loadMancerModels(data?.data);
} else if (textgen_settings.type === TOGETHERAI) {
online_status = textgen_settings.togetherai_model;
loadTogetherAIModels(data?.data);
} else {
online_status = data?.result;
}
@@ -2958,7 +2960,8 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
if (main_api === 'textgenerationwebui' &&
textgen_settings.streaming &&
textgen_settings.legacy_api &&
textgen_settings.type !== MANCER) {
textgen_settings.type !== MANCER &&
textgen_settings.type !== TOGETHERAI) {
toastr.error('Streaming is not supported for the Legacy API. Update Ooba and use --extensions openai to enable streaming.', undefined, { timeOut: 10000, preventDuplicates: true });
unblockGeneration();
return Promise.resolve();
@@ -5399,7 +5402,6 @@ function changeMainAPI() {
case chat_completion_sources.AI21:
case chat_completion_sources.MAKERSUITE:
case chat_completion_sources.MISTRALAI:
case chat_completion_sources.TOGETHERAI:
default:
setupChatCompletionPromptManager(oai_settings);
break;
@@ -7515,6 +7517,11 @@ async function connectAPISlash(_, text) {
button: '#api_button_textgenerationwebui',
type: textgen_types.KOBOLDCPP,
},
'togetherai': {
selected: 'textgenerationwebui',
button: '#api_button_textgenerationwebui',
type: textgen_types.TOGETHERAI,
},
'oai': {
selected: 'openai',
source: 'openai',
@@ -7555,11 +7562,6 @@ async function connectAPISlash(_, text) {
source: 'mistralai',
button: '#api_button_openai',
},
'togetherai': {
selected: 'openai',
source: 'togetherai',
button: '#api_button_openai',
},
};

const apiConfig = apiMap[text.toLowerCase()];
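
Note: the 'togetherai' entry now lives in the Text Completion half of apiMap, while its Chat Completion counterpart is removed in the second hunk. Assuming connectAPISlash is the handler behind the /api slash command (its registration is outside this hunk), the same command now resolves to the text-completion connector, roughly:

// "/api togetherai"   (hypothetical invocation)
// -> apiMap['togetherai'] === { selected: 'textgenerationwebui', button: '#api_button_textgenerationwebui', type: textgen_types.TOGETHERAI }
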
@@ -8400,6 +8402,11 @@ jQuery(async function () {
await writeSecret(SECRET_KEYS.TABBY, tabbyKey);
}

const togetherKey = String($('#api_key_togetherai').val()).trim();
if (togetherKey.length) {
await writeSecret(SECRET_KEYS.TOGETHERAI, togetherKey);
}

const urlSourceId = getTextGenUrlSourceId();

if (urlSourceId && $(urlSourceId).val() !== '') {
@@ -382,7 +382,9 @@ function RA_autoconnect(PrevApi) {
}
break;
case 'textgenerationwebui':
if (textgen_settings.type === textgen_types.MANCER && secret_state[SECRET_KEYS.MANCER]) {
if ((textgen_settings.type === textgen_types.MANCER && secret_state[SECRET_KEYS.MANCER]) ||
(textgen_settings.type === textgen_types.TOGETHERAI && secret_state[SECRET_KEYS.TOGETHERAI])
) {
$('#api_button_textgenerationwebui').trigger('click');
}
else if (api_server_textgenerationwebui && isValidUrl(api_server_textgenerationwebui)) {
@@ -398,7 +400,6 @@ function RA_autoconnect(PrevApi) {
|| (secret_state[SECRET_KEYS.AI21] && oai_settings.chat_completion_source == chat_completion_sources.AI21)
|| (secret_state[SECRET_KEYS.MAKERSUITE] && oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE)
|| (secret_state[SECRET_KEYS.MISTRALAI] && oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI)
|| (secret_state[SECRET_KEYS.TOGETHERAI] && oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI)
) {
$('#api_button_openai').trigger('click');
}
@@ -395,8 +395,7 @@ async function getSavedHashes(collectionId) {
async function insertVectorItems(collectionId, items) {
if (settings.source === 'openai' && !secret_state[SECRET_KEYS.OPENAI] ||
settings.source === 'palm' && !secret_state[SECRET_KEYS.MAKERSUITE] ||
settings.source === 'mistral' && !secret_state[SECRET_KEYS.MISTRALAI] ||
settings.source === 'togetherai' && !secret_state[SECRET_KEYS.TOGETHERAI]) {
settings.source === 'mistral' && !secret_state[SECRET_KEYS.MISTRALAI]) {
throw new Error('Vectors: API key missing', { cause: 'api_key_missing' });
}
@@ -14,7 +14,6 @@
<option value="openai">OpenAI</option>
<option value="palm">Google MakerSuite (PaLM)</option>
<option value="mistral">MistralAI</option>
<option value="togetherai">Together AI</option>
</select>
</div>
@@ -1,64 +0,0 @@
import { setGenerationParamsFromPreset } from '../script.js';
import { isMobile } from './RossAscends-mods.js';
import { textgenerationwebui_settings as textgen_settings } from './textgen-settings.js';

let models = [];

export async function loadMancerModels(data) {
if (!Array.isArray(data)) {
console.error('Invalid Mancer models data', data);
return;
}

models = data;

$('#mancer_model').empty();
for (const model of data) {
const option = document.createElement('option');
option.value = model.id;
option.text = model.name;
option.selected = model.id === textgen_settings.mancer_model;
$('#mancer_model').append(option);
}
}

function onMancerModelSelect() {
const modelId = String($('#mancer_model').val());
textgen_settings.mancer_model = modelId;
$('#api_button_textgenerationwebui').trigger('click');

const limits = models.find(x => x.id === modelId)?.limits;
setGenerationParamsFromPreset({ max_length: limits.context, genamt: limits.completion });
}

function getMancerModelTemplate(option) {
const model = models.find(x => x.id === option?.element?.value);

if (!option.id || !model) {
return option.text;
}

const creditsPerPrompt = (model.limits?.context - model.limits?.completion) * model.pricing?.prompt;
const creditsPerCompletion = model.limits?.completion * model.pricing?.completion;
const creditsTotal = Math.round(creditsPerPrompt + creditsPerCompletion).toFixed(0);

return $((`
<div class="flex-container flexFlowColumn">
<div><strong>${DOMPurify.sanitize(model.name)}</strong> | <span>${model.limits?.context} ctx</span> / <span>${model.limits?.completion} res</span> | <small>Credits per request (max): ${creditsTotal}</small></div>
</div>
`));
}

jQuery(function () {
$('#mancer_model').on('change', onMancerModelSelect);

if (!isMobile()) {
$('#mancer_model').select2({
placeholder: 'Select a model',
searchInputPlaceholder: 'Search models...',
searchInputCssClass: 'text_pole',
width: '100%',
templateResult: getMancerModelTemplate,
});
}
});
@@ -165,7 +165,6 @@ export const chat_completion_sources = {
AI21: 'ai21',
MAKERSUITE: 'makersuite',
MISTRALAI: 'mistralai',
TOGETHERAI: 'togetherai',
};

const prefixMap = selected_group ? {
@@ -210,7 +209,6 @@ const default_settings = {
claude_model: 'claude-instant-v1',
google_model: 'gemini-pro',
ai21_model: 'j2-ultra',
togetherai_model: 'togethercomputer/GPT-NeoXT-Chat-Base-20B', // unsure here
mistralai_model: 'mistral-medium',
windowai_model: '',
openrouter_model: openrouter_website_model,
@@ -267,7 +265,6 @@ const oai_settings = {
claude_model: 'claude-instant-v1',
google_model: 'gemini-pro',
ai21_model: 'j2-ultra',
togetherai_model: 'togethercomputer/GPT-NeoXT-Chat-Base-20B', // unsure here
mistralai_model: 'mistral-medium',
windowai_model: '',
openrouter_model: openrouter_website_model,
@@ -1267,8 +1264,6 @@ function getChatCompletionModel() {
return oai_settings.openrouter_model !== openrouter_website_model ? oai_settings.openrouter_model : null;
case chat_completion_sources.AI21:
return oai_settings.ai21_model;
case chat_completion_sources.TOGETHERAI:
return oai_settings.togetherai_model;
case chat_completion_sources.MISTRALAI:
return oai_settings.mistralai_model;
default:
@@ -1458,7 +1453,6 @@ async function sendOpenAIRequest(type, messages, signal) {
const isAI21 = oai_settings.chat_completion_source == chat_completion_sources.AI21;
const isGoogle = oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE;
const isOAI = oai_settings.chat_completion_source == chat_completion_sources.OPENAI;
const isTogetherAI = oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI;
const isMistral = oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI;
const isTextCompletion = (isOAI && textCompletionModels.includes(oai_settings.openai_model)) || (isOpenRouter && oai_settings.openrouter_force_instruct && power_user.instruct.enabled);
const isQuiet = type === 'quiet';
@@ -1571,7 +1565,7 @@ async function sendOpenAIRequest(type, messages, signal) {
generate_data['safe_mode'] = false; // already defaults to false, but just incase they change that in the future.
}

if ((isOAI || isOpenRouter || isMistral || isTogetherAI) && oai_settings.seed >= 0) {
if ((isOAI || isOpenRouter || isMistral) && oai_settings.seed >= 0) {
generate_data['seed'] = oai_settings.seed;
}
@@ -2325,7 +2319,6 @@ function loadOpenAISettings(data, settings) {
oai_settings.assistant_prefill = settings.assistant_prefill ?? default_settings.assistant_prefill;
oai_settings.image_inlining = settings.image_inlining ?? default_settings.image_inlining;
oai_settings.bypass_status_check = settings.bypass_status_check ?? default_settings.bypass_status_check;
oai_settings.togetherai_model = settings.togetherai_model ?? default_settings.togetherai_model;

oai_settings.prompts = settings.prompts ?? default_settings.prompts;
oai_settings.prompt_order = settings.prompt_order ?? default_settings.prompt_order;
@@ -2360,8 +2353,6 @@ function loadOpenAISettings(data, settings) {
$(`#model_google_select option[value="${oai_settings.google_model}"`).attr('selected', true);
$('#model_ai21_select').val(oai_settings.ai21_model);
$(`#model_ai21_select option[value="${oai_settings.ai21_model}"`).attr('selected', true);
$('#model_togetherai_select').val(oai_settings.togetherai_model);
$(`#model_togetherai_select option[value="${oai_settings.togetherai_model}"`).attr('selected', true);
$('#model_mistralai_select').val(oai_settings.mistralai_model);
$(`#model_mistralai_select option[value="${oai_settings.mistralai_model}"`).attr('selected', true);
$('#openai_max_context').val(oai_settings.openai_max_context);
@@ -2541,7 +2532,6 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
openrouter_group_models: settings.openrouter_group_models,
openrouter_sort_models: settings.openrouter_sort_models,
ai21_model: settings.ai21_model,
togetherai_model: settings.togetherai_model,
mistralai_model: settings.mistralai_model,
google_model: settings.google_model,
temperature: settings.temp_openai,
@@ -2914,7 +2904,6 @@ function onSettingsPresetChange() {
openrouter_group_models: ['#openrouter_group_models', 'openrouter_group_models', false],
openrouter_sort_models: ['#openrouter_sort_models', 'openrouter_sort_models', false],
ai21_model: ['#model_ai21_select', 'ai21_model', false],
togetherai_model: ['#model_togetherai_select', 'togetherai_model', false],
mistralai_model: ['#model_mistralai_select', 'mistralai_model', false],
google_model: ['#model_google_select', 'google_model', false],
openai_max_context: ['#openai_max_context', 'openai_max_context', false],
@@ -3094,12 +3083,7 @@ async function onModelChange() {
console.log('AI21 model changed to', value);
oai_settings.ai21_model = value;
}

if ($(this).is('#model_togetherai_select')) {
console.log('TogetherAI model changed to', value);
oai_settings.togetherai_model = value;
}

if ($(this).is('#model_google_select')) {
console.log('Google model changed to', value);
oai_settings.google_model = value;
@@ -3204,14 +3188,6 @@ async function onModelChange() {
}
}

// not sure if this is enough
if (oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI) {
$('#openai_max_context').attr('max', max_2k); // assuming togethercomputer/GPT-NeoXT-Chat-Base-20B
oai_settings.openai_max_context = Math.min(oai_settings.openai_max_context, Number($('#openai_max_context').attr('max')));
$('#openai_max_context').val(oai_settings.openai_max_context).trigger('input');

}

if (oai_settings.chat_completion_source == chat_completion_sources.OPENAI) {
$('#openai_max_context').attr('max', getMaxContextOpenAI(value));
oai_settings.openai_max_context = Math.min(oai_settings.openai_max_context, Number($('#openai_max_context').attr('max')));
@@ -3394,18 +3370,6 @@ async function onConnectButtonClick(e) {
}
}

if (oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI) {
const api_key_togetherai = String($('#api_key_togetherai').val()).trim();

if (api_key_togetherai.length) {
await writeSecret(SECRET_KEYS.TOGETHERAI, api_key_togetherai);
}

if (!secret_state[SECRET_KEYS.TOGETHERAI]) {
console.log('No secret key saved for TogetherAI');
}
}

if (oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI) {
const api_key_mistralai = String($('#api_key_mistralai').val()).trim();

@@ -3451,9 +3415,6 @@ function toggleChatCompletionForms() {
else if (oai_settings.chat_completion_source == chat_completion_sources.AI21) {
$('#model_ai21_select').trigger('change');
}
else if (oai_settings.chat_completion_source == chat_completion_sources.TOGETHERAI) {
$('#model_togetherai_select').trigger('change');
}
else if (oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI) {
$('#model_mistralai_select').trigger('change');
}
@@ -3834,7 +3795,6 @@ $(document).ready(async function () {
$('#openrouter_group_models').on('change', onOpenrouterModelSortChange);
$('#openrouter_sort_models').on('change', onOpenrouterModelSortChange);
$('#model_ai21_select').on('change', onModelChange);
$('#model_togetherai_select').on('change', onModelChange);
$('#model_mistralai_select').on('change', onModelChange);
$('#settings_preset_openai').on('change', onSettingsPresetChange);
$('#new_oai_preset').on('click', onNewPresetClick);
public/scripts/textgen-models.js (new file, 120 lines)
@@ -0,0 +1,120 @@
import { setGenerationParamsFromPreset } from '../script.js';
import { isMobile } from './RossAscends-mods.js';
import { textgenerationwebui_settings as textgen_settings } from './textgen-settings.js';

let mancerModels = [];
let togetherModels = [];

export async function loadTogetherAIModels(data) {
if (!Array.isArray(data)) {
console.error('Invalid Together AI models data', data);
return;
}

togetherModels = data;

$('#model_togetherai_select').empty();
for (const model of data) {
// Hey buddy, I think you've got the wrong door.
if (model.display_type === 'image') {
continue;
}

const option = document.createElement('option');
option.value = model.name;
option.text = model.display_name;
option.selected = model.name === textgen_settings.togetherai_model;
$('#model_togetherai_select').append(option);
}
}

export async function loadMancerModels(data) {
if (!Array.isArray(data)) {
console.error('Invalid Mancer models data', data);
return;
}

mancerModels = data;

$('#mancer_model').empty();
for (const model of data) {
const option = document.createElement('option');
option.value = model.id;
option.text = model.name;
option.selected = model.id === textgen_settings.mancer_model;
$('#mancer_model').append(option);
}
}

function onMancerModelSelect() {
const modelId = String($('#mancer_model').val());
textgen_settings.mancer_model = modelId;
$('#api_button_textgenerationwebui').trigger('click');

const limits = mancerModels.find(x => x.id === modelId)?.limits;
setGenerationParamsFromPreset({ max_length: limits.context, genamt: limits.completion });
}


function onTogetherModelSelect() {
const modelName = String($('#model_togetherai_select').val());
textgen_settings.togetherai_model = modelName;
$('#api_button_textgenerationwebui').trigger('click');
const model = togetherModels.find(x => x.name === modelName);
setGenerationParamsFromPreset({ max_length: model.context_length });
}

function getMancerModelTemplate(option) {
const model = mancerModels.find(x => x.id === option?.element?.value);

if (!option.id || !model) {
return option.text;
}

const creditsPerPrompt = (model.limits?.context - model.limits?.completion) * model.pricing?.prompt;
const creditsPerCompletion = model.limits?.completion * model.pricing?.completion;
const creditsTotal = Math.round(creditsPerPrompt + creditsPerCompletion).toFixed(0);

return $((`
<div class="flex-container flexFlowColumn">
<div><strong>${DOMPurify.sanitize(model.name)}</strong> | <span>${model.limits?.context} ctx</span> / <span>${model.limits?.completion} res</span> | <small>Credits per request (max): ${creditsTotal}</small></div>
</div>
`));
}

function getTogetherModelTemplate(option) {
const model = togetherModels.find(x => x.name === option?.element?.value);

if (!option.id || !model) {
return option.text;
}

return $((`
<div class="flex-container flexFlowColumn">
<div><strong>${DOMPurify.sanitize(model.name)}</strong> | <span>${model.context_length || '???'} tokens</span></div>
<div><small>${DOMPurify.sanitize(model.description)}</small></div>
</div>
`));
}

jQuery(function () {
$('#mancer_model').on('change', onMancerModelSelect);
$('#model_togetherai_select').on('change', onTogetherModelSelect);

if (!isMobile()) {
$('#mancer_model').select2({
placeholder: 'Select a model',
searchInputPlaceholder: 'Search models...',
searchInputCssClass: 'text_pole',
width: '100%',
templateResult: getMancerModelTemplate,
});
$('#model_togetherai_select').select2({
placeholder: 'Select a model',
searchInputPlaceholder: 'Search models...',
searchInputCssClass: 'text_pole',
width: '100%',
templateResult: getTogetherModelTemplate,
});
}
});
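
Note: loadTogetherAIModels and getTogetherModelTemplate above only read a handful of fields from each entry delivered by the /status route. A minimal sketch of the shape they assume (field values are illustrative, not taken from Together's catalog):

// Illustrative entry only; real entries come from Together's /api/models?&info listing.
const exampleTogetherModel = {
    name: 'Gryphe/MythoMax-L2-13b',  // used as the <option> value and stored in textgen_settings.togetherai_model
    display_name: 'MythoMax L2 13B', // used as the <option> label (made-up label)
    display_type: 'language',        // entries with display_type === 'image' are skipped
    context_length: 4096,            // passed to setGenerationParamsFromPreset as max_length (made-up number)
    description: 'Example description rendered by the select2 template.',
};
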
@@ -31,15 +31,17 @@ export const textgen_types = {
APHRODITE: 'aphrodite',
TABBY: 'tabby',
KOBOLDCPP: 'koboldcpp',
TOGETHERAI: 'togetherai',
};

const { MANCER, APHRODITE } = textgen_types;
const { MANCER, APHRODITE, TOGETHERAI } = textgen_types;

// Maybe let it be configurable in the future?
// (7 days later) The future has come.
const MANCER_SERVER_KEY = 'mancer_server';
const MANCER_SERVER_DEFAULT = 'https://neuro.mancer.tech';
export let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEFAULT;
let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEFAULT;
let TOGETHERAI_SERVER = 'https://api.together.xyz';

const KOBOLDCPP_ORDER = [6, 0, 1, 3, 4, 2, 5];
const settings = {
@@ -89,6 +91,7 @@ const settings = {
//prompt_log_probs_aphrodite: 0,
type: textgen_types.OOBA,
mancer_model: 'mytholite',
togetherai_model: 'Gryphe/MythoMax-L2-13b',
legacy_api: false,
sampler_order: KOBOLDCPP_ORDER,
n: 1,
@@ -164,8 +167,8 @@ async function selectPreset(name) {

function formatTextGenURL(value) {
try {
// Mancer doesn't need any formatting (it's hardcoded)
if (settings.type === MANCER) {
// Mancer/Together doesn't need any formatting (it's hardcoded)
if (settings.type === MANCER || settings.type === TOGETHERAI) {
return value;
}
@@ -546,6 +549,10 @@ function getModel() {
return settings.mancer_model;
}

if (settings.type === TOGETHERAI) {
return settings.togetherai_model;
}

if (settings.type === APHRODITE) {
return online_status;
}
@@ -553,6 +560,18 @@ function getModel() {
return undefined;
}

export function getTextGenServer() {
if (settings.type === MANCER) {
return MANCER_SERVER;
}

if (settings.type === TOGETHERAI) {
return TOGETHERAI_SERVER;
}

return api_server_textgenerationwebui;
}

export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, isContinue, cfgValues, type) {
const canMultiSwipe = !isContinue && !isImpersonate && type !== 'quiet';
let APIflags = {
@@ -590,10 +609,8 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
toIntArray(getCustomTokenBans()) :
getCustomTokenBans(),
'api_type': settings.type,
'api_server': settings.type === MANCER ?
MANCER_SERVER :
api_server_textgenerationwebui,
'legacy_api': settings.legacy_api && settings.type !== MANCER,
'api_server': getTextGenServer(),
'legacy_api': settings.legacy_api && settings.type !== MANCER && settings.type !== TOGETHERAI,
'sampler_order': settings.type === textgen_types.KOBOLDCPP ?
settings.sampler_order :
undefined,
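
Note: with getTextGenServer() and the widened legacy_api guard, a Together request built by getTextGenGenerationData now always targets the hardcoded server and never sets the legacy flag. A sketch of the affected fields when settings.type is TOGETHERAI (other sampler fields unchanged):

// 'api_type':   'togetherai'
// 'api_server': 'https://api.together.xyz'    // resolved by getTextGenServer()
// 'legacy_api': false                         // forced off for both Mancer and Together
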
@@ -6,7 +6,7 @@ import { getStringHash } from './utils.js';
import { kai_flags } from './kai-settings.js';
import { textgen_types, textgenerationwebui_settings as textgen_settings } from './textgen-settings.js';

const { OOBA, TABBY, KOBOLDCPP, MANCER } = textgen_types;
const { OOBA, TABBY, KOBOLDCPP, MANCER, TOGETHERAI } = textgen_types;

export const CHARACTERS_PER_TOKEN_RATIO = 3.35;
const TOKENIZER_WARNING_KEY = 'tokenizationWarningShown';
@@ -540,7 +540,8 @@ function getTextgenAPITokenizationParams(str) {
url: api_server_textgenerationwebui,
legacy_api:
textgen_settings.legacy_api &&
textgen_settings.type !== MANCER,
textgen_settings.type !== MANCER &&
textgen_settings.type !== TOGETHERAI,
};
}
@@ -11,6 +11,14 @@ function getMancerHeaders() {
}) : {};
}

function getTogetherAIHeaders() {
const apiKey = readSecret(SECRET_KEYS.TOGETHERAI);

return apiKey ? ({
'Authorization': `Bearer ${apiKey}`,
}) : {};
}

function getAphroditeHeaders() {
const apiKey = readSecret(SECRET_KEYS.APHRODITE);

@@ -58,6 +66,9 @@ function setAdditionalHeaders(request, args, server) {
case TEXTGEN_TYPES.TABBY:
headers = getTabbyHeaders();
break;
case TEXTGEN_TYPES.TOGETHERAI:
headers = getTogetherAIHeaders();
break;
default:
headers = server ? getOverrideHeaders((new URL(server))?.host) : {};
break;
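
Note: getTogetherAIHeaders follows the same pattern as the existing Mancer and Aphrodite helpers: a stored TOGETHERAI secret becomes a plain Bearer header, otherwise nothing is added, and setAdditionalHeaders merges the result into the outgoing request. A rough sketch of the behaviour (the key value is made up):

// readSecret(SECRET_KEYS.TOGETHERAI) === 'tok-example'   -> getTogetherAIHeaders() returns { 'Authorization': 'Bearer tok-example' }
// readSecret(SECRET_KEYS.TOGETHERAI) is empty/undefined  -> getTogetherAIHeaders() returns {}
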
@@ -158,7 +158,6 @@ const CHAT_COMPLETION_SOURCES = {
SCALE: 'scale',
OPENROUTER: 'openrouter',
AI21: 'ai21',
TOGETHERAI: 'togetherai',
MAKERSUITE: 'makersuite',
MISTRALAI: 'mistralai',
};
@@ -172,8 +171,21 @@ const TEXTGEN_TYPES = {
APHRODITE: 'aphrodite',
TABBY: 'tabby',
KOBOLDCPP: 'koboldcpp',
TOGETHERAI: 'togetherai',
};

// https://docs.together.ai/reference/completions
const TOGETHERAI_KEYS = [
'model',
'prompt',
'max_tokens',
'temperature',
'top_p',
'top_k',
'repetition_penalty',
'stream',
];

const AVATAR_WIDTH = 400;
const AVATAR_HEIGHT = 600;

@@ -187,4 +199,5 @@ module.exports = {
CHAT_COMPLETION_SOURCES,
AVATAR_WIDTH,
AVATAR_HEIGHT,
TOGETHERAI_KEYS,
};
@@ -1,8 +1,9 @@
const express = require('express');
const fetch = require('node-fetch').default;
const _ = require('lodash');

const { jsonParser } = require('../../express-common');
const { TEXTGEN_TYPES } = require('../../constants');
const { TEXTGEN_TYPES, TOGETHERAI_KEYS } = require('../../constants');
const { forwardFetchResponse } = require('../../util');
const { setAdditionalHeaders } = require('../../additional-headers');
@@ -46,6 +47,9 @@ router.post('/status', jsonParser, async function (request, response) {
case TEXTGEN_TYPES.TABBY:
url += '/v1/model/list';
break;
case TEXTGEN_TYPES.TOGETHERAI:
url += '/api/models?&info';
break;
}
}
@@ -56,13 +60,18 @@ router.post('/status', jsonParser, async function (request, response) {
return response.status(400);
}

const data = await modelsReply.json();
let data = await modelsReply.json();

if (request.body.legacy_api) {
console.log('Legacy API response:', data);
return response.send({ result: data?.result });
}

// Rewrap to OAI-like response
if (request.body.api_type === TEXTGEN_TYPES.TOGETHERAI && Array.isArray(data)) {
data = { data: data.map(x => ({ id: x.name, ...x })) };
}

if (!Array.isArray(data.data)) {
console.log('Models response is not an array.');
return response.status(400);
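
Note: the rewrap above exists because Together's model listing returns a bare JSON array rather than the OpenAI-style { data: [...] } envelope the rest of the /status handler expects. Roughly (entries abbreviated, extra fields illustrative):

// Upstream Together response:  [ { name: 'Gryphe/MythoMax-L2-13b', display_type: 'language', ... }, ... ]
// After the rewrap:            { data: [ { id: 'Gryphe/MythoMax-L2-13b', name: 'Gryphe/MythoMax-L2-13b', display_type: 'language', ... }, ... ] }
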
@@ -145,6 +154,7 @@ router.post('/generate', jsonParser, async function (request, response_generate)
case TEXTGEN_TYPES.OOBA:
case TEXTGEN_TYPES.TABBY:
case TEXTGEN_TYPES.KOBOLDCPP:
case TEXTGEN_TYPES.TOGETHERAI:
url += '/v1/completions';
break;
case TEXTGEN_TYPES.MANCER:
@@ -163,6 +173,15 @@ router.post('/generate', jsonParser, async function (request, response_generate)

setAdditionalHeaders(request, args, baseUrl);

if (request.body.api_type === TEXTGEN_TYPES.TOGETHERAI) {
const stop = Array.isArray(request.body.stop) ? request.body.stop[0] : '';
request.body = _.pickBy(request.body, (_, key) => TOGETHERAI_KEYS.includes(key));
if (typeof stop === 'string' && stop.length > 0) {
request.body.stop = stop;
}
args.body = JSON.stringify(request.body);
}

if (request.body.stream) {
const completionsStream = await fetch(url, args);
// Pipe remote SSE stream to Express response
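
Note: for Together the handler above trims the outgoing body down to the TOGETHERAI_KEYS whitelist from constants.js and flattens the stop array to its first entry before re-serializing args.body. An illustrative before/after (values made up):

// Incoming request.body (abbreviated):
//   { model: 'Gryphe/MythoMax-L2-13b', prompt: '...', max_tokens: 300, temperature: 0.7, top_p: 0.9,
//     api_type: 'togetherai', api_server: 'https://api.together.xyz', stop: ['\nUser:', '</s>'], stream: false, ... }
// Forwarded to Together after the filter:
//   { model: 'Gryphe/MythoMax-L2-13b', prompt: '...', max_tokens: 300, temperature: 0.7, top_p: 0.9,
//     stream: false, stop: '\nUser:' }
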
@@ -12,7 +12,6 @@ const { jsonParser } = require('../express-common');
*/
async function getVector(source, text) {
switch (source) {
case 'togetherai':
case 'mistral':
case 'openai':
return require('../openai-vectors').getOpenAIVector(text, source);
@@ -12,11 +12,6 @@ const SOURCES = {
url: 'api.openai.com',
model: 'text-embedding-ada-002',
},
'togetherai': {
secretKey: SECRET_KEYS.TOGETHERAI,
url: 'api.togetherai.xyz',
model: 'togethercomputer/GPT-NeoXT-Chat-Base-20B',
},
};

/**
@@ -26,7 +21,6 @@ const SOURCES = {
* @returns {Promise<number[]>} - The vector for the text
*/
async function getOpenAIVector(text, source) {

const config = SOURCES[source];

if (!config) {