Merge pull request #1844 from infermaticAI/InfermaticAI

Add InfermaticAI as a text completion source
Authored by Cohee on 2024-02-24 14:28:09 +02:00; committed via GitHub
commit dab9bbb514
10 changed files with 149 additions and 8 deletions

View File

@@ -1896,6 +1896,7 @@
     <option value="llamacpp">llama.cpp</option>
     <option value="ollama">Ollama</option>
     <option value="togetherai">TogetherAI</option>
+    <option value="infermaticai">InfermaticAI</option>
 </select>
 </div>
 <div data-tg-type="togetherai" class="flex-container flexFlowColumn">
@@ -1916,6 +1917,24 @@
     </select>
 </div>
 </div>
+<div data-tg-type="infermaticai" class="flex-container flexFlowColumn">
+    <h4 data-i18n="InfermaticAI API Key">InfermaticAI API Key</h4>
+    <div class="flex-container">
+        <input id="api_key_infermaticai" name="api_key_infermaticai" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
+        <div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_infermaticai"></div>
+    </div>
+    <div data-for="api_key_infermaticai" class="neutral_warning">
+        For privacy reasons, your API key will be hidden after you reload the page.
+    </div>
+    <div>
+        <h4 data-i18n="InfermaticAI Model">InfermaticAI Model</h4>
+        <select id="model_infermaticai_select">
+            <option>
+                -- Connect to the API --
+            </option>
+        </select>
+    </div>
+</div>
 <div data-tg-type="mancer" class="flex-container flexFlowColumn">
     <div class="flex-container flexFlowColumn">
     </div>

View File

@@ -20,7 +20,7 @@ import {
     validateTextGenUrl,
 } from './scripts/textgen-settings.js';
-const { MANCER, TOGETHERAI, OOBA, APHRODITE, OLLAMA } = textgen_types;
+const { MANCER, TOGETHERAI, OOBA, APHRODITE, OLLAMA, INFERMATICAI } = textgen_types;
 import {
     world_info,
@@ -194,7 +194,7 @@ import { createPersona, initPersonas, selectCurrentPersona, setPersonaDescriptio
 import { getBackgrounds, initBackgrounds, loadBackgroundSettings, background_settings } from './scripts/backgrounds.js';
 import { hideLoader, showLoader } from './scripts/loader.js';
 import { BulkEditOverlay, CharacterContextMenu } from './scripts/BulkEditOverlay.js';
-import { loadMancerModels, loadOllamaModels, loadTogetherAIModels } from './scripts/textgen-models.js';
+import { loadMancerModels, loadOllamaModels, loadTogetherAIModels, loadInfermaticAIModels } from './scripts/textgen-models.js';
 import { appendFileContent, hasPendingFileAttachment, populateFileAttachment, decodeStyleTags, encodeStyleTags } from './scripts/chats.js';
 import { initPresetManager } from './scripts/preset-manager.js';
 import { evaluateMacros } from './scripts/macros.js';
@@ -1053,6 +1053,9 @@ async function getStatusTextgen() {
     } else if (textgen_settings.type === OLLAMA) {
         loadOllamaModels(data?.data);
         online_status = textgen_settings.ollama_model || 'Connected';
+    } else if (textgen_settings.type === INFERMATICAI) {
+        loadInfermaticAIModels(data?.data);
+        online_status = textgen_settings.infermaticai_model;
     } else {
         online_status = data?.result;
     }
@@ -7684,6 +7687,11 @@ const CONNECT_API_MAP = {
         button: '#api_button_openai',
         source: chat_completion_sources.CUSTOM,
     },
+    'infermaticai': {
+        selected: 'textgenerationwebui',
+        button: '#api_button_textgenerationwebui',
+        type: textgen_types.INFERMATICAI,
+    },
 };

 async function selectContextCallback(_, name) {
@@ -8616,6 +8624,11 @@ jQuery(async function () {
         await writeSecret(SECRET_KEYS.OOBA, oobaKey);
     }

+    const infermaticAIKey = String($('#api_key_infermaticai').val()).trim();
+    if (infermaticAIKey.length) {
+        await writeSecret(SECRET_KEYS.INFERMATICAI, infermaticAIKey);
+    }
+
     validateTextGenUrl();
     startStatusLoading();
     main_api = 'textgenerationwebui';

View File

@@ -389,8 +389,9 @@ function RA_autoconnect(PrevApi) {
             }
             break;
         case 'textgenerationwebui':
-            if ((textgen_settings.type === textgen_types.MANCER && secret_state[SECRET_KEYS.MANCER]) ||
-                (textgen_settings.type === textgen_types.TOGETHERAI && secret_state[SECRET_KEYS.TOGETHERAI])
+            if ((textgen_settings.type === textgen_types.MANCER && secret_state[SECRET_KEYS.MANCER])
+                || (textgen_settings.type === textgen_types.TOGETHERAI && secret_state[SECRET_KEYS.TOGETHERAI])
+                || (textgen_settings.type === textgen_types.INFERMATICAI && secret_state[SECRET_KEYS.INFERMATICAI])
             ) {
                 $('#api_button_textgenerationwebui').trigger('click');
             }
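
The auto-connect gate now recognizes three key-gated cloud backends. As an illustrative alternative only (not code from this commit), the same check can be written as a lookup from textgen type to required secret:

// Illustrative sketch: pair each key-gated type with the secret it needs.
const KEY_GATED_TYPES = {
    [textgen_types.MANCER]: SECRET_KEYS.MANCER,
    [textgen_types.TOGETHERAI]: SECRET_KEYS.TOGETHERAI,
    [textgen_types.INFERMATICAI]: SECRET_KEYS.INFERMATICAI,
};

const requiredSecret = KEY_GATED_TYPES[textgen_settings.type];
if (requiredSecret && secret_state[requiredSecret]) {
    $('#api_button_textgenerationwebui').trigger('click');
}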

View File

@@ -16,6 +16,7 @@ export const SECRET_KEYS = {
     SERPAPI: 'api_key_serpapi',
     MISTRALAI: 'api_key_mistralai',
     TOGETHERAI: 'api_key_togetherai',
+    INFERMATICAI: 'api_key_infermaticai',
     CUSTOM: 'api_key_custom',
     OOBA: 'api_key_ooba',
 };
@@ -37,6 +38,7 @@ const INPUT_MAP = {
     [SECRET_KEYS.CUSTOM]: '#api_key_custom',
     [SECRET_KEYS.TOGETHERAI]: '#api_key_togetherai',
     [SECRET_KEYS.OOBA]: '#api_key_ooba',
+    [SECRET_KEYS.INFERMATICAI]: '#api_key_infermaticai',
 };

 async function clearSecret() {

View File

@@ -4,6 +4,7 @@ import { textgenerationwebui_settings as textgen_settings, textgen_types } from
 let mancerModels = [];
 let togetherModels = [];
+let infermaticAIModels = [];

 export async function loadOllamaModels(data) {
     if (!Array.isArray(data)) {
@@ -52,6 +53,32 @@ export async function loadTogetherAIModels(data) {
     }
 }

+export async function loadInfermaticAIModels(data) {
+    if (!Array.isArray(data)) {
+        console.error('Invalid Infermatic AI models data', data);
+        return;
+    }
+
+    infermaticAIModels = data;
+
+    if (!data.find(x => x.id === textgen_settings.infermaticai_model)) {
+        textgen_settings.infermaticai_model = data[0]?.id || '';
+    }
+
+    $('#model_infermaticai_select').empty();
+    for (const model of data) {
+        if (model.display_type === 'image') {
+            continue;
+        }
+
+        const option = document.createElement('option');
+        option.value = model.id;
+        option.text = model.id;
+        option.selected = model.id === textgen_settings.infermaticai_model;
+        $('#model_infermaticai_select').append(option);
+    }
+}
+
 export async function loadMancerModels(data) {
     if (!Array.isArray(data)) {
         console.error('Invalid Mancer models data', data);
@@ -91,6 +118,14 @@ function onTogetherModelSelect() {
     setGenerationParamsFromPreset({ max_length: model.context_length });
 }

+function onInfermaticAIModelSelect() {
+    const modelName = String($('#model_infermaticai_select').val());
+    textgen_settings.infermaticai_model = modelName;
+    $('#api_button_textgenerationwebui').trigger('click');
+
+    const model = infermaticAIModels.find(x => x.id === modelName);
+    setGenerationParamsFromPreset({ max_length: model.context_length });
+}
+
 function onOllamaModelSelect() {
     const modelId = String($('#ollama_model').val());
     textgen_settings.ollama_model = modelId;
@@ -130,6 +165,20 @@ function getTogetherModelTemplate(option) {
     `));
 }

+function getInfermaticAIModelTemplate(option) {
+    const model = infermaticAIModels.find(x => x.id === option?.element?.value);
+
+    if (!option.id || !model) {
+        return option.text;
+    }
+
+    return $((`
+        <div class="flex-container flexFlowColumn">
+            <div><strong>${DOMPurify.sanitize(model.id)}</strong></div>
+        </div>
+    `));
+}
+
 async function downloadOllamaModel() {
     try {
         const serverUrl = textgen_settings.server_urls[textgen_types.OLLAMA];
@@ -174,6 +223,7 @@ async function downloadOllamaModel() {
 jQuery(function () {
     $('#mancer_model').on('change', onMancerModelSelect);
     $('#model_togetherai_select').on('change', onTogetherModelSelect);
+    $('#model_infermaticai_select').on('change', onInfermaticAIModelSelect);
     $('#ollama_model').on('change', onOllamaModelSelect);
     $('#ollama_download_model').on('click', downloadOllamaModel);
@@ -198,5 +248,12 @@ jQuery(function () {
         searchInputCssClass: 'text_pole',
         width: '100%',
     });
+    $('#model_infermaticai_select').select2({
+        placeholder: 'Select a model',
+        searchInputPlaceholder: 'Search models...',
+        searchInputCssClass: 'text_pole',
+        width: '100%',
+        templateResult: getInfermaticAIModelTemplate,
+    });
     }
 });

View File

@@ -31,9 +31,10 @@ export const textgen_types = {
     TOGETHERAI: 'togetherai',
     LLAMACPP: 'llamacpp',
     OLLAMA: 'ollama',
+    INFERMATICAI: 'infermaticai',
 };

-const { MANCER, APHRODITE, TABBY, TOGETHERAI, OOBA, OLLAMA, LLAMACPP } = textgen_types;
+const { MANCER, APHRODITE, TABBY, TOGETHERAI, OOBA, OLLAMA, LLAMACPP, INFERMATICAI } = textgen_types;

 const OOBA_DEFAULT_ORDER = [
     'temperature',
     'dynamic_temperature',
@@ -56,6 +57,7 @@ const MANCER_SERVER_KEY = 'mancer_server';
 const MANCER_SERVER_DEFAULT = 'https://neuro.mancer.tech';
 let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEFAULT;
 let TOGETHERAI_SERVER = 'https://api.together.xyz';
+let INFERMATICAI_SERVER = 'https://api.totalgpt.ai';

 const SERVER_INPUTS = {
     [textgen_types.OOBA]: '#textgenerationwebui_api_url_text',
@@ -121,6 +123,7 @@ const settings = {
     type: textgen_types.OOBA,
     mancer_model: 'mytholite',
     togetherai_model: 'Gryphe/MythoMax-L2-13b',
+    infermaticai_model: '',
     ollama_model: '',
     legacy_api: false,
     sampler_order: KOBOLDCPP_ORDER,
@@ -220,6 +223,10 @@ export function getTextGenServer() {
         return TOGETHERAI_SERVER;
     }

+    if (settings.type === INFERMATICAI) {
+        return INFERMATICAI_SERVER;
+    }
+
     return settings.server_urls[settings.type] ?? '';
 }
@@ -243,8 +250,8 @@ async function selectPreset(name) {
 function formatTextGenURL(value) {
     try {
-        // Mancer/Together doesn't need any formatting (it's hardcoded)
-        if (settings.type === MANCER || settings.type === TOGETHERAI) {
+        // Mancer/Together/InfermaticAI doesn't need any formatting (it's hardcoded)
+        if (settings.type === MANCER || settings.type === TOGETHERAI || settings.type === INFERMATICAI) {
             return value;
         }
@@ -835,6 +842,10 @@ function getModel() {
         return settings.togetherai_model;
     }

+    if (settings.type === INFERMATICAI) {
+        return settings.infermaticai_model;
+    }
+
     if (settings.type === APHRODITE) {
         return online_status;
     }
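
A short sketch of how the settings module resolves the endpoint and model once the new type is active (values are illustrative; both functions are internal to this module):

// Assumes the user has connected and picked a model in the InfermaticAI dropdown.
settings.type = INFERMATICAI;
settings.infermaticai_model = 'example-70b-instruct';

getTextGenServer(); // -> 'https://api.totalgpt.ai' (fixed endpoint, the URL field is ignored)
getModel();         // -> 'example-70b-instruct'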

View File

@@ -19,6 +19,14 @@ function getTogetherAIHeaders() {
     }) : {};
 }

+function getInfermaticAIHeaders() {
+    const apiKey = readSecret(SECRET_KEYS.INFERMATICAI);
+
+    return apiKey ? ({
+        'Authorization': `Bearer ${apiKey}`,
+    }) : {};
+}
+
 function getAphroditeHeaders() {
     const apiKey = readSecret(SECRET_KEYS.APHRODITE);
@@ -80,6 +88,9 @@ function setAdditionalHeaders(request, args, server) {
         case TEXTGEN_TYPES.OOBA:
             headers = getOobaHeaders();
             break;
+        case TEXTGEN_TYPES.INFERMATICAI:
+            headers = getInfermaticAIHeaders();
+            break;
         default:
             headers = server ? getOverrideHeaders((new URL(server))?.host) : {};
             break;
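
The new header builder mirrors the TogetherAI one; a minimal sketch of its output, assuming a key has been saved under SECRET_KEYS.INFERMATICAI and that setAdditionalHeaders merges the result into the outgoing request like the existing branches:

const headers = getInfermaticAIHeaders();
// With a stored key:    { Authorization: 'Bearer <stored InfermaticAI key>' }
// Without a stored key: {}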

View File

@@ -176,8 +176,21 @@ const TEXTGEN_TYPES = {
     TOGETHERAI: 'togetherai',
     LLAMACPP: 'llamacpp',
     OLLAMA: 'ollama',
+    INFERMATICAI: 'infermaticai',
 };

+const INFERMATICAI_KEYS = [
+    'model',
+    'prompt',
+    'max_tokens',
+    'temperature',
+    'top_p',
+    'top_k',
+    'repetition_penalty',
+    'stream',
+    'stop',
+];
+
 // https://docs.together.ai/reference/completions
 const TOGETHERAI_KEYS = [
     'model',
@@ -225,4 +238,5 @@ module.exports = {
     AVATAR_HEIGHT,
     TOGETHERAI_KEYS,
     OLLAMA_KEYS,
+    INFERMATICAI_KEYS,
 };

View File

@@ -4,7 +4,7 @@ const _ = require('lodash');
 const Readable = require('stream').Readable;

 const { jsonParser } = require('../../express-common');
-const { TEXTGEN_TYPES, TOGETHERAI_KEYS, OLLAMA_KEYS } = require('../../constants');
+const { TEXTGEN_TYPES, TOGETHERAI_KEYS, OLLAMA_KEYS, INFERMATICAI_KEYS } = require('../../constants');
 const { forwardFetchResponse, trimV1 } = require('../../util');
 const { setAdditionalHeaders } = require('../../additional-headers');
@@ -106,6 +106,7 @@ router.post('/status', jsonParser, async function (request, response) {
             case TEXTGEN_TYPES.APHRODITE:
             case TEXTGEN_TYPES.KOBOLDCPP:
             case TEXTGEN_TYPES.LLAMACPP:
+            case TEXTGEN_TYPES.INFERMATICAI:
                 url += '/v1/models';
                 break;
             case TEXTGEN_TYPES.MANCER:
@@ -232,6 +233,7 @@ router.post('/generate', jsonParser, async function (request, response) {
             case TEXTGEN_TYPES.TABBY:
             case TEXTGEN_TYPES.KOBOLDCPP:
             case TEXTGEN_TYPES.TOGETHERAI:
+            case TEXTGEN_TYPES.INFERMATICAI:
                 url += '/v1/completions';
                 break;
             case TEXTGEN_TYPES.MANCER:
@@ -261,6 +263,11 @@ router.post('/generate', jsonParser, async function (request, response) {
             args.body = JSON.stringify(request.body);
         }

+        if (request.body.api_type === TEXTGEN_TYPES.INFERMATICAI) {
+            request.body = _.pickBy(request.body, (_, key) => INFERMATICAI_KEYS.includes(key));
+            args.body = JSON.stringify(request.body);
+        }
+
         if (request.body.api_type === TEXTGEN_TYPES.OLLAMA) {
             args.body = JSON.stringify({
                 model: request.body.model,
@@ -292,6 +299,11 @@ router.post('/generate', jsonParser, async function (request, response) {
                 data['choices'] = [{ text }];
             }

+            // Map InfermaticAI response to OAI completions format
+            if (completionsReply.url.includes('https://api.totalgpt.ai')) {
+                data['choices'] = (data?.choices || []).map(choice => ({ text: choice.message.content }));
+            }
+
             return response.send(data);
         } else {
             const text = await completionsReply.text();
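
Taken together, the two new blocks in the generate handler apply an allow-list to the outgoing body and reshape the reply. A self-contained sketch of both steps (sample values are invented; the key list mirrors the INFERMATICAI_KEYS constant added above, and the chat-style upstream shape is assumed from the mapping itself):

const _ = require('lodash');

const INFERMATICAI_KEYS = ['model', 'prompt', 'max_tokens', 'temperature', 'top_p', 'top_k', 'repetition_penalty', 'stream', 'stop'];

// 1) Only allow-listed fields are forwarded; everything else (api_type, sampler extras, ...) is dropped.
const body = { api_type: 'infermaticai', model: 'example-70b-instruct', prompt: 'Hello', max_tokens: 64, mirostat_mode: 2 };
const filtered = _.pickBy(body, (_value, key) => INFERMATICAI_KEYS.includes(key));
// -> { model: 'example-70b-instruct', prompt: 'Hello', max_tokens: 64 }

// 2) The upstream reply is flattened back into the OAI text-completions shape the frontend expects.
const upstream = { choices: [{ message: { content: 'Hi there!' } }] };
const mapped = { ...upstream, choices: (upstream.choices || []).map(choice => ({ text: choice.message.content })) };
// -> { ..., choices: [{ text: 'Hi there!' }] }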

View File

@@ -30,6 +30,7 @@ const SECRET_KEYS = {
     MISTRALAI: 'api_key_mistralai',
     CUSTOM: 'api_key_custom',
     OOBA: 'api_key_ooba',
+    INFERMATICAI: 'api_key_infermaticai',
 };

 // These are the keys that are safe to expose, even if allowKeysExposure is false