Add NanoGPT as chat completions provider
parent 21c4ac7abf
commit 0882fb2d15
@@ -2602,6 +2602,7 @@
         <option value="groq">Groq</option>
         <option value="makersuite">Google AI Studio</option>
         <option value="mistralai">MistralAI</option>
+        <option value="nanogpt">NanoGPT</option>
         <option value="openrouter">OpenRouter</option>
         <option value="perplexity">Perplexity</option>
         <option value="scale">Scale</option>
@@ -3084,6 +3085,76 @@
             </optgroup>
         </select>
     </div>
+    <div id="nanogpt_form" data-source="nanogpt">
+        <h4 data-i18n="NanoGPT API Key">NanoGPT API Key</h4>
+        <div class="flex-container">
+            <input id="api_key_nanogpt" name="api_key_nanogpt" class="text_pole flex1" value="" type="text" autocomplete="off">
+            <div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_nanogpt"></div>
+        </div>
+        <div data-for="api_key_nanogpt" class="neutral_warning" data-i18n="For privacy reasons, your API key will be hidden after you reload the page.">
+            For privacy reasons, your API key will be hidden after you reload the page.
+        </div>
+        <h4 data-i18n="NanoGPT Model">NanoGPT Model</h4>
+        <select id="model_nanogpt_select">
+            <option value="chatgpt-4o-latest">ChatGPT 4o</option>
+            <option value="o1-preview">OpenAI o1</option>
+            <option value="o1-mini">OpenAI o1-mini</option>
+            <option value="claude-3-5-sonnet-20241022">Claude 3.5 Sonnet</option>
+            <option value="claude-3-5-sonnet-20240620">Claude 3.5 Sonnet Old</option>
+            <option value="google/gemini-pro-1.5">Gemini 1.5 Pro</option>
+            <option value="google/gemini-pro-1.5-exp">Gemini 1.5 Pro Exp</option>
+            <option value="Meta-Llama-3-1-405B-Instruct-FP8">Llama 3.1 Large</option>
+            <option value="nvidia/llama-3.1-nemotron-70b-instruct">Nvidia Nemotron</option>
+            <option value="nousresearch/hermes-3-llama-3.1-405b">Hermes 3 Large</option>
+            <option value="meta-llama/llama-3.2-90b-vision-instruct">Llama 3.2 Medium</option>
+            <option value="x-ai/grok-beta">Grok 2</option>
+            <option value="neversleep/llama-3.1-lumimaid-70b">Lumimaid v0.2</option>
+            <option value="inflection/inflection-3-pi">Inflection 3 Pi</option>
+            <option value="inflection/inflection-3-productivity">Inflection 3 Productivity</option>
+            <option value="microsoft/wizardlm-2-8x22b">WizardLM-2 8x22B</option>
+            <option value="accounts/fireworks/models/llama-v3p1-405b-instruct">Llama 3.1 Large</option>
+            <option value="gpt-4o-2024-08-06">GPT 4o</option>
+            <option value="accounts/fireworks/models/llama-v3p1-70b-instruct">Llama 3.1 Medium</option>
+            <option value="llama-3.1-70b-instruct">Llama 3.1 Medium</option>
+            <option value="llama-3.1-sonar-huge-128k-online">Perplexity Online</option>
+            <option value="gpt-4o-mini">GPT 4o mini</option>
+            <option value="meta-llama/llama-3.1-405b-instruct">Llama 3.1 Large</option>
+            <option value="claude-3-opus-20240229">Claude 3 Opus</option>
+            <option value="google/gemini-flash-1.5-exp">Gemini 1.5 Flash</option>
+            <option value="google/gemini-flash-1.5">Gemini 1.5 Flash</option>
+            <option value="llama-3.1-sonar-large-128k-online">Perplexity Online Medium</option>
+            <option value="nousresearch/hermes-3-llama-3.1-405b:extended">Hermes 3 Large</option>
+            <option value="gryphe/mythomax-l2-13b">MythoMax 13B</option>
+            <option value="deepseek/deepseek-chat">DeepSeek V2.5</option>
+            <option value="qwen/qwen-2.5-72b-instruct">Qwen2.5 72B</option>
+            <option value="eva-unit-01/eva-qwen-2.5-14b">EVA Qwen2.5 14B</option>
+            <option value="cognitivecomputations/dolphin-mixtral-8x7b">Dolphin 2.6 Mixtral 8x7b</option>
+            <option value="gpt-4-turbo-preview">GPT 4 Turbo</option>
+            <option value="gpt-4o">GPT 4o</option>
+            <option value="gpt-3.5-turbo">GPT 3.5 Turbo</option>
+            <option value="gemini-1.5-flash-001">Gemini 1.5 Flash</option>
+            <option value="gemini-1.5-pro-001">Gemini 1.5 Pro</option>
+            <option value="free-model">Playground</option>
+            <option value="anthracite-org/magnum-v4-72b">Magnum v4 72B</option>
+            <option value="thedrummer/rocinante-12b">Rocinante 12B</option>
+            <option value="cognitivecomputations/dolphin-mixtral-8x22b">Dolphin 2.9.2 Mixtral 8x22B</option>
+            <option value="meta-llama/llama-3.1-70b-instruct">Llama 3.1 70b Instruct</option>
+            <option value="meta-llama/llama-3.1-8b-instruct">Llama 3.1 8b Instruct</option>
+            <option value="sao10k/l3-euryale-70b">L3 Euryale 70B</option>
+            <option value="mistralai/mistral-tiny">Mistral Tiny</option>
+            <option value="mistralai/mistral-7b-instruct">Mistral 7B Instruct</option>
+            <option value="meta-llama/llama-3-70b-instruct">Llama 3 70b Instruct</option>
+            <option value="microsoft/wizardlm-2-7b">WizardLM-2 7B</option>
+            <option value="cohere/command-r">Cohere: Command R</option>
+            <option value="nousresearch/hermes-3-llama-3.1-70b">Nous Hermes 3 70B Instruct</option>
+            <option value="mistralai/mistral-nemo">Mistral Nemo</option>
+            <option value="meta-llama/llama-3.2-3b-instruct">Llama 3.2 3b Instruct</option>
+            <option value="neversleep/llama-3-lumimaid-70b">Llama 3 Lumimaid 70B</option>
+            <option value="anthracite-org/magnum-v2-72b">Magnum v2 72B</option>
+            <option value="Meta-Llama-3-1-8B-Instruct-FP8">Llama 3.1 8B (decentralized)</option>
+        </select>
+
+    </div>
     <div id="perplexity_form" data-source="perplexity">
         <h4 data-i18n="Perplexity API Key">Perplexity API Key</h4>
         <div class="flex-container">
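Note: the data-source="nanogpt" attribute on the new form is what ties this markup to the "NanoGPT" option added to the source dropdown in the first hunk. A minimal sketch of that show/hide pattern follows; the helper name is hypothetical, and the real logic lives in toggleChatCompletionForms() further down in this diff, which may match sources differently.

// Hedged sketch of the data-source toggling pattern (hypothetical helper;
// the actual toggleChatCompletionForms() implementation may differ).
function showActiveProviderForm(selectedSource) {
    // Hide every provider form, then reveal the one whose data-source
    // matches the currently selected chat completion source.
    $('[data-source]').each(function () {
        const source = String($(this).data('source')); // e.g. 'nanogpt'
        $(this).toggle(source === selectedSource);
    });
}

// Example: selecting NanoGPT in the dropdown would show #nanogpt_form.
showActiveProviderForm('nanogpt');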
@@ -181,6 +181,7 @@ export const chat_completion_sources = {
     GROQ: 'groq',
     ZEROONEAI: '01ai',
     BLOCKENTROPY: 'blockentropy',
+    NANOGPT: 'nanogpt',
 };

 const character_names_behavior = {
@@ -250,6 +251,7 @@ const default_settings = {
     cohere_model: 'command-r-plus',
     perplexity_model: 'llama-3.1-70b-instruct',
     groq_model: 'llama-3.1-70b-versatile',
+    nanogpt_model: 'gpt-4o-mini',
     zerooneai_model: 'yi-large',
     blockentropy_model: 'be-70b-base-llama3.1',
     custom_model: '',
@@ -326,6 +328,7 @@ const oai_settings = {
     cohere_model: 'command-r-plus',
     perplexity_model: 'llama-3.1-70b-instruct',
     groq_model: 'llama-3.1-70b-versatile',
+    nanogpt_model: 'gpt-4o-mini',
     zerooneai_model: 'yi-large',
     blockentropy_model: 'be-70b-base-llama3.1',
     custom_model: '',
@@ -1479,6 +1482,8 @@ function getChatCompletionModel() {
             return oai_settings.zerooneai_model;
         case chat_completion_sources.BLOCKENTROPY:
             return oai_settings.blockentropy_model;
+        case chat_completion_sources.NANOGPT:
+            return oai_settings.nanogpt_model;
         default:
             throw new Error(`Unknown chat completion source: ${oai_settings.chat_completion_source}`);
     }
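getChatCompletionModel() only resolves the model id string for the selected source; the request itself is made elsewhere with the stored key. As a rough illustration only, a direct OpenAI-compatible call to NanoGPT might look like the sketch below. The base URL and payload shape are assumptions about NanoGPT's API, not something this diff establishes; consult NanoGPT's documentation for the actual endpoint.

// Hedged sketch: assumes NanoGPT exposes an OpenAI-compatible
// /chat/completions endpoint; the base URL below is an assumption.
const NANOGPT_BASE_URL = 'https://nano-gpt.com/api/v1';

async function sketchNanoGptCompletion(apiKey, model, messages) {
    const response = await fetch(`${NANOGPT_BASE_URL}/chat/completions`, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'Authorization': `Bearer ${apiKey}`,
        },
        body: JSON.stringify({ model, messages }),
    });

    if (!response.ok) {
        throw new Error(`NanoGPT request failed: ${response.status}`);
    }

    const data = await response.json();
    return data.choices[0].message.content;
}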
@@ -2960,6 +2965,7 @@ function loadOpenAISettings(data, settings) {
     oai_settings.cohere_model = settings.cohere_model ?? default_settings.cohere_model;
     oai_settings.perplexity_model = settings.perplexity_model ?? default_settings.perplexity_model;
     oai_settings.groq_model = settings.groq_model ?? default_settings.groq_model;
+    oai_settings.nanogpt_model = settings.nanogpt_model ?? default_settings.nanogpt_model;
     oai_settings.blockentropy_model = settings.blockentropy_model ?? default_settings.blockentropy_model;
     oai_settings.zerooneai_model = settings.zerooneai_model ?? default_settings.zerooneai_model;
     oai_settings.custom_model = settings.custom_model ?? default_settings.custom_model;
@@ -3040,6 +3046,8 @@ function loadOpenAISettings(data, settings) {
     $(`#model_perplexity_select option[value="${oai_settings.perplexity_model}"`).attr('selected', true);
     $('#model_groq_select').val(oai_settings.groq_model);
     $(`#model_groq_select option[value="${oai_settings.groq_model}"`).attr('selected', true);
+    $('#model_nanogpt_select').val(oai_settings.nanogpt_model);
+    $(`#model_nanogpt_select option[value="${oai_settings.nanogpt_model}"`).attr('selected', true);
     $('#model_01ai_select').val(oai_settings.zerooneai_model);
     $('#model_blockentropy_select').val(oai_settings.blockentropy_model);
     $('#custom_model_id').val(oai_settings.custom_model);
@@ -3734,6 +3742,7 @@ function onSettingsPresetChange() {
         cohere_model: ['#model_cohere_select', 'cohere_model', false],
         perplexity_model: ['#model_perplexity_select', 'perplexity_model', false],
         groq_model: ['#model_groq_select', 'groq_model', false],
+        nanogpt_model: ['#model_nanogpt_select', 'nanogpt_model', false],
         zerooneai_model: ['#model_01ai_select', 'zerooneai_model', false],
         blockentropy_model: ['#model_blockentropy_select', 'blockentropy_model', false],
         custom_model: ['#custom_model_id', 'custom_model', false],
@@ -3983,6 +3992,11 @@ async function onModelChange() {
         oai_settings.groq_model = value;
     }

+    if ($(this).is('#model_nanogpt_select')) {
+        console.log('NanoGPT model changed to', value);
+        oai_settings.nanogpt_model = value;
+    }
+
     if (value && $(this).is('#model_01ai_select')) {
         console.log('01.AI model changed to', value);
         oai_settings.zerooneai_model = value;
@@ -4497,6 +4511,19 @@ async function onConnectButtonClick(e) {
             }
         }

+        if (oai_settings.chat_completion_source == chat_completion_sources.NANOGPT) {
+            const api_key_nanogpt = String($('#api_key_nanogpt').val()).trim();
+
+            if (api_key_nanogpt.length) {
+                await writeSecret(SECRET_KEYS.NANOGPT, api_key_nanogpt);
+            }
+
+            if (!secret_state[SECRET_KEYS.NANOGPT]) {
+                console.log('No secret key saved for NanoGPT');
+                return;
+            }
+        }
+
         if (oai_settings.chat_completion_source == chat_completion_sources.ZEROONEAI) {
             const api_key_01ai = String($('#api_key_01ai').val()).trim();

@@ -4566,6 +4593,9 @@ function toggleChatCompletionForms() {
     else if (oai_settings.chat_completion_source == chat_completion_sources.GROQ) {
         $('#model_groq_select').trigger('change');
     }
+    else if (oai_settings.chat_completion_source == chat_completion_sources.NANOGPT) {
+        $('#model_nanogpt_select').trigger('change');
+    }
     else if (oai_settings.chat_completion_source == chat_completion_sources.ZEROONEAI) {
         $('#model_01ai_select').trigger('change');
     }
@@ -5287,6 +5317,7 @@ export function initOpenAI() {
     $('#model_cohere_select').on('change', onModelChange);
     $('#model_perplexity_select').on('change', onModelChange);
     $('#model_groq_select').on('change', onModelChange);
+    $('#model_nanogpt_select').on('change', onModelChange);
     $('#model_01ai_select').on('change', onModelChange);
     $('#model_blockentropy_select').on('change', onModelChange);
     $('#model_custom_select').on('change', onModelChange);
@@ -34,6 +34,7 @@ export const SECRET_KEYS = {
     STABILITY: 'api_key_stability',
     BLOCKENTROPY: 'api_key_blockentropy',
     CUSTOM_OPENAI_TTS: 'api_key_custom_openai_tts',
+    NANOGPT: 'api_key_nanogpt',
     TAVILY: 'api_key_tavily',
 };

@@ -67,6 +68,7 @@ const INPUT_MAP = {
     [SECRET_KEYS.ZEROONEAI]: '#api_key_01ai',
     [SECRET_KEYS.HUGGINGFACE]: '#api_key_huggingface',
     [SECRET_KEYS.BLOCKENTROPY]: '#api_key_blockentropy',
+    [SECRET_KEYS.NANOGPT]: '#api_key_nanogpt',
 };

 async function clearSecret() {
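The SECRET_KEYS.NANOGPT constant and the INPUT_MAP entry above work as a pair: the constant names the stored secret, and the map points the secrets UI at the matching input field. A small sketch of that pairing, mirroring the NanoGPT branch added to onConnectButtonClick() earlier in this diff — saveProviderKey is a hypothetical helper; writeSecret, secret_state, SECRET_KEYS, and INPUT_MAP are the names that appear in the diff.

// Hypothetical helper illustrating how SECRET_KEYS and INPUT_MAP pair up;
// writeSecret() and secret_state come from the existing secrets module.
async function saveProviderKey(secretKey) {
    const inputSelector = INPUT_MAP[secretKey];      // e.g. '#api_key_nanogpt'
    const value = String($(inputSelector).val()).trim();

    if (value.length) {
        await writeSecret(secretKey, value);         // stores under e.g. 'api_key_nanogpt'
    }

    return Boolean(secret_state[secretKey]);         // true once a key has been saved
}

// Example: saveProviderKey(SECRET_KEYS.NANOGPT) mirrors the NanoGPT
// branch added to onConnectButtonClick() above.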
@@ -3632,6 +3632,7 @@ function getModelOptions(quiet) {
         { id: 'model_cohere_select', api: 'openai', type: chat_completion_sources.COHERE },
         { id: 'model_perplexity_select', api: 'openai', type: chat_completion_sources.PERPLEXITY },
         { id: 'model_groq_select', api: 'openai', type: chat_completion_sources.GROQ },
+        { id: 'model_nanogpt_select', api: 'openai', type: chat_completion_sources.NANOGPT },
         { id: 'model_01ai_select', api: 'openai', type: chat_completion_sources.ZEROONEAI },
         { id: 'model_blockentropy_select', api: 'openai', type: chat_completion_sources.BLOCKENTROPY },
         { id: 'model_novel_select', api: 'novel', type: null },
@@ -200,6 +200,7 @@ export const CHAT_COMPLETION_SOURCES = {
     GROQ: 'groq',
     ZEROONEAI: '01ai',
     BLOCKENTROPY: 'blockentropy',
+    NANOGPT: 'nanogpt',
 };

 /**