Add xAI as chat completion source

Author: Cohee
Date: 2025-04-10 22:59:10 +03:00
Parent: c3b1573c91
Commit: 1c52099ed6
11 changed files with 120 additions and 12 deletions


@@ -685,7 +685,7 @@
 </span>
 </div>
 </div>
-<div class="range-block" data-source="openai,claude,windowai,openrouter,ai21,scale,makersuite,mistralai,custom,cohere,perplexity,groq,01ai,nanogpt,deepseek">
+<div class="range-block" data-source="openai,claude,windowai,openrouter,ai21,scale,makersuite,mistralai,custom,cohere,perplexity,groq,01ai,nanogpt,deepseek,xai">
 <div class="range-block-title" data-i18n="Temperature">
 Temperature
 </div>
@@ -698,7 +698,7 @@
 </div>
 </div>
 </div>
-<div class="range-block" data-source="openai,openrouter,custom,cohere,perplexity,groq,mistralai,nanogpt,deepseek">
+<div class="range-block" data-source="openai,openrouter,custom,cohere,perplexity,groq,mistralai,nanogpt,deepseek,xai">
 <div class="range-block-title" data-i18n="Frequency Penalty">
 Frequency Penalty
 </div>
@@ -711,7 +711,7 @@
 </div>
 </div>
 </div>
-<div class="range-block" data-source="openai,openrouter,custom,cohere,perplexity,groq,mistralai,nanogpt,deepseek">
+<div class="range-block" data-source="openai,openrouter,custom,cohere,perplexity,groq,mistralai,nanogpt,deepseek,xai">
 <div class="range-block-title" data-i18n="Presence Penalty">
 Presence Penalty
 </div>
@@ -737,7 +737,7 @@
 </div>
 </div>
 </div>
-<div class="range-block" data-source="openai,claude,openrouter,ai21,scale,makersuite,mistralai,custom,cohere,perplexity,groq,01ai,nanogpt,deepseek">
+<div class="range-block" data-source="openai,claude,openrouter,ai21,scale,makersuite,mistralai,custom,cohere,perplexity,groq,01ai,nanogpt,deepseek,xai">
 <div class="range-block-title" data-i18n="Top P">
 Top P
 </div>
@@ -974,7 +974,7 @@
 </div>
 </div>
 </div>
-<div class="range-block" data-source="openai,openrouter,mistralai,custom,cohere,groq,nanogpt">
+<div class="range-block" data-source="openai,openrouter,mistralai,custom,cohere,groq,nanogpt,xai">
 <div class="range-block-title justifyLeft" data-i18n="Seed">
 Seed
 </div>
@@ -1965,7 +1965,7 @@
 </span>
 </div>
 </div>
-<div class="range-block" data-source="openai,cohere,mistralai,custom,claude,openrouter,groq,deepseek,makersuite,ai21">
+<div class="range-block" data-source="openai,cohere,mistralai,custom,claude,openrouter,groq,deepseek,makersuite,ai21,xai">
 <label for="openai_function_calling" class="checkbox_label flexWrap widthFreeExpand">
 <input id="openai_function_calling" type="checkbox" />
 <span data-i18n="Enable function calling">Enable function calling</span>
@@ -1975,7 +1975,7 @@
 <span data-i18n="enable_functions_desc_3">Can be utilized by various extensions to provide additional functionality.</span>
 </div>
 </div>
-<div class="range-block" data-source="openai,openrouter,makersuite,claude,custom,01ai">
+<div class="range-block" data-source="openai,openrouter,makersuite,claude,custom,01ai,xai">
 <label for="openai_image_inlining" class="checkbox_label flexWrap widthFreeExpand">
 <input id="openai_image_inlining" type="checkbox" />
 <span data-i18n="Send inline images">Send inline images</span>
@@ -2031,7 +2031,7 @@
 </span>
 </div>
 </div>
-<div class="range-block" data-source="deepseek,openrouter,custom,claude">
+<div class="range-block" data-source="deepseek,openrouter,custom,claude,xai">
 <label for="openai_show_thoughts" class="checkbox_label widthFreeExpand">
 <input id="openai_show_thoughts" type="checkbox" />
 <span>
@@ -2045,7 +2045,7 @@
 </span>
 </div>
 </div>
-<div class="flex-container flexFlowColumn wide100p textAlignCenter marginTop10" data-source="openai,custom,claude">
+<div class="flex-container flexFlowColumn wide100p textAlignCenter marginTop10" data-source="openai,custom,claude,xai">
 <div class="flex-container oneline-dropdown" title="Constrains effort on reasoning for reasoning models.&#10;Currently supported values are low, medium, and high.&#10;Reducing reasoning effort can result in faster responses and fewer tokens used on reasoning in a response." data-i18n="[title]Constrains effort on reasoning for reasoning models.">
 <label for="openai_reasoning_effort">
 <span data-i18n="Reasoning Effort">Reasoning Effort</span>
@@ -2756,6 +2756,7 @@
 <option value="perplexity">Perplexity</option>
 <option value="scale">Scale</option>
 <option value="windowai">Window AI</option>
+<option value="xai">xAI (Grok)</option>
 </optgroup>
 </select>
 <div class="inline-drawer wide100p" data-source="openai,claude,mistralai,makersuite,deepseek">
@@ -3424,6 +3425,31 @@
 <select id="model_01ai_select">
 </select>
 </div>
+<div id="xai_form" data-source="xai">
+<h4>
+<a data-i18n="xAI API Key" href="https://console.x.ai/" target="_blank" rel="noopener noreferrer">
+xAI API Key
+</a>
+</h4>
+<div class="flex-container">
+<input id="api_key_xai" name="api_key_xai" class="text_pole flex1" value="" type="text" autocomplete="off">
+<div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_xai"></div>
+</div>
+<div data-for="api_key_01ai" class="neutral_warning" data-i18n="For privacy reasons, your API key will be hidden after you reload the page.">
+For privacy reasons, your API key will be hidden after you reload the page.
+</div>
+<h4 data-i18n="xAI Model">xAI Model</h4>
+<select id="model_xai_select">
+<option value="grok-3-beta">grok-3-beta</option>
+<option value="grok-3-fast-beta">grok-3-fast-beta</option>
+<option value="grok-3-mini-beta">grok-3-mini-beta</option>
+<option value="grok-3-mini-fast-beta">grok-3-mini-fast-beta</option>
+<option value="grok-2-vision-1212">grok-2-vision-1212</option>
+<option value="grok-2-1212">grok-2-1212</option>
+<option value="grok-vision-beta">grok-vision-beta</option>
+<option value="grok-beta">grok-beta</option>
+</select>
+</div>
 <div id="prompt_post_porcessing_form" data-source="custom,openrouter">
 <h4 data-i18n="Prompt Post-Processing">Prompt Post-Processing</h4>
 <select id="custom_prompt_post_processing" class="text_pole" title="Applies additional processing to the prompt before sending it to the API." data-i18n="[title]Applies additional processing to the prompt before sending it to the API.">


@@ -409,6 +409,7 @@ function RA_autoconnect(PrevApi) {
 || (secret_state[SECRET_KEYS.ZEROONEAI] && oai_settings.chat_completion_source == chat_completion_sources.ZEROONEAI)
 || (secret_state[SECRET_KEYS.NANOGPT] && oai_settings.chat_completion_source == chat_completion_sources.NANOGPT)
 || (secret_state[SECRET_KEYS.DEEPSEEK] && oai_settings.chat_completion_source == chat_completion_sources.DEEPSEEK)
+|| (secret_state[SECRET_KEYS.XAI] && oai_settings.chat_completion_source == chat_completion_sources.XAI)
 || (isValidUrl(oai_settings.custom_url) && oai_settings.chat_completion_source == chat_completion_sources.CUSTOM)
 ) {
 $('#api_button_openai').trigger('click');


@@ -184,6 +184,7 @@ export const chat_completion_sources = {
 ZEROONEAI: '01ai',
 NANOGPT: 'nanogpt',
 DEEPSEEK: 'deepseek',
+XAI: 'xai',
 };

 const character_names_behavior = {
@@ -257,6 +258,7 @@ export const settingsToUpdate = {
 nanogpt_model: ['#model_nanogpt_select', 'nanogpt_model', false],
 deepseek_model: ['#model_deepseek_select', 'deepseek_model', false],
 zerooneai_model: ['#model_01ai_select', 'zerooneai_model', false],
+xai_model: ['#model_xai_select', 'xai_model', false],
 custom_model: ['#custom_model_id', 'custom_model', false],
 custom_url: ['#custom_api_url_text', 'custom_url', false],
 custom_include_body: ['#custom_include_body', 'custom_include_body', false],
@@ -345,6 +347,7 @@ const default_settings = {
 nanogpt_model: 'gpt-4o-mini',
 zerooneai_model: 'yi-large',
 deepseek_model: 'deepseek-chat',
+xai_model: 'grok-3-beta',
 custom_model: '',
 custom_url: '',
 custom_include_body: '',
@@ -425,6 +428,7 @@ const oai_settings = {
 nanogpt_model: 'gpt-4o-mini',
 zerooneai_model: 'yi-large',
 deepseek_model: 'deepseek-chat',
+xai_model: 'grok-3-beta',
 custom_model: '',
 custom_url: '',
 custom_include_body: '',
@@ -1644,6 +1648,8 @@ export function getChatCompletionModel(source = null) {
 return oai_settings.nanogpt_model;
 case chat_completion_sources.DEEPSEEK:
 return oai_settings.deepseek_model;
+case chat_completion_sources.XAI:
+return oai_settings.xai_model;
 default:
 throw new Error(`Unknown chat completion source: ${activeSource}`);
 }
@@ -1961,6 +1967,7 @@ async function sendOpenAIRequest(type, messages, signal) {
 const is01AI = oai_settings.chat_completion_source == chat_completion_sources.ZEROONEAI;
 const isNano = oai_settings.chat_completion_source == chat_completion_sources.NANOGPT;
 const isDeepSeek = oai_settings.chat_completion_source == chat_completion_sources.DEEPSEEK;
+const isXAI = oai_settings.chat_completion_source == chat_completion_sources.XAI;
 const isTextCompletion = isOAI && textCompletionModels.includes(oai_settings.openai_model);
 const isQuiet = type === 'quiet';
 const isImpersonate = type === 'impersonate';
@@ -2033,7 +2040,7 @@ async function sendOpenAIRequest(type, messages, signal) {
 }

 // Add logprobs request (currently OpenAI only, max 5 on their side)
-if (useLogprobs && (isOAI || isCustom || isDeepSeek)) {
+if (useLogprobs && (isOAI || isCustom || isDeepSeek || isXAI)) {
 generate_data['logprobs'] = 5;
 }
@@ -2152,7 +2159,7 @@ async function sendOpenAIRequest(type, messages, signal) {
 }
 }

-if ((isOAI || isOpenRouter || isMistral || isCustom || isCohere || isNano) && oai_settings.seed >= 0) {
+if ((isOAI || isOpenRouter || isMistral || isCustom || isCohere || isNano || isXAI) && oai_settings.seed >= 0) {
 generate_data['seed'] = oai_settings.seed;
 }
@@ -2278,6 +2285,11 @@ export function getStreamingReply(data, state, { chatCompletionSource = null, ov
 state.reasoning += (data.choices?.filter(x => x?.delta?.reasoning_content)?.[0]?.delta?.reasoning_content || '');
 }
 return data.choices?.[0]?.delta?.content || '';
+} else if (chat_completion_source === chat_completion_sources.XAI) {
+if (show_thoughts) {
+state.reasoning += (data.choices?.filter(x => x?.delta?.reasoning_content)?.[0]?.delta?.reasoning_content || '');
+}
+return data.choices?.[0]?.delta?.content || '';
 } else if (chat_completion_source === chat_completion_sources.OPENROUTER) {
 if (show_thoughts) {
 state.reasoning += (data.choices?.filter(x => x?.delta?.reasoning)?.[0]?.delta?.reasoning || '');
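
The new XAI branch mirrors the DeepSeek handling: reasoning arrives in delta.reasoning_content and visible text in delta.content. A sketch of how one streamed chunk is consumed (the chunk itself is a made-up example shaped the way this parser reads it, not verified xAI output):

// Hypothetical streamed chunk in the shape getStreamingReply expects.
const chunk = { choices: [{ delta: { reasoning_content: 'Weighing both options…', content: '' } }] };
const state = { reasoning: '' };

state.reasoning += chunk.choices?.filter(x => x?.delta?.reasoning_content)?.[0]?.delta?.reasoning_content || '';
const text = chunk.choices?.[0]?.delta?.content || ''; // stays '' while the model is still reasoning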
@@ -2310,6 +2322,7 @@ function parseChatCompletionLogprobs(data) {
 switch (oai_settings.chat_completion_source) {
 case chat_completion_sources.OPENAI:
 case chat_completion_sources.DEEPSEEK:
+case chat_completion_sources.XAI:
 case chat_completion_sources.CUSTOM:
 if (!data.choices?.length) {
 return null;
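
Routing XAI through the same case as OpenAI means the parser expects the standard chat-completions logprobs layout; an assumed fragment of what it would read:

// Assumed payload shape; the shared parser walks choices[0].logprobs.content.
const data = {
    choices: [{
        logprobs: {
            content: [
                { token: 'Hello', logprob: -0.01, top_logprobs: [{ token: 'Hello', logprob: -0.01 }] },
            ],
        },
    }],
};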
@@ -3231,6 +3244,7 @@ function loadOpenAISettings(data, settings) {
 oai_settings.nanogpt_model = settings.nanogpt_model ?? default_settings.nanogpt_model;
 oai_settings.deepseek_model = settings.deepseek_model ?? default_settings.deepseek_model;
 oai_settings.zerooneai_model = settings.zerooneai_model ?? default_settings.zerooneai_model;
+oai_settings.xai_model = settings.xai_model ?? default_settings.xai_model;
 oai_settings.custom_model = settings.custom_model ?? default_settings.custom_model;
 oai_settings.custom_url = settings.custom_url ?? default_settings.custom_url;
 oai_settings.custom_include_body = settings.custom_include_body ?? default_settings.custom_include_body;
@@ -3316,6 +3330,8 @@ function loadOpenAISettings(data, settings) {
 $('#model_deepseek_select').val(oai_settings.deepseek_model);
 $(`#model_deepseek_select option[value="${oai_settings.deepseek_model}"`).prop('selected', true);
 $('#model_01ai_select').val(oai_settings.zerooneai_model);
+$('#model_xai_select').val(oai_settings.xai_model);
+$(`#model_xai_select option[value="${oai_settings.xai_model}"`).attr('selected', true);
 $('#custom_model_id').val(oai_settings.custom_model);
 $('#custom_api_url_text').val(oai_settings.custom_url);
 $('#openai_max_context').val(oai_settings.openai_max_context);
@@ -4312,6 +4328,11 @@ async function onModelChange() {
 $('#custom_model_id').val(value).trigger('input');
 }

+if ($(this).is('#model_xai_select')) {
+console.log('XAI model changed to', value);
+oai_settings.xai_model = value;
+}
+
 if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) {
 if (oai_settings.max_context_unlocked) {
 $('#openai_max_context').attr('max', unlocked_max);
@@ -4584,6 +4605,22 @@ async function onModelChange() {
 $('#temp_openai').attr('max', oai_max_temp).val(oai_settings.temp_openai).trigger('input');
 }

+if (oai_settings.chat_completion_source === chat_completion_sources.XAI) {
+if (oai_settings.max_context_unlocked) {
+$('#openai_max_context').attr('max', unlocked_max);
+} else if (oai_settings.xai_model.includes('grok-2-vision')) {
+$('#openai_max_context').attr('max', max_32k);
+} else if (oai_settings.xai_model.includes('grok-vision')) {
+$('#openai_max_context').attr('max', max_8k);
+} else {
+$('#openai_max_context').attr('max', max_128k);
+}
+oai_settings.openai_max_context = Math.min(Number($('#openai_max_context').attr('max')), oai_settings.openai_max_context);
+$('#openai_max_context').val(oai_settings.openai_max_context).trigger('input');
+$('#temp_openai').attr('max', oai_max_temp).val(oai_settings.temp_openai).trigger('input');
+}
+
 if (oai_settings.chat_completion_source === chat_completion_sources.COHERE) {
 oai_settings.pres_pen_openai = Math.min(Math.max(0, oai_settings.pres_pen_openai), 1);
 $('#pres_pen_openai').attr('max', 1).attr('min', 0).val(oai_settings.pres_pen_openai).trigger('input');
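
The new block derives the context-size ceiling from the model id and then clamps the saved value to it. Condensed, the selection reads as below (constant names as used in the diff; their numeric values live elsewhere in openai.js):

// Condensed form of the slider-limit selection added above.
function getXaiMaxContext(model, unlocked) {
    if (unlocked) return unlocked_max;                 // max_context_unlocked flag set by the user
    if (model.includes('grok-2-vision')) return max_32k;
    if (model.includes('grok-vision')) return max_8k;
    return max_128k;                                   // grok-2-1212 and the grok-3 family
}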
@@ -4822,6 +4859,19 @@ async function onConnectButtonClick(e) {
 }
 }

+if (oai_settings.chat_completion_source === chat_completion_sources.XAI) {
+const api_key_xai = String($('#api_key_xai').val()).trim();
+if (api_key_xai.length) {
+await writeSecret(SECRET_KEYS.XAI, api_key_xai);
+}
+if (!secret_state[SECRET_KEYS.XAI]) {
+console.log('No secret key saved for XAI');
+return;
+}
+}
+
 startStatusLoading();
 saveSettingsDebounced();
 await getStatusOpen();
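
The connect handler follows the same pattern as the other sources: persist whatever is typed into the key field, bail out if no key is stored, then run the shared status check. A condensed sketch of that flow, using helper names that appear elsewhere in this diff:

// Condensed view of the xAI branch in onConnectButtonClick.
const key = String($('#api_key_xai').val()).trim();
if (key.length) {
    await writeSecret(SECRET_KEYS.XAI, key);     // persisted through the existing secret storage
}
if (!secret_state[SECRET_KEYS.XAI]) {
    return;                                      // nothing saved, nothing to connect with
}
startStatusLoading();
saveSettingsDebounced();
await getStatusOpen();                           // ends up in the /status handler added later in this diff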
@@ -4878,6 +4928,9 @@ function toggleChatCompletionForms() {
 else if (oai_settings.chat_completion_source == chat_completion_sources.DEEPSEEK) {
 $('#model_deepseek_select').trigger('change');
 }
+else if (oai_settings.chat_completion_source == chat_completion_sources.XAI) {
+$('#model_xai_select').trigger('change');
+}
 $('[data-source]').each(function () {
 const validSources = $(this).data('source').split(',');
 $(this).toggle(validSources.includes(oai_settings.chat_completion_source));
@@ -5014,6 +5067,8 @@ export function isImageInliningSupported() {
 'pixtral-large-2411',
 'c4ai-aya-vision-8b',
 'c4ai-aya-vision-32b',
+'grok-2-vision',
+'grok-vision',
 ];

 switch (oai_settings.chat_completion_source) {
@@ -5033,6 +5088,8 @@ export function isImageInliningSupported() {
 return visionSupportedModels.some(model => oai_settings.mistralai_model.includes(model));
 case chat_completion_sources.COHERE:
 return visionSupportedModels.some(model => oai_settings.cohere_model.includes(model));
+case chat_completion_sources.XAI:
+return visionSupportedModels.some(model => oai_settings.xai_model.includes(model));
 default:
 return false;
 }
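
Because isImageInliningSupported matches by substring, both current vision models (and any future revision that keeps the same prefix) qualify; a tiny illustration:

// Both Grok vision models match an entry in visionSupportedModels.
['grok-2-vision-1212', 'grok-vision-beta'].map(m =>
    ['grok-2-vision', 'grok-vision'].some(v => m.includes(v))); // -> [true, true]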
@@ -5629,6 +5686,7 @@ export function initOpenAI() {
 $('#model_deepseek_select').on('change', onModelChange);
 $('#model_01ai_select').on('change', onModelChange);
 $('#model_custom_select').on('change', onModelChange);
+$('#model_xai_select').on('change', onModelChange);
 $('#settings_preset_openai').on('change', onSettingsPresetChange);
 $('#new_oai_preset').on('click', onNewPresetClick);
 $('#delete_oai_preset').on('click', onDeletePresetClick);


@@ -109,6 +109,8 @@ export function extractReasoningFromData(data, {
 switch (chatCompletionSource ?? oai_settings.chat_completion_source) {
 case chat_completion_sources.DEEPSEEK:
 return data?.choices?.[0]?.message?.reasoning_content ?? '';
+case chat_completion_sources.XAI:
+return data?.choices?.[0]?.message?.reasoning_content ?? '';
 case chat_completion_sources.OPENROUTER:
 return data?.choices?.[0]?.message?.reasoning ?? '';
 case chat_completion_sources.MAKERSUITE:
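
The non-streaming path mirrors DeepSeek as well: reasoning is read from message.reasoning_content. An assumed response fragment showing what the extractor expects (the field name is taken from this code, not verified against xAI documentation):

// Assumed shape; extractReasoningFromData only touches message.reasoning_content.
const data = {
    choices: [{
        message: {
            reasoning_content: 'Compare both options first…',
            content: 'Option B is the better fit.',
        },
    }],
};
const reasoning = data?.choices?.[0]?.message?.reasoning_content ?? ''; // surfaced as reasoning in the UI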


@@ -42,6 +42,7 @@ export const SECRET_KEYS = {
 DEEPSEEK: 'api_key_deepseek',
 SERPER: 'api_key_serper',
 FALAI: 'api_key_falai',
+XAI: 'api_key_xai',
 };

 const INPUT_MAP = {
@@ -76,6 +77,7 @@ const INPUT_MAP = {
 [SECRET_KEYS.NANOGPT]: '#api_key_nanogpt',
 [SECRET_KEYS.GENERIC]: '#api_key_generic',
 [SECRET_KEYS.DEEPSEEK]: '#api_key_deepseek',
+[SECRET_KEYS.XAI]: '#api_key_xai',
 };

 async function clearSecret() {


@@ -3942,6 +3942,7 @@ function getModelOptions(quiet) {
 { id: 'model_nanogpt_select', api: 'openai', type: chat_completion_sources.NANOGPT },
 { id: 'model_01ai_select', api: 'openai', type: chat_completion_sources.ZEROONEAI },
 { id: 'model_deepseek_select', api: 'openai', type: chat_completion_sources.DEEPSEEK },
+{ id: 'model_xai_select', api: 'openai', type: chat_completion_sources.XAI },
 { id: 'model_novel_select', api: 'novel', type: null },
 { id: 'horde_model', api: 'koboldhorde', type: null },
 ];


@@ -586,6 +586,7 @@ export class ToolManager {
 chat_completion_sources.DEEPSEEK,
 chat_completion_sources.MAKERSUITE,
 chat_completion_sources.AI21,
+chat_completion_sources.XAI,
 ];
 return supportedSources.includes(oai_settings.chat_completion_source);
 }
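
Adding XAI to supportedSources lets the existing OpenAI-style tool definitions be sent unchanged. For context, a generic tool entry in that format (an illustration of the wire format, not code from this commit):

// Standard OpenAI-style tool definition, as already emitted for the other sources listed above.
const tools = [{
    type: 'function',
    function: {
        name: 'get_weather',
        description: 'Get the current weather for a location.',
        parameters: {
            type: 'object',
            properties: { location: { type: 'string' } },
            required: ['location'],
        },
    },
}];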


@@ -176,6 +176,7 @@ export const CHAT_COMPLETION_SOURCES = {
 ZEROONEAI: '01ai',
 NANOGPT: 'nanogpt',
 DEEPSEEK: 'deepseek',
+XAI: 'xai',
 };

 /**


@@ -53,6 +53,7 @@ const API_01AI = 'https://api.lingyiwanwu.com/v1';
 const API_AI21 = 'https://api.ai21.com/studio/v1';
 const API_NANOGPT = 'https://nano-gpt.com/api/v1';
 const API_DEEPSEEK = 'https://api.deepseek.com/beta';
+const API_XAI = 'https://api.x.ai/v1';

 /**
 * Applies a post-processing step to the generated messages.
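
The backend treats xAI as another OpenAI-compatible endpoint: only a base URL and a key are added, and the shared request code does the rest. For reference, a minimal standalone request against that base URL could look like the sketch below (the /chat/completions path and payload shape are assumptions based on OpenAI compatibility, not something this diff defines):

// Illustrative only; SillyTavern itself routes requests through its own /generate handler.
const response = await fetch(`${API_XAI}/chat/completions`, {
    method: 'POST',
    headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${apiKey}`, // key read with readSecret(directories, SECRET_KEYS.XAI)
    },
    body: JSON.stringify({
        model: 'grok-3-beta',
        messages: [{ role: 'user', content: 'Hello' }],
    }),
});
const data = await response.json();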
@@ -872,6 +873,9 @@ router.post('/status', async function (request, response_getstatus_openai) {
 api_url = new URL(request.body.reverse_proxy || API_DEEPSEEK.replace('/beta', ''));
 api_key_openai = request.body.reverse_proxy ? request.body.proxy_password : readSecret(request.user.directories, SECRET_KEYS.DEEPSEEK);
 headers = {};
+} else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.XAI) {
+api_url = API_XAI;
+api_key_openai = readSecret(request.user.directories, SECRET_KEYS.XAI);
 } else {
 console.warn('This chat completion source is not supported yet.');
 return response_getstatus_openai.status(400).send({ error: true });
@@ -1150,6 +1154,11 @@ router.post('/generate', function (request, response) {
 apiKey = readSecret(request.user.directories, SECRET_KEYS.ZEROONEAI);
 headers = {};
 bodyParams = {};
+} else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.XAI) {
+apiUrl = API_XAI;
+apiKey = readSecret(request.user.directories, SECRET_KEYS.XAI);
+headers = {};
+bodyParams = {};
 } else {
 console.warn('This chat completion source is not supported yet.');
 return response.status(400).send({ error: true });
@@ -1162,6 +1171,12 @@ router.post('/generate', function (request, response) {
 }
 }

+if ([CHAT_COMPLETION_SOURCES.XAI].includes(request.body.chat_completion_source)) {
+if (['grok-3-mini-beta', 'grok-3-mini-fast-beta'].includes(request.body.model)) {
+bodyParams['reasoning_effort'] = request.body.reasoning_effort === 'high' ? 'high' : 'low';
+}
+}
+
 if (!apiKey && !request.body.reverse_proxy && request.body.chat_completion_source !== CHAT_COMPLETION_SOURCES.CUSTOM) {
 console.warn('OpenAI API key is missing.');
 return response.status(400).send({ error: true });
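
Only the grok-3-mini variants get a reasoning-effort parameter, and the mapping collapses SillyTavern's effort selector to the two values sent here: 'high' passes through, anything else becomes 'low'. A condensed sketch of the same logic:

// Mirrors the new block in the /generate route; undefined means the parameter is not sent at all.
function mapXaiReasoningEffort(model, reasoningEffort) {
    if (!['grok-3-mini-beta', 'grok-3-mini-fast-beta'].includes(model)) {
        return undefined;
    }
    return reasoningEffort === 'high' ? 'high' : 'low';
}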


@@ -52,6 +52,7 @@ export const SECRET_KEYS = {
 GENERIC: 'api_key_generic',
 DEEPSEEK: 'api_key_deepseek',
 SERPER: 'api_key_serper',
+XAI: 'api_key_xai',
 };

 // These are the keys that are safe to expose, even if allowKeysExposure is false


@@ -413,7 +413,7 @@ export function convertGooglePrompt(messages, model, useSysPrompt, names) {
 }
 }

-const system_instruction = { parts: [{ text: sys_prompt.trim() }]};
+const system_instruction = { parts: [{ text: sys_prompt.trim() }] };
 const toolNameMap = {};

 const contents = [];