diff --git a/public/locales/fr-fr.json b/public/locales/fr-fr.json
index b2fc0b361..22d163a65 100644
--- a/public/locales/fr-fr.json
+++ b/public/locales/fr-fr.json
@@ -1385,7 +1385,7 @@
"enable_functions_desc_1": "Autorise l'utilisation",
"enable_functions_desc_2": "outils de fonction",
"enable_functions_desc_3": "Peut être utilisé par diverses extensions pour fournir des fonctionnalités supplémentaires.",
- "Show model thoughts": "Afficher les pensées du modèle",
+ "Show model reasoning": "Afficher les pensées du modèle",
"Display the model's internal thoughts in the response.": "Afficher les pensées internes du modèle dans la réponse.",
"Confirm token parsing with": "Confirmer l'analyse des tokens avec",
"openai_logit_bias_no_items": "Aucun élément",
diff --git a/public/locales/zh-cn.json b/public/locales/zh-cn.json
index 17a6ab6e8..d951491c5 100644
--- a/public/locales/zh-cn.json
+++ b/public/locales/zh-cn.json
@@ -266,7 +266,7 @@
"Use system prompt": "使用系统提示词",
"Merges_all_system_messages_desc_1": "合并所有系统消息,直到第一条具有非系统角色的消息,然后通过",
"Merges_all_system_messages_desc_2": "字段发送。",
- "Show model thoughts": "展示思维链",
+ "Show model reasoning": "展示思维链",
"Display the model's internal thoughts in the response.": "展示模型在回复时的内部思维链。",
"Assistant Prefill": "AI预填",
"Expand the editor": "展开编辑器",
diff --git a/public/locales/zh-tw.json b/public/locales/zh-tw.json
index 062aa7942..c8ec26b22 100644
--- a/public/locales/zh-tw.json
+++ b/public/locales/zh-tw.json
@@ -2357,7 +2357,7 @@
"Forbid": "禁止",
"Aphrodite only. Determines the order of samplers. Skew is always applied post-softmax, so it's not included here.": "僅限 Aphrodite 使用。決定採樣器的順序。偏移總是在 softmax 後應用,因此不包括在此。",
"Aphrodite only. Determines the order of samplers.": "僅限 Aphrodite 使用。決定採樣器的順序。",
- "Show model thoughts": "顯示模型思維鏈",
+ "Show model reasoning": "顯示模型思維鏈",
"Display the model's internal thoughts in the response.": "在回應中顯示模型的思維鏈(內部思考過程)。",
"Generic (OpenAI-compatible) [LM Studio, LiteLLM, etc.]": "通用(兼容 OpenAI)[LM Studio, LiteLLM 等]",
"Model ID (optional)": "模型 ID(可選)",
diff --git a/public/script.js b/public/script.js
index dc3d299da..006f946d7 100644
--- a/public/script.js
+++ b/public/script.js
@@ -267,6 +267,7 @@ import { initSettingsSearch } from './scripts/setting-search.js';
import { initBulkEdit } from './scripts/bulk-edit.js';
import { deriveTemplatesFromChatTemplate } from './scripts/chat-templates.js';
import { getContext } from './scripts/st-context.js';
+import { initReasoning, PromptReasoning } from './scripts/reasoning.js';
// API OBJECT FOR EXTERNAL WIRING
globalThis.SillyTavern = {
@@ -982,6 +983,7 @@ async function firstLoadInit() {
initServerHistory();
initSettingsSearch();
initBulkEdit();
+ initReasoning();
await initScrapers();
doDailyExtensionUpdatesCheck();
await hideLoader();
@@ -2200,6 +2202,7 @@ function getMessageFromTemplate({
isUser,
avatarImg,
bias,
+ reasoning,
isSystem,
title,
timerValue,
@@ -2224,6 +2227,7 @@ function getMessageFromTemplate({
mes.find('.avatar img').attr('src', avatarImg);
mes.find('.ch_name .name_text').text(characterName);
mes.find('.mes_bias').html(bias);
+ mes.find('.mes_reasoning').html(reasoning);
mes.find('.timestamp').text(timestamp).attr('title', `${extra?.api ? extra.api + ' - ' : ''}${extra?.model ?? ''}`);
mes.find('.mesIDDisplay').text(`#${mesId}`);
tokenCount && mes.find('.tokenCounterDisplay').text(`${tokenCount}t`);
@@ -2238,10 +2242,16 @@ function getMessageFromTemplate({
return mes;
}
+/**
+ * Re-renders a message block with updated content.
+ * @param {number} messageId Message ID
+ * @param {object} message Message object
+ */
export function updateMessageBlock(messageId, message) {
const messageElement = $(`#chat [mesid="${messageId}"]`);
const text = message?.extra?.display_text ?? message.mes;
messageElement.find('.mes_text').html(messageFormatting(text, message.name, message.is_system, message.is_user, messageId));
+ messageElement.find('.mes_reasoning').html(messageFormatting(message.extra?.reasoning ?? '', '', false, false, -1));
addCopyToCodeBlocks(messageElement);
appendMediaToMessage(message, messageElement);
}
@@ -2400,6 +2410,7 @@ export function addOneMessage(mes, { type = 'normal', insertAfter = null, scroll
sanitizerOverrides,
);
const bias = messageFormatting(mes.extra?.bias ?? '', '', false, false, -1);
+ const reasoning = messageFormatting(mes.extra?.reasoning ?? '', '', false, false, -1);
let bookmarkLink = mes?.extra?.bookmark_link ?? '';
let params = {
@@ -2409,6 +2420,7 @@ export function addOneMessage(mes, { type = 'normal', insertAfter = null, scroll
isUser: mes.is_user,
avatarImg: avatarImg,
bias: bias,
+ reasoning: reasoning,
isSystem: isSystem,
title: title,
bookmarkLink: bookmarkLink,
@@ -2468,6 +2480,7 @@ export function addOneMessage(mes, { type = 'normal', insertAfter = null, scroll
const swipeMessage = chatElement.find(`[mesid="${chat.length - 1}"]`);
swipeMessage.attr('swipeid', params.swipeId);
swipeMessage.find('.mes_text').html(messageText).attr('title', title);
+ swipeMessage.find('.mes_reasoning').html(reasoning);
swipeMessage.find('.timestamp').text(timestamp).attr('title', `${params.extra.api} - ${params.extra.model}`);
appendMediaToMessage(mes, swipeMessage);
if (power_user.timestamp_model_icon && params.extra?.api) {
@@ -3078,6 +3091,7 @@ class StreamingProcessor {
this.messageTextDom = null;
this.messageTimerDom = null;
this.messageTokenCounterDom = null;
+ this.messageReasoningDom = null;
/** @type {HTMLTextAreaElement} */
this.sendTextarea = document.querySelector('#send_textarea');
this.type = type;
@@ -3093,6 +3107,7 @@ class StreamingProcessor {
/** @type {import('./scripts/logprobs.js').TokenLogprobs[]} */
this.messageLogprobs = [];
this.toolCalls = [];
+ this.reasoning = '';
}
#checkDomElements(messageId) {
@@ -3101,6 +3116,7 @@ class StreamingProcessor {
this.messageTextDom = this.messageDom?.querySelector('.mes_text');
this.messageTimerDom = this.messageDom?.querySelector('.mes_timer');
this.messageTokenCounterDom = this.messageDom?.querySelector('.tokenCounterDisplay');
+ this.messageReasoningDom = this.messageDom?.querySelector('.mes_reasoning');
}
}
@@ -3178,18 +3194,27 @@ class StreamingProcessor {
this.#checkDomElements(messageId);
this.#updateMessageBlockVisibility();
const currentTime = new Date();
- // Don't waste time calculating token count for streaming
- const currentTokenCount = isFinal && power_user.message_token_count_enabled ? getTokenCount(processedText, 0) : 0;
- const timePassed = formatGenerationTimer(this.timeStarted, currentTime, currentTokenCount);
chat[messageId]['mes'] = processedText;
chat[messageId]['gen_started'] = this.timeStarted;
chat[messageId]['gen_finished'] = currentTime;
- if (currentTokenCount) {
- if (!chat[messageId]['extra']) {
- chat[messageId]['extra'] = {};
- }
+ if (!chat[messageId]['extra']) {
+ chat[messageId]['extra'] = {};
+ }
+ if (this.reasoning) {
+ chat[messageId]['extra']['reasoning'] = this.reasoning;
+ if (this.messageReasoningDom instanceof HTMLElement) {
+ const formattedReasoning = messageFormatting(this.reasoning, '', false, false, -1);
+ this.messageReasoningDom.innerHTML = formattedReasoning;
+ }
+ }
+
+ // Don't waste time calculating token count for streaming
+ const tokenCountText = (this.reasoning || '') + processedText;
+ const currentTokenCount = isFinal && power_user.message_token_count_enabled ? getTokenCount(tokenCountText, 0) : 0;
+
+ if (currentTokenCount) {
chat[messageId]['extra']['token_count'] = currentTokenCount;
if (this.messageTokenCounterDom instanceof HTMLElement) {
this.messageTokenCounterDom.textContent = `${currentTokenCount}t`;
@@ -3211,10 +3236,13 @@ class StreamingProcessor {
if (this.messageTextDom instanceof HTMLElement) {
this.messageTextDom.innerHTML = formattedText;
}
+
+ const timePassed = formatGenerationTimer(this.timeStarted, currentTime, currentTokenCount);
if (this.messageTimerDom instanceof HTMLElement) {
this.messageTimerDom.textContent = timePassed.timerValue;
this.messageTimerDom.title = timePassed.timerTitle;
}
+
this.setFirstSwipe(messageId);
}
@@ -3321,7 +3349,7 @@ class StreamingProcessor {
}
/**
- * @returns {Generator<{ text: string, swipes: string[], logprobs: import('./scripts/logprobs.js').TokenLogprobs, toolCalls: any[] }, void, void>}
+ * @returns {Generator<{ text: string, swipes: string[], logprobs: import('./scripts/logprobs.js').TokenLogprobs, toolCalls: any[], state: any }, void, void>}
*/
*nullStreamingGeneration() {
throw new Error('Generation function for streaming is not hooked up');
@@ -3343,7 +3371,7 @@ class StreamingProcessor {
try {
const sw = new Stopwatch(1000 / power_user.streaming_fps);
const timestamps = [];
- for await (const { text, swipes, logprobs, toolCalls } of this.generator()) {
+ for await (const { text, swipes, logprobs, toolCalls, state } of this.generator()) {
timestamps.push(Date.now());
if (this.isStopped) {
return;
@@ -3355,6 +3383,7 @@ class StreamingProcessor {
if (logprobs) {
this.messageLogprobs.push(...(Array.isArray(logprobs) ? logprobs : [logprobs]));
}
+ this.reasoning = state?.reasoning ?? '';
await eventSource.emit(event_types.STREAM_TOKEN_RECEIVED, text);
await sw.tick(() => this.onProgressStreaming(this.messageId, this.continueMessage + text));
}
@@ -3821,6 +3850,14 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
coreChat.pop();
}
+ const reasoning = new PromptReasoning();
+ for (let i = coreChat.length - 1; i >= 0; i--) {
+ if (reasoning.isLimitReached()) {
+ break;
+ }
+ coreChat[i] = { ...coreChat[i], mes: reasoning.addToMessage(coreChat[i].mes, coreChat[i].extra?.reasoning) };
+ }
+
coreChat = await Promise.all(coreChat.map(async (chatItem, index) => {
let message = chatItem.mes;
let regexType = chatItem.is_user ? regex_placement.USER_INPUT : regex_placement.AI_OUTPUT;
@@ -4742,6 +4779,7 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
//const getData = await response.json();
let getMessage = extractMessageFromData(data);
let title = extractTitleFromData(data);
+ let reasoning = extractReasoningFromData(data);
kobold_horde_model = title;
const swipes = extractMultiSwipes(data, type);
@@ -4768,10 +4806,10 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
else {
// Without streaming we'll be having a full message on continuation. Treat it as a last chunk.
if (originalType !== 'continue') {
- ({ type, getMessage } = await saveReply(type, getMessage, false, title, swipes));
+ ({ type, getMessage } = await saveReply(type, getMessage, false, title, swipes, reasoning));
}
else {
- ({ type, getMessage } = await saveReply('appendFinal', getMessage, false, title, swipes));
+ ({ type, getMessage } = await saveReply('appendFinal', getMessage, false, title, swipes, reasoning));
}
// This relies on `saveReply` having been called to add the message to the chat, so it must be last.
@@ -5659,31 +5697,40 @@ function extractMessageFromData(data) {
return data;
}
- function getTextContext() {
- switch (main_api) {
- case 'kobold':
- return data.results[0].text;
- case 'koboldhorde':
- return data.text;
- case 'textgenerationwebui':
- return data.choices?.[0]?.text ?? data.content ?? data.response ?? '';
- case 'novel':
- return data.output;
- case 'openai':
- return data?.choices?.[0]?.message?.content ?? data?.choices?.[0]?.text ?? data?.text ?? data?.message?.content?.[0]?.text ?? data?.message?.tool_plan ?? '';
- default:
- return '';
+ switch (main_api) {
+ case 'kobold':
+ return data.results[0].text;
+ case 'koboldhorde':
+ return data.text;
+ case 'textgenerationwebui':
+ return data.choices?.[0]?.text ?? data.content ?? data.response ?? '';
+ case 'novel':
+ return data.output;
+ case 'openai':
+ return data?.choices?.[0]?.message?.content ?? data?.choices?.[0]?.text ?? data?.text ?? data?.message?.content?.[0]?.text ?? data?.message?.tool_plan ?? '';
+ default:
+ return '';
+ }
+}
+
+/**
+ * Extracts the reasoning from the response data.
+ * @param {object} data Response data
+ * @returns {string} Extracted reasoning
+ */
+function extractReasoningFromData(data) {
+ if (main_api === 'openai' && oai_settings.show_thoughts) {
+ switch (oai_settings.chat_completion_source) {
+ case chat_completion_sources.DEEPSEEK:
+ return data?.choices?.[0]?.message?.reasoning_content ?? '';
+ case chat_completion_sources.OPENROUTER:
+ return data?.choices?.[0]?.message?.reasoning ?? '';
+ case chat_completion_sources.MAKERSUITE:
+ return data?.responseContent?.parts?.filter(part => part.thought)?.map(part => part.text)?.join('\n\n') ?? '';
}
}
- const content = getTextContext();
-
- if (main_api === 'openai' && oai_settings.chat_completion_source === chat_completion_sources.DEEPSEEK && oai_settings.show_thoughts) {
- const thoughts = data?.choices?.[0]?.message?.reasoning_content ?? '';
- return [thoughts, content].filter(x => x).join('\n\n');
- }
-
- return content;
+ return '';
}
/**
@@ -5866,7 +5913,7 @@ export function cleanUpMessage(getMessage, isImpersonate, isContinue, displayInc
return getMessage;
}
-export async function saveReply(type, getMessage, fromStreaming, title, swipes) {
+export async function saveReply(type, getMessage, fromStreaming, title, swipes, reasoning) {
if (type != 'append' && type != 'continue' && type != 'appendFinal' && chat.length && (chat[chat.length - 1]['swipe_id'] === undefined ||
chat[chat.length - 1]['is_user'])) {
type = 'normal';
@@ -5891,8 +5938,10 @@ export async function saveReply(type, getMessage, fromStreaming, title, swipes)
chat[chat.length - 1]['send_date'] = getMessageTimeStamp();
chat[chat.length - 1]['extra']['api'] = getGeneratingApi();
chat[chat.length - 1]['extra']['model'] = getGeneratingModel();
+        chat[chat.length - 1]['extra']['reasoning'] = reasoning ?? '';
if (power_user.message_token_count_enabled) {
- chat[chat.length - 1]['extra']['token_count'] = await getTokenCountAsync(chat[chat.length - 1]['mes'], 0);
+ const tokenCountText = (reasoning || '') + chat[chat.length - 1]['mes'];
+ chat[chat.length - 1]['extra']['token_count'] = await getTokenCountAsync(tokenCountText, 0);
}
const chat_id = (chat.length - 1);
await eventSource.emit(event_types.MESSAGE_RECEIVED, chat_id);
@@ -5911,8 +5960,10 @@ export async function saveReply(type, getMessage, fromStreaming, title, swipes)
chat[chat.length - 1]['send_date'] = getMessageTimeStamp();
chat[chat.length - 1]['extra']['api'] = getGeneratingApi();
chat[chat.length - 1]['extra']['model'] = getGeneratingModel();
+        chat[chat.length - 1]['extra']['reasoning'] = (chat[chat.length - 1]['extra']['reasoning'] || '') + (reasoning || '');
if (power_user.message_token_count_enabled) {
- chat[chat.length - 1]['extra']['token_count'] = await getTokenCountAsync(chat[chat.length - 1]['mes'], 0);
+ const tokenCountText = (reasoning || '') + chat[chat.length - 1]['mes'];
+ chat[chat.length - 1]['extra']['token_count'] = await getTokenCountAsync(tokenCountText, 0);
}
const chat_id = (chat.length - 1);
await eventSource.emit(event_types.MESSAGE_RECEIVED, chat_id);
@@ -5928,8 +5979,10 @@ export async function saveReply(type, getMessage, fromStreaming, title, swipes)
chat[chat.length - 1]['send_date'] = getMessageTimeStamp();
chat[chat.length - 1]['extra']['api'] = getGeneratingApi();
chat[chat.length - 1]['extra']['model'] = getGeneratingModel();
+        chat[chat.length - 1]['extra']['reasoning'] = (chat[chat.length - 1]['extra']['reasoning'] || '') + (reasoning || '');
if (power_user.message_token_count_enabled) {
- chat[chat.length - 1]['extra']['token_count'] = await getTokenCountAsync(chat[chat.length - 1]['mes'], 0);
+ const tokenCountText = (reasoning || '') + chat[chat.length - 1]['mes'];
+ chat[chat.length - 1]['extra']['token_count'] = await getTokenCountAsync(tokenCountText, 0);
}
const chat_id = (chat.length - 1);
await eventSource.emit(event_types.MESSAGE_RECEIVED, chat_id);
@@ -5945,6 +5998,7 @@ export async function saveReply(type, getMessage, fromStreaming, title, swipes)
chat[chat.length - 1]['send_date'] = getMessageTimeStamp();
chat[chat.length - 1]['extra']['api'] = getGeneratingApi();
chat[chat.length - 1]['extra']['model'] = getGeneratingModel();
+        chat[chat.length - 1]['extra']['reasoning'] = reasoning ?? '';
if (power_user.trim_spaces) {
getMessage = getMessage.trim();
}
@@ -5954,7 +6008,8 @@ export async function saveReply(type, getMessage, fromStreaming, title, swipes)
chat[chat.length - 1]['gen_finished'] = generationFinished;
if (power_user.message_token_count_enabled) {
- chat[chat.length - 1]['extra']['token_count'] = await getTokenCountAsync(chat[chat.length - 1]['mes'], 0);
+ const tokenCountText = (reasoning || '') + chat[chat.length - 1]['mes'];
+ chat[chat.length - 1]['extra']['token_count'] = await getTokenCountAsync(tokenCountText, 0);
}
if (selected_group) {
@@ -7997,9 +8052,23 @@ function updateEditArrowClasses() {
}
}
-function closeMessageEditor() {
- if (this_edit_mes_id) {
- $(`#chat .mes[mesid="${this_edit_mes_id}"] .mes_edit_cancel`).click();
+/**
+ * Closes the message editor.
+ * @param {'message'|'reasoning'|'all'} what What to close. Default is 'all'.
+ */
+export function closeMessageEditor(what = 'all') {
+ if (what === 'message' || what === 'all') {
+ if (this_edit_mes_id) {
+ $(`#chat .mes[mesid="${this_edit_mes_id}"] .mes_edit_cancel`).click();
+ }
+ }
+ if (what === 'reasoning' || what === 'all') {
+ document.querySelectorAll('.reasoning_edit_textarea').forEach((el) => {
+ const cancelButton = el.closest('.mes')?.querySelector('.mes_reasoning_edit_cancel');
+ if (cancelButton instanceof HTMLElement) {
+ cancelButton.click();
+ }
+ });
}
}
@@ -8483,7 +8552,8 @@ function swipe_left() { // when we swipe left..but no generation.
}
const swipeMessage = $('#chat').find(`[mesid="${chat.length - 1}"]`);
- const tokenCount = await getTokenCountAsync(chat[chat.length - 1].mes, 0);
+ const tokenCountText = (chat[chat.length - 1]?.extra?.reasoning || '') + chat[chat.length - 1].mes;
+ const tokenCount = await getTokenCountAsync(tokenCountText, 0);
chat[chat.length - 1]['extra']['token_count'] = tokenCount;
swipeMessage.find('.tokenCounterDisplay').text(`${tokenCount}t`);
}
@@ -8647,6 +8717,7 @@ const swipe_right = () => {
// resets the timer
swipeMessage.find('.mes_timer').html('');
swipeMessage.find('.tokenCounterDisplay').text('');
+ swipeMessage.find('.mes_reasoning').html('');
} else {
//console.log('showing previously generated swipe candidate, or "..."');
//console.log('onclick right swipe calling addOneMessage');
@@ -8657,7 +8728,8 @@ const swipe_right = () => {
chat[chat.length - 1].extra = {};
}
- const tokenCount = await getTokenCountAsync(chat[chat.length - 1].mes, 0);
+ const tokenCountText = (chat[chat.length - 1]?.extra?.reasoning || '') + chat[chat.length - 1].mes;
+ const tokenCount = await getTokenCountAsync(tokenCountText, 0);
chat[chat.length - 1]['extra']['token_count'] = tokenCount;
swipeMessage.find('.tokenCounterDisplay').text(`${tokenCount}t`);
}
@@ -9459,7 +9531,8 @@ function addDebugFunctions() {
message.extra = {};
}
- message.extra.token_count = await getTokenCountAsync(message.mes, 0);
+ const tokenCountText = (message?.extra?.reasoning || '') + message.mes;
+ message.extra.token_count = await getTokenCountAsync(tokenCountText, 0);
}
await saveChatConditional();
@@ -11228,14 +11301,15 @@ jQuery(async function () {
$(document).keyup(function (e) {
if (e.key === 'Escape') {
- const isEditVisible = $('#curEditTextarea').is(':visible');
+ const isEditVisible = $('#curEditTextarea').is(':visible') || $('.reasoning_edit_textarea').length > 0;
if (isEditVisible && power_user.auto_save_msg_edits === false) {
- closeMessageEditor();
+ closeMessageEditor('all');
$('#send_textarea').focus();
return;
}
if (isEditVisible && power_user.auto_save_msg_edits === true) {
$(`#chat .mes[mesid="${this_edit_mes_id}"] .mes_edit_done`).click();
+ closeMessageEditor('reasoning');
$('#send_textarea').focus();
return;
}
diff --git a/public/scripts/kai-settings.js b/public/scripts/kai-settings.js
index 6efadce87..65d47fc4b 100644
--- a/public/scripts/kai-settings.js
+++ b/public/scripts/kai-settings.js
@@ -188,7 +188,7 @@ export async function generateKoboldWithStreaming(generate_data, signal) {
if (data?.token) {
text += data.token;
}
- yield { text, swipes: [], toolCalls: [] };
+ yield { text, swipes: [], toolCalls: [], state: {} };
}
};
}
diff --git a/public/scripts/nai-settings.js b/public/scripts/nai-settings.js
index f95e7d9f6..91ff09ef6 100644
--- a/public/scripts/nai-settings.js
+++ b/public/scripts/nai-settings.js
@@ -746,7 +746,7 @@ export async function generateNovelWithStreaming(generate_data, signal) {
text += data.token;
}
- yield { text, swipes: [], logprobs: parseNovelAILogprobs(data.logprobs), toolCalls: [] };
+ yield { text, swipes: [], logprobs: parseNovelAILogprobs(data.logprobs), toolCalls: [], state: {} };
}
};
}
diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index 2d0f44458..f1d898430 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -2095,7 +2095,7 @@ async function sendOpenAIRequest(type, messages, signal) {
let text = '';
const swipes = [];
const toolCalls = [];
- const state = {};
+ const state = { reasoning: '' };
while (true) {
const { done, value } = await reader.read();
if (done) return;
@@ -2113,7 +2113,7 @@ async function sendOpenAIRequest(type, messages, signal) {
ToolManager.parseToolCalls(toolCalls, parsed);
- yield { text, swipes: swipes, logprobs: parseChatCompletionLogprobs(parsed), toolCalls: toolCalls };
+ yield { text, swipes: swipes, logprobs: parseChatCompletionLogprobs(parsed), toolCalls: toolCalls, state: state };
}
};
}
@@ -2150,16 +2150,22 @@ function getStreamingReply(data, state) {
if (oai_settings.chat_completion_source === chat_completion_sources.CLAUDE) {
return data?.delta?.text || '';
} else if (oai_settings.chat_completion_source === chat_completion_sources.MAKERSUITE) {
- return data?.candidates?.[0]?.content?.parts?.filter(x => oai_settings.show_thoughts || !x.thought)?.map(x => x.text)?.filter(x => x)?.join('\n\n') || '';
+ if (oai_settings.show_thoughts) {
+ state.reasoning += (data?.candidates?.[0]?.content?.parts?.filter(x => x.thought)?.map(x => x.text)?.[0] || '');
+ }
+ return data?.candidates?.[0]?.content?.parts?.filter(x => !x.thought)?.map(x => x.text)?.[0] || '';
} else if (oai_settings.chat_completion_source === chat_completion_sources.COHERE) {
return data?.delta?.message?.content?.text || data?.delta?.message?.tool_plan || '';
} else if (oai_settings.chat_completion_source === chat_completion_sources.DEEPSEEK) {
- const hadThoughts = state.hadThoughts;
- const thoughts = data.choices?.filter(x => oai_settings.show_thoughts || !x?.delta?.reasoning_content)?.[0]?.delta?.reasoning_content || '';
- const content = data.choices?.[0]?.delta?.content || '';
- state.hadThoughts = !!thoughts;
- const separator = hadThoughts && !thoughts ? '\n\n' : '';
- return [thoughts, separator, content].filter(x => x).join('\n\n');
+ if (oai_settings.show_thoughts) {
+ state.reasoning += (data.choices?.filter(x => x?.delta?.reasoning_content)?.[0]?.delta?.reasoning_content || '');
+ }
+ return data.choices?.[0]?.delta?.content || '';
+ } else if (oai_settings.chat_completion_source === chat_completion_sources.OPENROUTER) {
+ if (oai_settings.show_thoughts) {
+ state.reasoning += (data.choices?.filter(x => x?.delta?.reasoning)?.[0]?.delta?.reasoning || '');
+ }
+ return data.choices?.[0]?.delta?.content ?? data.choices?.[0]?.message?.content ?? data.choices?.[0]?.text ?? '';
} else {
return data.choices?.[0]?.delta?.content ?? data.choices?.[0]?.message?.content ?? data.choices?.[0]?.text ?? '';
}
diff --git a/public/scripts/power-user.js b/public/scripts/power-user.js
index bfe8eca2e..2a74f8722 100644
--- a/public/scripts/power-user.js
+++ b/public/scripts/power-user.js
@@ -253,6 +253,14 @@ let power_user = {
content: 'Write {{char}}\'s next reply in a fictional chat between {{char}} and {{user}}.',
},
+ reasoning: {
+ add_to_prompts: false,
+ prefix: '\n',
+ suffix: '\n',
+ separator: '\n\n',
+ max_additions: 1,
+ },
+
personas: {},
default_persona: null,
persona_descriptions: {},
diff --git a/public/scripts/reasoning.js b/public/scripts/reasoning.js
new file mode 100644
index 000000000..c9b0efc3b
--- /dev/null
+++ b/public/scripts/reasoning.js
@@ -0,0 +1,297 @@
+import { chat, closeMessageEditor, saveChatConditional, saveSettingsDebounced, substituteParams, updateMessageBlock } from '../script.js';
+import { t } from './i18n.js';
+import { MacrosParser } from './macros.js';
+import { Popup } from './popup.js';
+import { power_user } from './power-user.js';
+import { SlashCommand } from './slash-commands/SlashCommand.js';
+import { ARGUMENT_TYPE, SlashCommandArgument, SlashCommandNamedArgument } from './slash-commands/SlashCommandArgument.js';
+import { commonEnumProviders } from './slash-commands/SlashCommandCommonEnumsProvider.js';
+import { SlashCommandParser } from './slash-commands/SlashCommandParser.js';
+import { copyText } from './utils.js';
+
+/**
+ * Gets a message from a jQuery element.
+ * @param {Element} element
+ * @returns {{messageId: number, message: object, messageBlock: JQuery}}
+ */
+function getMessageFromJquery(element) {
+ const messageBlock = $(element).closest('.mes');
+ const messageId = Number(messageBlock.attr('mesid'));
+ const message = chat[messageId];
+ return { messageId: messageId, message, messageBlock };
+}
+
+/**
+ * Helper class for adding reasoning to messages.
+ * Keeps track of the number of reasoning additions.
+ */
+export class PromptReasoning {
+ static REASONING_PLACEHOLDER = '\u200B';
+ static REASONING_PLACEHOLDER_REGEX = new RegExp(`${PromptReasoning.REASONING_PLACEHOLDER}$`);
+
+ constructor() {
+ this.counter = 0;
+ }
+
+ /**
+ * Checks if the limit of reasoning additions has been reached.
+ * @returns {boolean} True if the limit of reasoning additions has been reached, false otherwise.
+ */
+ isLimitReached() {
+ if (!power_user.reasoning.add_to_prompts) {
+ return true;
+ }
+
+ return this.counter >= power_user.reasoning.max_additions;
+ }
+
+ /**
+ * Add reasoning to a message according to the power user settings.
+ * @param {string} content Message content
+ * @param {string} reasoning Message reasoning
+ * @returns {string} Message content with reasoning
+ */
+ addToMessage(content, reasoning) {
+ // Disabled or reached limit of additions
+ if (!power_user.reasoning.add_to_prompts || this.counter >= power_user.reasoning.max_additions) {
+ return content;
+ }
+
+ // No reasoning provided or a placeholder
+ if (!reasoning || reasoning === PromptReasoning.REASONING_PLACEHOLDER) {
+ return content;
+ }
+
+ // Increment the counter
+ this.counter++;
+
+ // Substitute macros in variable parts
+ const prefix = substituteParams(power_user.reasoning.prefix || '');
+ const separator = substituteParams(power_user.reasoning.separator || '');
+ const suffix = substituteParams(power_user.reasoning.suffix || '');
+
+ // Combine parts with reasoning and content
+ return `${prefix}${reasoning}${suffix}${separator}${content}`;
+ }
+}
+
+function loadReasoningSettings() {
+ $('#reasoning_add_to_prompts').prop('checked', power_user.reasoning.add_to_prompts);
+ $('#reasoning_add_to_prompts').on('change', function () {
+ power_user.reasoning.add_to_prompts = !!$(this).prop('checked');
+ saveSettingsDebounced();
+ });
+
+ $('#reasoning_prefix').val(power_user.reasoning.prefix);
+ $('#reasoning_prefix').on('input', function () {
+ power_user.reasoning.prefix = String($(this).val());
+ saveSettingsDebounced();
+ });
+
+ $('#reasoning_suffix').val(power_user.reasoning.suffix);
+ $('#reasoning_suffix').on('input', function () {
+ power_user.reasoning.suffix = String($(this).val());
+ saveSettingsDebounced();
+ });
+
+ $('#reasoning_separator').val(power_user.reasoning.separator);
+ $('#reasoning_separator').on('input', function () {
+ power_user.reasoning.separator = String($(this).val());
+ saveSettingsDebounced();
+ });
+
+ $('#reasoning_max_additions').val(power_user.reasoning.max_additions);
+ $('#reasoning_max_additions').on('input', function () {
+ power_user.reasoning.max_additions = Number($(this).val());
+ saveSettingsDebounced();
+ });
+}
+
+function registerReasoningSlashCommands() {
+ SlashCommandParser.addCommandObject(SlashCommand.fromProps({
+ name: 'reasoning-get',
+ returns: ARGUMENT_TYPE.STRING,
+ helpString: t`Get the contents of a reasoning block of a message. Returns an empty string if the message does not have a reasoning block.`,
+ unnamedArgumentList: [
+ SlashCommandArgument.fromProps({
+ description: 'Message ID. If not provided, the message ID of the last message is used.',
+ typeList: ARGUMENT_TYPE.NUMBER,
+ enumProvider: commonEnumProviders.messages(),
+ }),
+ ],
+ callback: (_args, value) => {
+ const messageId = !isNaN(Number(value)) ? Number(value) : chat.length - 1;
+ const message = chat[messageId];
+ const reasoning = String(message?.extra?.reasoning ?? '');
+ return reasoning.replace(PromptReasoning.REASONING_PLACEHOLDER_REGEX, '');
+ },
+ }));
+
+ SlashCommandParser.addCommandObject(SlashCommand.fromProps({
+ name: 'reasoning-set',
+ returns: ARGUMENT_TYPE.STRING,
+ helpString: t`Set the reasoning block of a message. Returns the reasoning block content.`,
+ namedArgumentList: [
+ SlashCommandNamedArgument.fromProps({
+ name: 'at',
+ description: 'Message ID. If not provided, the message ID of the last message is used.',
+ typeList: ARGUMENT_TYPE.NUMBER,
+ enumProvider: commonEnumProviders.messages(),
+ }),
+ ],
+ unnamedArgumentList: [
+ SlashCommandArgument.fromProps({
+ description: 'Reasoning block content.',
+ typeList: ARGUMENT_TYPE.STRING,
+ }),
+ ],
+ callback: async (args, value) => {
+            const messageId = !isNaN(Number(args.at)) ? Number(args.at) : chat.length - 1;
+ const message = chat[messageId];
+ if (!message?.extra) {
+ return '';
+ }
+
+ message.extra.reasoning = String(value ?? '');
+ await saveChatConditional();
+
+ closeMessageEditor('reasoning');
+ updateMessageBlock(messageId, message);
+ return message.extra.reasoning;
+ },
+ }));
+}
+
+function registerReasoningMacros() {
+ MacrosParser.registerMacro('reasoningPrefix', () => power_user.reasoning.prefix, t`Reasoning Prefix`);
+ MacrosParser.registerMacro('reasoningSuffix', () => power_user.reasoning.suffix, t`Reasoning Suffix`);
+ MacrosParser.registerMacro('reasoningSeparator', () => power_user.reasoning.separator, t`Reasoning Separator`);
+}
+
+function setReasoningEventHandlers(){
+ $(document).on('click', '.mes_reasoning_copy', (e) => {
+ e.stopPropagation();
+ e.preventDefault();
+ });
+
+ $(document).on('click', '.mes_reasoning_edit', function (e) {
+ e.stopPropagation();
+ e.preventDefault();
+ const { message, messageBlock } = getMessageFromJquery(this);
+ if (!message?.extra) {
+ return;
+ }
+
+ const reasoning = String(message?.extra?.reasoning ?? '');
+ const chatElement = document.getElementById('chat');
+ const textarea = document.createElement('textarea');
+ const reasoningBlock = messageBlock.find('.mes_reasoning');
+ textarea.classList.add('reasoning_edit_textarea');
+ textarea.value = reasoning.replace(PromptReasoning.REASONING_PLACEHOLDER_REGEX, '');
+ $(textarea).insertBefore(reasoningBlock);
+
+ if (!CSS.supports('field-sizing', 'content')) {
+ const resetHeight = function () {
+ const scrollTop = chatElement.scrollTop;
+ textarea.style.height = '0px';
+ textarea.style.height = `${textarea.scrollHeight}px`;
+ chatElement.scrollTop = scrollTop;
+ };
+
+ textarea.addEventListener('input', resetHeight);
+ resetHeight();
+ }
+
+ textarea.focus();
+ textarea.setSelectionRange(textarea.value.length, textarea.value.length);
+
+ const textareaRect = textarea.getBoundingClientRect();
+ const chatRect = chatElement.getBoundingClientRect();
+
+ // Scroll if textarea bottom is below visible area
+ if (textareaRect.bottom > chatRect.bottom) {
+ const scrollOffset = textareaRect.bottom - chatRect.bottom;
+ chatElement.scrollTop += scrollOffset;
+ }
+ });
+
+ $(document).on('click', '.mes_reasoning_edit_done', async function (e) {
+ e.stopPropagation();
+ e.preventDefault();
+ const { message, messageId, messageBlock } = getMessageFromJquery(this);
+ if (!message?.extra) {
+ return;
+ }
+
+ const textarea = messageBlock.find('.reasoning_edit_textarea');
+ const reasoning = String(textarea.val());
+ message.extra.reasoning = reasoning;
+ await saveChatConditional();
+ updateMessageBlock(messageId, message);
+ textarea.remove();
+ });
+
+ $(document).on('click', '.mes_reasoning_edit_cancel', function (e) {
+ e.stopPropagation();
+ e.preventDefault();
+
+ const { messageBlock } = getMessageFromJquery(this);
+ const textarea = messageBlock.find('.reasoning_edit_textarea');
+ textarea.remove();
+ });
+
+ $(document).on('click', '.mes_edit_add_reasoning', async function () {
+ const { message, messageId } = getMessageFromJquery(this);
+ if (!message?.extra) {
+ return;
+ }
+
+ if (message.extra.reasoning) {
+ toastr.info(t`Reasoning already exists.`, t`Edit Message`);
+ return;
+ }
+
+ message.extra.reasoning = PromptReasoning.REASONING_PLACEHOLDER;
+ await saveChatConditional();
+ closeMessageEditor();
+ updateMessageBlock(messageId, message);
+ });
+
+ $(document).on('click', '.mes_reasoning_delete', async function (e) {
+ e.stopPropagation();
+ e.preventDefault();
+
+ const confirm = await Popup.show.confirm(t`Are you sure you want to clear the reasoning?`, t`Visible message contents will stay intact.`);
+
+ if (!confirm) {
+ return;
+ }
+
+ const { message, messageId } = getMessageFromJquery(this);
+ if (!message?.extra) {
+ return;
+ }
+ message.extra.reasoning = '';
+ await saveChatConditional();
+ updateMessageBlock(messageId, message);
+ });
+
+ $(document).on('pointerup', '.mes_reasoning_copy', async function () {
+ const { message } = getMessageFromJquery(this);
+ const reasoning = String(message?.extra?.reasoning ?? '').replace(PromptReasoning.REASONING_PLACEHOLDER_REGEX, '');
+
+ if (!reasoning) {
+ return;
+ }
+
+ await copyText(reasoning);
+ toastr.info(t`Copied!`, '', { timeOut: 2000 });
+ });
+}
+
+export function initReasoning() {
+ loadReasoningSettings();
+ setReasoningEventHandlers();
+ registerReasoningSlashCommands();
+ registerReasoningMacros();
+}
diff --git a/public/scripts/sse-stream.js b/public/scripts/sse-stream.js
index 3921e7d58..17a31b567 100644
--- a/public/scripts/sse-stream.js
+++ b/public/scripts/sse-stream.js
@@ -235,6 +235,21 @@ async function* parseStreamData(json) {
}
return;
}
+ else if (typeof json.choices[0].delta.reasoning === 'string' && json.choices[0].delta.reasoning.length > 0) {
+ for (let j = 0; j < json.choices[0].delta.reasoning.length; j++) {
+ const str = json.choices[0].delta.reasoning[j];
+ const isLastSymbol = j === json.choices[0].delta.reasoning.length - 1;
+ const choiceClone = structuredClone(json.choices[0]);
+ choiceClone.delta.reasoning = str;
+ choiceClone.delta.content = isLastSymbol ? choiceClone.delta.content : '';
+ const choices = [choiceClone];
+ yield {
+ data: { ...json, choices },
+ chunk: str,
+ };
+ }
+ return;
+ }
else if (typeof json.choices[0].delta.content === 'string' && json.choices[0].delta.content.length > 0) {
for (let j = 0; j < json.choices[0].delta.content.length; j++) {
const str = json.choices[0].delta.content[j];
diff --git a/public/scripts/st-context.js b/public/scripts/st-context.js
index a02fd5492..1c7a46199 100644
--- a/public/scripts/st-context.js
+++ b/public/scripts/st-context.js
@@ -1,6 +1,7 @@
import {
activateSendButtons,
addOneMessage,
+ appendMediaToMessage,
callPopup,
characters,
chat,
@@ -41,6 +42,7 @@ import {
substituteParamsExtended,
this_chid,
updateChatMetadata,
+ updateMessageBlock,
} from '../script.js';
import {
extension_settings,
@@ -171,6 +173,8 @@ export function getContext() {
getCharacters,
uuidv4,
humanizedDateTime,
+ updateMessageBlock,
+ appendMediaToMessage,
};
}
diff --git a/public/scripts/textgen-settings.js b/public/scripts/textgen-settings.js
index cd1004991..19b729374 100644
--- a/public/scripts/textgen-settings.js
+++ b/public/scripts/textgen-settings.js
@@ -986,6 +986,7 @@ export async function generateTextGenWithStreaming(generate_data, signal) {
let logprobs = null;
const swipes = [];
const toolCalls = [];
+ const state = {};
while (true) {
const { done, value } = await reader.read();
if (done) return;
@@ -1004,7 +1005,7 @@ export async function generateTextGenWithStreaming(generate_data, signal) {
logprobs = parseTextgenLogprobs(newText, data.choices?.[0]?.logprobs || data?.completion_probabilities);
}
- yield { text, swipes, logprobs, toolCalls };
+ yield { text, swipes, logprobs, toolCalls, state };
}
};
}
diff --git a/public/style.css b/public/style.css
index 95dcd314c..08fe9b68b 100644
--- a/public/style.css
+++ b/public/style.css
@@ -292,36 +292,44 @@ input[type='checkbox']:focus-visible {
filter: grayscale(25%);
}
-.mes_text table {
+.mes_text table,
+.mes_reasoning table {
border-spacing: 0;
border-collapse: collapse;
margin-bottom: 10px;
}
.mes_text td,
-.mes_text th {
+.mes_text th,
+.mes_reasoning td,
+.mes_reasoning th {
border: 1px solid;
border-collapse: collapse;
padding: 0.25em;
}
-.mes_text p {
+.mes_text p,
+.mes_reasoning p {
margin-top: 0;
margin-bottom: 10px;
}
-.mes_text li tt {
+.mes_text li tt,
+.mes_reasoning li tt {
display: inline-block;
}
.mes_text ol,
-.mes_text ul {
+.mes_text ul,
+.mes_reasoning ol,
+.mes_reasoning ul {
margin-top: 5px;
margin-bottom: 5px;
}
.mes_text br,
-.mes_bias br {
+.mes_bias br,
+.mes_reasoning br {
content: ' ';
}
@@ -332,25 +340,83 @@ input[type='checkbox']:focus-visible {
color: var(--SmartThemeQuoteColor);
}
+.mes_reasoning {
+ display: block;
+ border: 1px solid var(--SmartThemeBorderColor);
+ background-color: var(--black30a);
+ border-radius: 5px;
+ padding: 5px;
+ margin: 5px 0;
+ overflow-y: auto;
+}
+
+.mes_reasoning_summary {
+ cursor: pointer;
+ position: relative;
+ margin: 2px;
+}
+
+@supports not selector(:has(*)) {
+ .mes_reasoning_details {
+ display: none !important;
+ }
+}
+
+.mes_bias:empty,
+.mes_reasoning:empty,
+.mes_reasoning_details:has(.mes_reasoning:empty),
+.mes_block:has(.edit_textarea) .mes_reasoning_details,
+.mes_reasoning_details:not([open]) .mes_reasoning_actions,
+.mes_reasoning_details:has(.reasoning_edit_textarea) .mes_reasoning,
+.mes_reasoning_details:not(:has(.reasoning_edit_textarea)) .mes_reasoning_actions .mes_button.mes_reasoning_edit_done,
+.mes_reasoning_details:not(:has(.reasoning_edit_textarea)) .mes_reasoning_actions .mes_button.mes_reasoning_edit_cancel,
+.mes_reasoning_details:has(.reasoning_edit_textarea) .mes_reasoning_actions .mes_button:not(.mes_reasoning_edit_done, .mes_reasoning_edit_cancel) {
+ display: none;
+}
+
+.mes_reasoning_actions {
+ position: absolute;
+ right: 0;
+ top: 0;
+
+ display: flex;
+ gap: 4px;
+ flex-wrap: nowrap;
+ justify-content: flex-end;
+ transition: all 200ms;
+ overflow-x: hidden;
+ padding: 1px;
+}
+
+.mes_reasoning_summary>span {
+ margin-left: 0.5em;
+}
+
.mes_text i,
-.mes_text em {
+.mes_text em,
+.mes_reasoning i,
+.mes_reasoning em {
color: var(--SmartThemeEmColor);
}
-.mes_text u {
+.mes_text u,
+.mes_reasoning u {
color: var(--SmartThemeUnderlineColor);
}
-.mes_text q {
+.mes_text q,
+.mes_reasoning q {
color: var(--SmartThemeQuoteColor);
}
.mes_text font[color] em,
-.mes_text font[color] i {
- color: inherit;
-}
-
-.mes_text font[color] q {
+.mes_text font[color] i,
+.mes_text font[color] u,
+.mes_text font[color] q,
+.mes_reasoning font[color] em,
+.mes_reasoning font[color] i,
+.mes_reasoning font[color] u,
+.mes_reasoning font[color] q {
color: inherit;
}
@@ -358,7 +424,8 @@ input[type='checkbox']:focus-visible {
display: block;
}
-.mes_text blockquote {
+.mes_text blockquote,
+.mes_reasoning blockquote {
border-left: 3px solid var(--SmartThemeQuoteColor);
padding-left: 10px;
background-color: var(--black30a);
@@ -368,18 +435,24 @@ input[type='checkbox']:focus-visible {
.mes_text strong em,
.mes_text strong,
.mes_text h2,
-.mes_text h1 {
+.mes_text h1,
+.mes_reasoning strong em,
+.mes_reasoning strong,
+.mes_reasoning h2,
+.mes_reasoning h1 {
font-weight: bold;
}
-.mes_text pre code {
+.mes_text pre code,
+.mes_reasoning pre code {
position: relative;
display: block;
overflow-x: auto;
padding: 1em;
}
-.mes_text img:not(.mes_img) {
+.mes_text img:not(.mes_img),
+.mes_reasoning img:not(.mes_img) {
max-width: 100%;
max-height: var(--doc-height);
}
@@ -1022,6 +1095,11 @@ body .panelControlBar {
/*only affects bubblechat to make it sit nicely at the bottom*/
}
+.last_mes:has(.mes_text:empty):has(.mes_reasoning_details[open]) .mes_reasoning:not(:empty) {
+ margin-bottom: 30px;
+}
+
+.last_mes .mes_reasoning,
.last_mes .mes_text {
padding-right: 30px;
}
@@ -1235,14 +1313,18 @@ body.swipeAllMessages .mes:not(.last_mes) .swipes-counter {
overflow-y: clip;
}
-.mes_text {
+.mes_text,
+.mes_reasoning {
font-weight: 500;
line-height: calc(var(--mainFontSize) + .5rem);
+ max-width: 100%;
+ overflow-wrap: anywhere;
+}
+
+.mes_text {
padding-left: 0;
padding-top: 5px;
padding-bottom: 5px;
- max-width: 100%;
- overflow-wrap: anywhere;
}
br {
@@ -4150,10 +4232,12 @@ input[type="range"]::-webkit-slider-thumb {
align-items: center;
}
+.mes_reasoning_edit_cancel,
.mes_edit_cancel.menu_button {
background-color: var(--crimson70a);
}
+.mes_reasoning_edit_done,
.mes_edit_done.menu_button {
background-color: var(--okGreen70a);
}
@@ -4162,6 +4246,7 @@ input[type="range"]::-webkit-slider-thumb {
opacity: 1;
}
+.reasoning_edit_textarea,
.edit_textarea {
padding: 5px;
margin: 0;
diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js
index 884a95c65..a0561bbc8 100644
--- a/src/endpoints/backends/chat-completions.js
+++ b/src/endpoints/backends/chat-completions.js
@@ -289,6 +289,7 @@ async function sendMakerSuiteRequest(request, response) {
const model = String(request.body.model);
const stream = Boolean(request.body.stream);
const showThoughts = Boolean(request.body.show_thoughts);
+ const isThinking = model.includes('thinking');
const generationConfig = {
stopSequences: request.body.stop,
@@ -329,6 +330,12 @@ async function sendMakerSuiteRequest(request, response) {
body.systemInstruction = prompt.system_instruction;
}
+ if (isThinking && showThoughts) {
+ generationConfig.thinkingConfig = {
+ includeThoughts: true,
+ };
+ }
+
return body;
}
@@ -342,7 +349,6 @@ async function sendMakerSuiteRequest(request, response) {
controller.abort();
});
- const isThinking = model.includes('thinking');
const apiVersion = isThinking ? 'v1alpha' : 'v1beta';
const responseType = (stream ? 'streamGenerateContent' : 'generateContent');
@@ -387,11 +393,7 @@ async function sendMakerSuiteRequest(request, response) {
const responseContent = candidates[0].content ?? candidates[0].output;
console.log('Google AI Studio response:', responseContent);
- if (Array.isArray(responseContent?.parts) && isThinking && !showThoughts) {
- responseContent.parts = responseContent.parts.filter(part => !part.thought);
- }
-
- const responseText = typeof responseContent === 'string' ? responseContent : responseContent?.parts?.map(part => part.text)?.join('\n\n');
+ const responseText = typeof responseContent === 'string' ? responseContent : responseContent?.parts?.filter(part => !part.thought)?.map(part => part.text)?.join('\n\n');
if (!responseText) {
let message = 'Google AI Studio Candidate text empty';
console.log(message, generateResponseJson);
@@ -399,7 +401,7 @@ async function sendMakerSuiteRequest(request, response) {
}
// Wrap it back to OAI format
- const reply = { choices: [{ 'message': { 'content': responseText } }] };
+ const reply = { choices: [{ 'message': { 'content': responseText } }], responseContent };
return response.send(reply);
}
} catch (error) {
@@ -996,6 +998,10 @@ router.post('/generate', jsonParser, function (request, response) {
bodyParams['route'] = 'fallback';
}
+ if (request.body.show_thoughts) {
+ bodyParams['include_reasoning'] = true;
+ }
+
let cachingAtDepth = getConfigValue('claude.cachingAtDepth', -1);
if (Number.isInteger(cachingAtDepth) && cachingAtDepth >= 0 && request.body.model?.startsWith('anthropic/claude-3')) {
cachingAtDepthForOpenRouterClaude(request.body.messages, cachingAtDepth);