Formatting setting to send reasoning back to the model

Supersedes #3352
This commit is contained in:
Cohee
2025-01-25 22:41:08 +02:00
parent 347a515c25
commit 94ed548353
7 changed files with 150 additions and 10 deletions

View File

@ -1981,7 +1981,7 @@
<label for="openai_show_thoughts" class="checkbox_label widthFreeExpand"> <label for="openai_show_thoughts" class="checkbox_label widthFreeExpand">
<input id="openai_show_thoughts" type="checkbox" /> <input id="openai_show_thoughts" type="checkbox" />
<span> <span>
<span data-i18n="Show model thoughts">Show model thoughts</span> <span data-i18n="Show model reasoning">Show model reasoning</span>
<i class="opacity50p fa-solid fa-circle-info" title="Gemini 2.0 Thinking / DeepSeek Reasoner"></i> <i class="opacity50p fa-solid fa-circle-info" title="Gemini 2.0 Thinking / DeepSeek Reasoner"></i>
</span> </span>
</label> </label>
@ -3799,6 +3799,39 @@
<input id="token_padding" class="text_pole textarea_compact" type="number" min="-2048" max="2048" /> <input id="token_padding" class="text_pole textarea_compact" type="number" min="-2048" max="2048" />
</div> </div>
</div> </div>
<div>
    <h4 class="standoutHeader">
        <span data-i18n="Reasoning">Reasoning</span>
    </h4>
    <div>
        <label class="checkbox_label" for="reasoning_add_to_prompts" title="Add existing reasoning blocks to prompts. To add a new reasoning block, use the message edit menu." data-i18n="[title]reasoning_add_to_prompts">
            <input id="reasoning_add_to_prompts" type="checkbox" />
            <small data-i18n="Add Reasoning to Prompts">
                Add Reasoning to Prompts
            </small>
        </label>
        <div class="flex-container">
            <div class="flex1" title="Inserted before the reasoning content." data-i18n="[title]reasoning_prefix">
                <small data-i18n="Prefix">Prefix</small>
                <textarea id="reasoning_prefix" class="text_pole textarea_compact autoSetHeight"></textarea>
            </div>
            <div class="flex1" title="Inserted after the reasoning content." data-i18n="[title]reasoning_suffix">
                <small data-i18n="Suffix">Suffix</small>
                <textarea id="reasoning_suffix" class="text_pole textarea_compact autoSetHeight"></textarea>
            </div>
        </div>
        <div class="flex-container">
            <div class="flex1" title="Inserted between the reasoning and the message content." data-i18n="[title]reasoning_separator">
                <small data-i18n="Separator">Separator</small>
                <textarea id="reasoning_separator" class="text_pole textarea_compact autoSetHeight"></textarea>
            </div>
            <div class="flex1" title="Maximum number of reasoning blocks to be added per prompt, counting from the last message." data-i18n="[title]reasoning_max_additions">
                <small data-i18n="Max Additions">Max Additions</small>
                <!-- <input> is a void element: removed the stray </textarea> closing tag that was here -->
                <input id="reasoning_max_additions" class="text_pole textarea_compact" type="number" min="0" max="999" />
            </div>
        </div>
    </div>
</div>
<div> <div>
<h4 class="standoutHeader" data-i18n="Miscellaneous">Miscellaneous</h4> <h4 class="standoutHeader" data-i18n="Miscellaneous">Miscellaneous</h4>
<div> <div>
@ -6221,11 +6254,10 @@
<div class="mes_edit_buttons"> <div class="mes_edit_buttons">
<div class="mes_edit_done menu_button fa-solid fa-check" title="Confirm" data-i18n="[title]Confirm"></div> <div class="mes_edit_done menu_button fa-solid fa-check" title="Confirm" data-i18n="[title]Confirm"></div>
<div class="mes_edit_copy menu_button fa-solid fa-copy" title="Copy this message" data-i18n="[title]Copy this message"></div> <div class="mes_edit_copy menu_button fa-solid fa-copy" title="Copy this message" data-i18n="[title]Copy this message"></div>
<div class="mes_edit_delete menu_button fa-solid fa-trash-can" title="Delete this message" data-i18n="[title]Delete this message"> <div class="mes_edit_add_reasoning menu_button fa-solid fa-lightbulb" title="Add a reasoning block" data-i18n="[title]Add a reasoning block"></div>
</div> <div class="mes_edit_delete menu_button fa-solid fa-trash-can" title="Delete this message" data-i18n="[title]Delete this message"></div>
<div class="mes_edit_up menu_button fa-solid fa-chevron-up " title="Move message up" data-i18n="[title]Move message up"></div> <div class="mes_edit_up menu_button fa-solid fa-chevron-up " title="Move message up" data-i18n="[title]Move message up"></div>
<div class="mes_edit_down menu_button fa-solid fa-chevron-down" title="Move message down" data-i18n="[title]Move message down"> <div class="mes_edit_down menu_button fa-solid fa-chevron-down" title="Move message down" data-i18n="[title]Move message down"></div>
</div>
<div class="mes_edit_cancel menu_button fa-solid fa-xmark" title="Cancel" data-i18n="[title]Cancel"></div> <div class="mes_edit_cancel menu_button fa-solid fa-xmark" title="Cancel" data-i18n="[title]Cancel"></div>
</div> </div>
</div> </div>

View File

@ -1385,7 +1385,7 @@
"enable_functions_desc_1": "Autorise l'utilisation", "enable_functions_desc_1": "Autorise l'utilisation",
"enable_functions_desc_2": "outils de fonction", "enable_functions_desc_2": "outils de fonction",
"enable_functions_desc_3": "Peut être utilisé par diverses extensions pour fournir des fonctionnalités supplémentaires.", "enable_functions_desc_3": "Peut être utilisé par diverses extensions pour fournir des fonctionnalités supplémentaires.",
"Show model thoughts": "Afficher les pensées du modèle", "Show model reasoning": "Afficher les pensées du modèle",
"Display the model's internal thoughts in the response.": "Afficher les pensées internes du modèle dans la réponse.", "Display the model's internal thoughts in the response.": "Afficher les pensées internes du modèle dans la réponse.",
"Confirm token parsing with": "Confirmer l'analyse des tokens avec", "Confirm token parsing with": "Confirmer l'analyse des tokens avec",
"openai_logit_bias_no_items": "Aucun élément", "openai_logit_bias_no_items": "Aucun élément",

View File

@ -266,7 +266,7 @@
"Use system prompt": "使用系统提示词", "Use system prompt": "使用系统提示词",
"Merges_all_system_messages_desc_1": "合并所有系统消息,直到第一条具有非系统角色的消息,然后通过", "Merges_all_system_messages_desc_1": "合并所有系统消息,直到第一条具有非系统角色的消息,然后通过",
"Merges_all_system_messages_desc_2": "字段发送。", "Merges_all_system_messages_desc_2": "字段发送。",
"Show model thoughts": "展示思维链", "Show model reasoning": "展示思维链",
"Display the model's internal thoughts in the response.": "展示模型在回复时的内部思维链。", "Display the model's internal thoughts in the response.": "展示模型在回复时的内部思维链。",
"Assistant Prefill": "AI预填", "Assistant Prefill": "AI预填",
"Expand the editor": "展开编辑器", "Expand the editor": "展开编辑器",

View File

@ -2357,7 +2357,7 @@
"Forbid": "禁止", "Forbid": "禁止",
"Aphrodite only. Determines the order of samplers. Skew is always applied post-softmax, so it's not included here.": "僅限 Aphrodite 使用。決定採樣器的順序。偏移總是在 softmax 後應用,因此不包括在此。", "Aphrodite only. Determines the order of samplers. Skew is always applied post-softmax, so it's not included here.": "僅限 Aphrodite 使用。決定採樣器的順序。偏移總是在 softmax 後應用,因此不包括在此。",
"Aphrodite only. Determines the order of samplers.": "僅限 Aphrodite 使用。決定採樣器的順序。", "Aphrodite only. Determines the order of samplers.": "僅限 Aphrodite 使用。決定採樣器的順序。",
"Show model thoughts": "顯示模型思維鏈", "Show model reasoning": "顯示模型思維鏈",
"Display the model's internal thoughts in the response.": "在回應中顯示模型的思維鏈(內部思考過程)。", "Display the model's internal thoughts in the response.": "在回應中顯示模型的思維鏈(內部思考過程)。",
"Generic (OpenAI-compatible) [LM Studio, LiteLLM, etc.]": "通用(兼容 OpenAI[LM Studio, LiteLLM 等]", "Generic (OpenAI-compatible) [LM Studio, LiteLLM, etc.]": "通用(兼容 OpenAI[LM Studio, LiteLLM 等]",
"Model ID (optional)": "模型 ID可選", "Model ID (optional)": "模型 ID可選",

View File

@ -238,7 +238,7 @@ import { getBackgrounds, initBackgrounds, loadBackgroundSettings, background_set
import { hideLoader, showLoader } from './scripts/loader.js'; import { hideLoader, showLoader } from './scripts/loader.js';
import { BulkEditOverlay, CharacterContextMenu } from './scripts/BulkEditOverlay.js'; import { BulkEditOverlay, CharacterContextMenu } from './scripts/BulkEditOverlay.js';
import { loadFeatherlessModels, loadMancerModels, loadOllamaModels, loadTogetherAIModels, loadInfermaticAIModels, loadOpenRouterModels, loadVllmModels, loadAphroditeModels, loadDreamGenModels, initTextGenModels, loadTabbyModels, loadGenericModels } from './scripts/textgen-models.js'; import { loadFeatherlessModels, loadMancerModels, loadOllamaModels, loadTogetherAIModels, loadInfermaticAIModels, loadOpenRouterModels, loadVllmModels, loadAphroditeModels, loadDreamGenModels, initTextGenModels, loadTabbyModels, loadGenericModels } from './scripts/textgen-models.js';
import { appendFileContent, hasPendingFileAttachment, populateFileAttachment, decodeStyleTags, encodeStyleTags, isExternalMediaAllowed, getCurrentEntityId, preserveNeutralChat, restoreNeutralChat } from './scripts/chats.js'; import { appendFileContent, hasPendingFileAttachment, populateFileAttachment, decodeStyleTags, encodeStyleTags, isExternalMediaAllowed, getCurrentEntityId, preserveNeutralChat, restoreNeutralChat, PromptReasoning } from './scripts/chats.js';
import { getPresetManager, initPresetManager } from './scripts/preset-manager.js'; import { getPresetManager, initPresetManager } from './scripts/preset-manager.js';
import { evaluateMacros, getLastMessageId, initMacros } from './scripts/macros.js'; import { evaluateMacros, getLastMessageId, initMacros } from './scripts/macros.js';
import { currentUser, setUserControls } from './scripts/user.js'; import { currentUser, setUserControls } from './scripts/user.js';
@ -3844,6 +3844,11 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
coreChat.pop(); coreChat.pop();
} }
// Fold each message's stored reasoning back into its text before prompt building.
// Iterates from the newest message backwards so that PromptReasoning's internal
// counter enforces max_additions starting from the last message (see the
// "Max Additions" setting). Each chat item is shallow-copied so the live chat
// array is not mutated.
const reasoning = new PromptReasoning();
for (let i = coreChat.length - 1; i >= 0; i--) {
    coreChat[i] = { ...coreChat[i], mes: reasoning.addToMessage(coreChat[i].mes, coreChat[i].extra?.reasoning) };
}
coreChat = await Promise.all(coreChat.map(async (chatItem, index) => { coreChat = await Promise.all(coreChat.map(async (chatItem, index) => {
let message = chatItem.mes; let message = chatItem.mes;
let regexType = chatItem.is_user ? regex_placement.USER_INPUT : regex_placement.AI_OUTPUT; let regexType = chatItem.is_user ? regex_placement.USER_INPUT : regex_placement.AI_OUTPUT;
@ -8034,7 +8039,7 @@ function updateEditArrowClasses() {
} }
} }
function closeMessageEditor() { export function closeMessageEditor() {
if (this_edit_mes_id) { if (this_edit_mes_id) {
$(`#chat .mes[mesid="${this_edit_mes_id}"] .mes_edit_cancel`).click(); $(`#chat .mes[mesid="${this_edit_mes_id}"] .mes_edit_cancel`).click();
} }

View File

@ -24,6 +24,8 @@ import {
updateChatMetadata, updateChatMetadata,
system_message_types, system_message_types,
updateMessageBlock, updateMessageBlock,
closeMessageEditor,
substituteParams,
} from '../script.js'; } from '../script.js';
import { selected_group } from './group-chats.js'; import { selected_group } from './group-chats.js';
import { power_user } from './power-user.js'; import { power_user } from './power-user.js';
@ -1418,6 +1420,47 @@ export function registerFileConverter(mimeType, converter) {
converters[mimeType] = converter; converters[mimeType] = converter;
} }
/**
 * Helper class for adding reasoning to messages.
 * Keeps track of the number of reasoning additions.
 */
export class PromptReasoning {
    static REASONING_PLACEHOLDER = '\u200B';

    constructor() {
        this.counter = 0;
    }

    /**
     * Add reasoning to a message according to the power user settings.
     * @param {string} content Message content
     * @param {string} reasoning Message reasoning
     * @returns {string} Message content with reasoning
     */
    addToMessage(content, reasoning) {
        const settings = power_user.reasoning;

        // Feature disabled, or the cap on reasoning additions was reached
        const limitReached = !settings.add_to_prompts || this.counter >= settings.max_additions;
        // Nothing to add: missing reasoning, or just the empty placeholder
        const noReasoning = !reasoning || reasoning === PromptReasoning.REASONING_PLACEHOLDER;

        if (limitReached || noReasoning) {
            return content;
        }

        this.counter++;

        // Substitute macros in the user-configurable parts
        const prefix = substituteParams(settings.prefix || '');
        const separator = substituteParams(settings.separator || '');
        const suffix = substituteParams(settings.suffix || '');

        // Wrap the reasoning, then prepend it to the message content
        return [prefix, reasoning, suffix, separator, content].join('');
    }
}
jQuery(function () { jQuery(function () {
$(document).on('click', '.mes_hide', async function () { $(document).on('click', '.mes_hide', async function () {
const messageBlock = $(this).closest('.mes'); const messageBlock = $(this).closest('.mes');
@ -1574,6 +1617,25 @@ jQuery(function () {
e.preventDefault(); e.preventDefault();
}); });
// Message edit menu: add an empty reasoning block (placeholder) to a message,
// persist the chat, and re-render the message so the block becomes editable.
$(document).on('click', '.mes_edit_add_reasoning', async function () {
    const mesBlock = $(this).closest('.mes');
    const mesId = Number(mesBlock.attr('mesid'));
    const message = chat[mesId];
    if (!message) {
        return;
    }
    // Fix: previously a message without an `extra` object was silently skipped,
    // making it impossible to ever add reasoning to it. Create the container lazily.
    if (!message.extra) {
        message.extra = {};
    }
    if (message.extra.reasoning) {
        toastr.info(t`Reasoning already exists.`, t`Edit Message`);
        return;
    }
    // Zero-width-space placeholder marks "reasoning exists but is empty"
    message.extra.reasoning = PromptReasoning.REASONING_PLACEHOLDER;
    await saveChatConditional();
    closeMessageEditor();
    updateMessageBlock(mesId, message);
});
$(document).on('click', '.mes_reasoning_delete', async function (e) { $(document).on('click', '.mes_reasoning_delete', async function (e) {
e.stopPropagation(); e.stopPropagation();
e.preventDefault(); e.preventDefault();

View File

@ -253,6 +253,14 @@ let power_user = {
content: 'Write {{char}}\'s next reply in a fictional chat between {{char}} and {{user}}.', content: 'Write {{char}}\'s next reply in a fictional chat between {{char}} and {{user}}.',
}, },
// Defaults for sending model reasoning back to the model in prompts.
reasoning: {
    add_to_prompts: false, // off by default; toggled via #reasoning_add_to_prompts
    prefix: '<think>\n',   // inserted before the reasoning content
    suffix: '\n</think>',  // inserted after the reasoning content
    separator: '\n',       // inserted between the reasoning and the message content
    max_additions: 1,      // max reasoning blocks added per prompt, counting from the last message
},
personas: {}, personas: {},
default_persona: null, default_persona: null,
persona_descriptions: {}, persona_descriptions: {},
@ -1613,6 +1621,7 @@ async function loadPowerUserSettings(settings, data) {
loadMovingUIState(); loadMovingUIState();
loadCharListState(); loadCharListState();
toggleMDHotkeyIconDisplay(); toggleMDHotkeyIconDisplay();
loadReasoningSettings();
} }
function toggleMDHotkeyIconDisplay() { function toggleMDHotkeyIconDisplay() {
@ -1629,6 +1638,38 @@ function loadCharListState() {
document.body.classList.toggle('charListGrid', power_user.charListGrid); document.body.classList.toggle('charListGrid', power_user.charListGrid);
} }
// Initialize the Reasoning settings UI from power_user and wire change handlers
// that write edits back and schedule a debounced settings save.
function loadReasoningSettings() {
    const addToPrompts = $('#reasoning_add_to_prompts');
    addToPrompts.prop('checked', power_user.reasoning.add_to_prompts);
    addToPrompts.on('change', function () {
        power_user.reasoning.add_to_prompts = !!$(this).prop('checked');
        saveSettingsDebounced();
    });

    // The three free-text fields share identical wiring; drive them from a table.
    const textFields = [
        ['#reasoning_prefix', 'prefix'],
        ['#reasoning_suffix', 'suffix'],
        ['#reasoning_separator', 'separator'],
    ];
    for (const [selector, key] of textFields) {
        const field = $(selector);
        field.val(power_user.reasoning[key]);
        field.on('input', function () {
            power_user.reasoning[key] = String($(this).val());
            saveSettingsDebounced();
        });
    }

    const maxAdditions = $('#reasoning_max_additions');
    maxAdditions.val(power_user.reasoning.max_additions);
    maxAdditions.on('input', function () {
        power_user.reasoning.max_additions = Number($(this).val());
        saveSettingsDebounced();
    });
}
function loadMovingUIState() { function loadMovingUIState() {
if (!isMobile() if (!isMobile()
&& power_user.movingUIState && power_user.movingUIState