Mirror of https://github.com/SillyTavern/SillyTavern.git (synced 2025-06-05 21:59:27 +02:00)
@@ -1975,6 +1975,20 @@
                         </span>
                     </div>
                 </div>
+                <div class="range-block" data-source="makersuite">
+                    <label for="openai_show_thoughts" class="checkbox_label widthFreeExpand">
+                        <input id="openai_show_thoughts" type="checkbox" />
+                        <span>
+                            <span data-i18n="Show model thoughts">Show model thoughts</span>
+                            <i class="opacity50p fa-solid fa-circle-info" title="Gemini 2.0 Thinking"></i>
+                        </span>
+                    </label>
+                    <div class="toggle-description justifyLeft marginBot5">
+                        <span data-i18n="Display the model's internal thoughts in the response.">
+                            Display the model's internal thoughts in the response.
+                        </span>
+                    </div>
+                </div>
                 <div class="range-block" data-source="claude">
                     <div class="wide100p">
                         <div class="flex-container alignItemsCenter">
@@ -295,6 +295,7 @@ const default_settings = {
     names_behavior: character_names_behavior.DEFAULT,
     continue_postfix: continue_postfix_types.SPACE,
     custom_prompt_post_processing: custom_prompt_post_processing_types.NONE,
+    show_thoughts: false,
     seed: -1,
     n: 1,
 };
@@ -372,6 +373,7 @@ const oai_settings = {
     names_behavior: character_names_behavior.DEFAULT,
     continue_postfix: continue_postfix_types.SPACE,
     custom_prompt_post_processing: custom_prompt_post_processing_types.NONE,
+    show_thoughts: false,
     seed: -1,
     n: 1,
 };
@@ -1884,6 +1886,7 @@ async function sendOpenAIRequest(type, messages, signal) {
         'user_name': name1,
         'char_name': name2,
         'group_names': getGroupNames(),
+        'show_thoughts': Boolean(oai_settings.show_thoughts),
     };

     // Empty array will produce a validation error
@@ -2098,7 +2101,7 @@ function getStreamingReply(data) {
     if (oai_settings.chat_completion_source === chat_completion_sources.CLAUDE) {
         return data?.delta?.text || '';
     } else if (oai_settings.chat_completion_source === chat_completion_sources.MAKERSUITE) {
-        return data?.candidates?.[0]?.content?.parts?.map(x => x.text)?.join('\n\n') || '';
+        return data?.candidates?.[0]?.content?.parts?.filter(x => oai_settings.show_thoughts || !x.thought)?.map(x => x.text)?.filter(x => x)?.join('\n\n') || '';
     } else if (oai_settings.chat_completion_source === chat_completion_sources.COHERE) {
         return data?.delta?.message?.content?.text || data?.delta?.message?.tool_plan || '';
     } else {
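For reference, a minimal standalone sketch of what the updated MAKERSUITE branch does to a streaming chunk that carries both a thought part and an answer part. The sample payload and the oai_settings stub below are hypothetical; they only model the fields the changed line actually reads.

// Minimal sketch (not code from the PR): thought parts are dropped from the
// streamed reply unless show_thoughts is enabled.
const oai_settings = { show_thoughts: false };
const data = {
    candidates: [{
        content: {
            parts: [
                { text: 'Let me work through this first...', thought: true },
                { text: 'Final answer.' },
            ],
        },
    }],
};
const reply = data?.candidates?.[0]?.content?.parts
    ?.filter(x => oai_settings.show_thoughts || !x.thought)
    ?.map(x => x.text)
    ?.filter(x => x)
    ?.join('\n\n') || '';
console.log(reply); // Prints "Final answer."; with show_thoughts: true the thought text is included too.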
@@ -3056,6 +3059,7 @@ function loadOpenAISettings(data, settings) {
     oai_settings.image_inlining = settings.image_inlining ?? default_settings.image_inlining;
     oai_settings.inline_image_quality = settings.inline_image_quality ?? default_settings.inline_image_quality;
     oai_settings.bypass_status_check = settings.bypass_status_check ?? default_settings.bypass_status_check;
+    oai_settings.show_thoughts = settings.show_thoughts ?? default_settings.show_thoughts;
     oai_settings.seed = settings.seed ?? default_settings.seed;
     oai_settings.n = settings.n ?? default_settings.n;

@@ -3181,6 +3185,7 @@ function loadOpenAISettings(data, settings) {
     $('#repetition_penalty_counter_openai').val(Number(oai_settings.repetition_penalty_openai));
     $('#seed_openai').val(oai_settings.seed);
     $('#n_openai').val(oai_settings.n);
+    $('#openai_show_thoughts').prop('checked', oai_settings.show_thoughts);

     if (settings.reverse_proxy !== undefined) oai_settings.reverse_proxy = settings.reverse_proxy;
     $('#openai_reverse_proxy').val(oai_settings.reverse_proxy);
@@ -3441,6 +3446,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
         continue_prefill: settings.continue_prefill,
         continue_postfix: settings.continue_postfix,
         function_calling: settings.function_calling,
+        show_thoughts: settings.show_thoughts,
         seed: settings.seed,
         n: settings.n,
     };
@@ -3897,6 +3903,7 @@ function onSettingsPresetChange() {
         continue_prefill: ['#continue_prefill', 'continue_prefill', true],
         continue_postfix: ['#continue_postfix', 'continue_postfix', false],
         function_calling: ['#openai_function_calling', 'function_calling', true],
+        show_thoughts: ['#openai_show_thoughts', 'show_thoughts', true],
         seed: ['#seed_openai', 'seed', false],
         n: ['#n_openai', 'n', false],
     };
@@ -5390,6 +5397,11 @@ export function initOpenAI() {
         saveSettingsDebounced();
     });

+    $('#openai_show_thoughts').on('input', function () {
+        oai_settings.show_thoughts = !!$(this).prop('checked');
+        saveSettingsDebounced();
+    });
+
     if (!CSS.supports('field-sizing', 'content')) {
         $(document).on('input', '#openai_settings .autoSetHeight', function () {
             resetScrollHeight($(this));
@@ -144,9 +144,14 @@ async function* parseStreamData(json) {
             for (let j = 0; j < json.candidates[i].content.parts.length; j++) {
                 if (typeof json.candidates[i].content.parts[j].text === 'string') {
                     for (let k = 0; k < json.candidates[i].content.parts[j].text.length; k++) {
-                        const str = json.candidates[i].content.parts[j].text[k];
+                        const moreThanOnePart = json.candidates[i].content.parts.length > 1;
+                        const isNotLastPart = j !== json.candidates[i].content.parts.length - 1;
+                        const isLastSymbol = k === json.candidates[i].content.parts[j].text.length - 1;
+                        const addNewline = moreThanOnePart && isNotLastPart && isLastSymbol;
+                        const str = json.candidates[i].content.parts[j].text[k] + (addNewline ? '\n\n' : '');
                         const candidateClone = structuredClone(json.candidates[0]);
                         candidateClone.content.parts[j].text = str;
+                        candidateClone.content.parts = [candidateClone.content.parts[j]];
                         const candidates = [candidateClone];
                         yield {
                             data: { ...json, candidates },
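To illustrate the newline handling added above, here is a small self-contained sketch of the per-character splitting (the parts array is hypothetical sample data, not code from the PR): the last character of every part except the final one gets '\n\n' appended, so the re-joined stream keeps a blank line between a thought part and the answer part.

// Minimal sketch of the splitting logic added to parseStreamData().
const parts = [{ text: 'AB' }, { text: 'C' }];
const pieces = [];
for (let j = 0; j < parts.length; j++) {
    for (let k = 0; k < parts[j].text.length; k++) {
        const moreThanOnePart = parts.length > 1;
        const isNotLastPart = j !== parts.length - 1;
        const isLastSymbol = k === parts[j].text.length - 1;
        const addNewline = moreThanOnePart && isNotLastPart && isLastSymbol;
        pieces.push(parts[j].text[k] + (addNewline ? '\n\n' : ''));
    }
}
console.log(JSON.stringify(pieces.join(''))); // "AB\n\nC"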
@@ -278,6 +278,7 @@ async function sendMakerSuiteRequest(request, response) {

     const model = String(request.body.model);
     const stream = Boolean(request.body.stream);
+    const showThoughts = Boolean(request.body.show_thoughts);

     const generationConfig = {
         stopSequences: request.body.stop,
@@ -325,7 +326,8 @@ async function sendMakerSuiteRequest(request, response) {
         controller.abort();
     });

-    const apiVersion = 'v1beta';
+    const isThinking = model.includes('thinking');
+    const apiVersion = isThinking ? 'v1alpha' : 'v1beta';
     const responseType = (stream ? 'streamGenerateContent' : 'generateContent');

     const generateResponse = await fetch(`${apiUrl.toString().replace(/\/$/, '')}/${apiVersion}/models/${model}:${responseType}?key=${apiKey}${stream ? '&alt=sse' : ''}`, {
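A quick illustration of the routing change above, as a sketch: any model id containing the substring 'thinking' is sent to the v1alpha API, everything else stays on v1beta. The model id below is only a sample value.

// Sketch of the endpoint selection introduced in the hunk above.
const model = 'gemini-2.0-flash-thinking-exp';
const isThinking = model.includes('thinking');
const apiVersion = isThinking ? 'v1alpha' : 'v1beta';
console.log(apiVersion); // "v1alpha"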
@@ -369,6 +371,10 @@ async function sendMakerSuiteRequest(request, response) {
     const responseContent = candidates[0].content ?? candidates[0].output;
     console.log('Google AI Studio response:', responseContent);

+    if (Array.isArray(responseContent?.parts) && isThinking && !showThoughts) {
+        responseContent.parts = responseContent.parts.filter(part => !part.thought);
+    }
+
     const responseText = typeof responseContent === 'string' ? responseContent : responseContent?.parts?.map(part => part.text)?.join('\n\n');
     if (!responseText) {
         let message = 'Google AI Studio Candidate text empty';
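Finally, a minimal sketch of the non-streaming path above: when the model is a thinking variant and show_thoughts is off, parts flagged as thoughts are dropped before the reply text is assembled. The responseContent value below is hypothetical sample data.

// Sketch of the thought filtering added to the non-streaming response handling.
const isThinking = true;
const showThoughts = false;
const responseContent = {
    parts: [
        { text: 'Reasoning about the question...', thought: true },
        { text: 'Here is the answer.' },
    ],
};
if (Array.isArray(responseContent?.parts) && isThinking && !showThoughts) {
    responseContent.parts = responseContent.parts.filter(part => !part.thought);
}
const responseText = responseContent?.parts?.map(part => part.text)?.join('\n\n');
console.log(responseText); // "Here is the answer."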