Mirror of https://github.com/SillyTavern/SillyTavern.git, synced 2025-06-05 21:59:27 +02:00
Compare commits
1 commit
rework-tok
Commit 1d72a02995
@@ -244,56 +244,118 @@
</div>
</div>
<div id="common-gen-settings-block" class="width100p">
<div id="pro-settings-block" class="flex-container gap10h5v justifyCenter">
<div id="amount_gen_block" class="alignitemscenter flex-container marginBot5 flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="response legth(tokens)">Response (tokens)</small>
<input class="neo-range-slider" type="range" id="amount_gen" name="volume" min="16" max="2048" step="1">
<div data-randomization-disabled="true" class="wide100p">
<input class="neo-range-input" type="number" min="16" max="2048" step="1" data-for="amount_gen" id="amount_gen_counter">
<div id="pro-settings-block" class="flex-container flexNoGap justifyCenter">

<div class="flex-container gap10h5v wide100p marginTopBot5">
<div class="flex1">
<div id="streaming_textgenerationwebui_block" class="flex-container alignitemscenter justifyCenter">
<label class="checkbox_label" for="streaming_textgenerationwebui">
<input type="checkbox" id="streaming_textgenerationwebui" />
<span>
<span data-i18n="Streaming">Streaming</span>
<i class="margin5 fa-solid fa-circle-info opacity50p" data-i18n="[title]Streaming_desc" title="Display the response bit by bit as it is generated. When this is off, responses will be displayed all at once when they are complete."></i>
</span>
</label>
</div>
<div id="streaming_kobold_block" class="flex-container alignitemscenter">
<label class="checkbox_label" for="streaming_kobold">
<input type="checkbox" id="streaming_kobold" />
<span>
<span data-i18n="Streaming">Streaming</span>
<i class="margin5 fa-solid fa-circle-info opacity50p" data-i18n="[title]Streaming_desc" title="Display the response bit by bit as it is generated. When this is off, responses will be displayed all at once when they are complete."></i>
</span>
</label>
</div>
<div id="streaming_novel_block" class="flex-container alignitemscenter">
<label class="checkbox_label" for="streaming_novel">
<input type="checkbox" id="streaming_novel" />
<span>
<span data-i18n="Streaming">Streaming</span>
<i class="margin5 fa-solid fa-circle-info opacity50p" data-i18n="[title]Streaming_desc" title="Display the response bit by bit as it is generated. When this is off, responses will be displayed all at once when they are complete."></i>
</span>
</label>
</div>
<div id="streaming_openai_block" class="flex-container alignitemscenter">
<label class="checkbox_label" for="stream_toggle" >
<input id="stream_toggle" type="checkbox" />
<span>
<span data-i18n="Streaming">Streaming</span>
<i class="margin5 fa-solid fa-circle-info opacity50p" data-i18n="[title]Streaming_desc" title="Display the response bit by bit as it is generated. When this is off, responses will be displayed all at once when they are complete."></i>
</span>
</label>
</div>
</div>
<div id="streaming_textgenerationwebui_block" class="flex-container alignitemscenter justifyCenter marginTop5">
<label class="checkbox_label" for="streaming_textgenerationwebui">
<input type="checkbox" id="streaming_textgenerationwebui" />
<small><span data-i18n="Streaming">Streaming</span>
<div class="margin5 fa-solid fa-circle-info opacity50p" data-i18n="[title]Streaming_desc" title="Display the response bit by bit as it is generated. When this is off, responses will be displayed all at once when they are complete."></div>
</small>
</label>
</div>
<div id="streaming_kobold_block" class="flex-container alignitemscenter justifyCenter marginTop5">
<label class="checkbox_label" for="streaming_kobold">
<input type="checkbox" id="streaming_kobold" />
<small><span data-i18n="Streaming">Streaming</span>
<div class="margin5 fa-solid fa-circle-info opacity50p" data-i18n="[title]Streaming_desc" title="Display the response bit by bit as it is generated. When this is off, responses will be displayed all at once when they are complete."></div>
</small>
</label>
</div>
<div id="streaming_novel_block" class="flex-container alignitemscenter justifyCenter marginTop5">
<label class="checkbox_label" for="streaming_novel">
<input type="checkbox" id="streaming_novel" />
<small><span data-i18n="Streaming">Streaming</span>
<div class="margin5 fa-solid fa-circle-info opacity50p" data-i18n="[title]Streaming_desc" title="Display the response bit by bit as it is generated. When this is off, responses will be displayed all at once when they are complete."></div>
</small>
</label>
<div class="flex1">
<div id="max_context_unlocked_block" class="flex-container alignitemscenter">
<label class="checkbox_label" for="max_context_unlocked">
<input id="max_context_unlocked" type="checkbox" />
<span>
<span data-i18n="unlocked">Unlocked</span>
<i class="margin5 fa-solid fa-circle-info opacity50p" data-i18n="[title]Only enable this if your model supports context sizes greater than 8192 tokens" title="Only enable this if your model supports context sizes greater than 8192 tokens. Increase only if you know what you're doing."></i>
</span>
</label>
</div>
<div id="oai_max_context_unlocked_block" class="flex-container alignitemscenter">
<label class="checkbox_label" for="oai_max_context_unlocked">
<input id="oai_max_context_unlocked" type="checkbox" />
<span>
<span data-i18n="unlocked">Unlocked</span>
<i class="margin5 fa-solid fa-circle-info opacity50p" data-i18n="[title]Only enable this if your model supports context sizes greater than 8192 tokens" title="Only enable this if your model supports context sizes greater than 8192 tokens. Increase only if you know what you're doing."></i>
</span>
</label>
</div>
</div>
</div>
<div id="max_context_block" class="alignitemscenter flex-container marginBot5 flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
<small data-i18n="context size(tokens)">Context (tokens)</small>

<div id="max_context_block" class="range-block wide100p">
<div class="range-block-title" data-i18n="context size(tokens)">
Context (tokens)
</div>
<input class="neo-range-slider" type="range" id="max_context" name="volume" min="512" max="8192" step="64">
<div data-randomization-disabled="true" class="wide100p">
<input class="neo-range-input" type="number" min="512" max="8192" step="64" data-for="max_context" id="max_context_counter">
</div>
<div class="flex-container alignitemscenter justifyCenter marginTop5" id="max_context_unlocked_block">
<label class="checkbox_label">
<input id="max_context_unlocked" type="checkbox" />
<small><span data-i18n="unlocked">Unlocked</span>
<div id="max_context_unlocked_warning" class="fa-solid fa-circle-info opacity50p " data-i18n="[title]Only enable this if your model supports context sizes greater than 8192 tokens" title="Only enable this if your model supports context sizes greater than 8192 tokens. Increase only if you know what you're doing."></div>
</small>
</label>
</div>

<div id="openai_max_context_block" class="range-block wide100p">
<div class="range-block-title" data-i18n="context size(tokens)">
Context (tokens)
</div>
<input class="neo-range-slider" type="range" id="openai_max_context" name="volume" min="512" max="4095" step="1">
<div data-randomization-disabled="true" class="wide100p">
<input class="neo-range-input" type="number" min="512" max="4095" step="1" data-for="openai_max_context" id="openai_max_context_counter">
</div>
</div>
<small data-tg-type="openrouter">
<span data-i18n="Max prompt cost:">Max prompt cost:</span> <span id="or_prompt_cost">–</span>
</small>

<div id="amount_gen_block" class="range-block wide100p">
<div class="range-block-title" data-i18n="response legth(tokens)">
Response (tokens)
</div>
<div class="wide100p">
<input type="number" id="amount_gen" name="amount_gen" class="text_pole" min="1" max="65536">
</div>
</div>

<div id="max_response_openai" class="range-block wide100p">
<div class="range-block-title" data-i18n="response legth(tokens)">
Response (tokens)
</div>
<div class="wide100p">
<input type="number" id="openai_max_tokens" name="openai_max_tokens" class="text_pole" min="1" max="65536">
</div>
</div>

<div id="prompt_cost_block_textgenerationwebui" class="textAlignCenter">
<small data-tg-type="openrouter">
<span data-i18n="Max prompt cost:">Max prompt cost:</span> <span id="or_prompt_cost">–</span>
</small>
</div>

<div id="prompt_cost_block_openai" class="textAlignCenter">
<small data-source="openrouter">
<span data-i18n="Max prompt cost:">Max prompt cost:</span> <span id="openrouter_max_prompt_cost">Unknown</span>
</small>
</div>
</div>
<div id="ai_module_block_novel" class="width100p">
<div class="range-block">
@@ -311,7 +373,7 @@
</select>
</div>
</div>
<hr>
<hr class="marginTop10 marginBot10">
</div>
<div id="respective-ranges-and-temps" class="width100p">
<!--
@@ -608,41 +670,6 @@
</div>
</div>
<div id="range_block_openai">
<div class="range-block">
<label class="checkbox_label">
<input id="oai_max_context_unlocked" type="checkbox" />
<span data-i18n="Unlocked Context Size">
Unlocked Context Size
</span>
</label>
<div class="toggle-description justifyLeft">
<span data-i18n="Unrestricted maximum value for the context slider">
Unrestricted maximum value for the context size slider. Enable only if you know
what you're doing.
</span>
</div>
</div>
<div class="range-block">
<div class="range-block-title" data-i18n="Context Size (tokens)">
Context Size (tokens)
</div>
<div class="range-block-range-and-counter">
<div class="range-block-range">
<input type="range" id="openai_max_context" name="volume" min="512" max="4095" step="1">
</div>
<div class="range-block-counter" data-randomization-disabled="true">
<input type="number" min="512" max="4095" step="1" data-for="openai_max_context" id="openai_max_context_counter">
</div>
</div>
</div>
<div class="range-block">
<div class="range-block-title" data-i18n="Max Response Length (tokens)">
Max Response Length (tokens)
</div>
<div class="wide100p">
<input type="number" id="openai_max_tokens" name="openai_max_tokens" class="text_pole" min="1" max="65536">
</div>
</div>
<div class="range-block" data-source="openai,custom">
<div class="range-block-title" data-i18n="Multiple swipes per generation">
Multiple swipes per generation
@@ -664,24 +691,6 @@
</select>
</div>
</div>
<div data-source="openrouter">
Max prompt cost: <span id="openrouter_max_prompt_cost">Unknown</span>
</div>
<hr>
<div class="range-block">
<label for="stream_toggle" title="Enable OpenAI completion streaming" data-i18n="[title]Enable OpenAI completion streaming" class="checkbox_label widthFreeExpand">
<input id="stream_toggle" type="checkbox" /><span data-i18n="Streaming">
Streaming</span>
</label>
<div class="toggle-description justifyLeft">
<span data-i18n="Display the response bit by bit as it is generated.">
Display the response bit by bit as it is generated.
</span><br>
<span data-i18n="When this is off, responses will be displayed all at once when they are complete.">
When this is off, responses will be displayed all at once when they are complete.
</span>
</div>
</div>
<div class="range-block" data-source="openai,claude,windowai,openrouter,ai21,scale,makersuite,mistralai,custom,cohere,perplexity,groq,01ai,nanogpt,deepseek">
<div class="range-block-title" data-i18n="Temperature">
Temperature

@@ -86,7 +86,6 @@ import {
loadMovingUIState,
getCustomStoppingStrings,
MAX_CONTEXT_DEFAULT,
MAX_RESPONSE_DEFAULT,
renderStoryString,
sortEntitiesList,
registerDebugFunction,
@@ -6905,6 +6904,8 @@ export function changeMainAPI() {
apiConnector: $('#kobold_horde'),
apiPresets: $('#kobold_api-presets'),
apiRanges: $('#range_block'),
apiUnlock: $('#max_context_unlocked_block'),
apiCost: $('#NULL_SELECTOR'),
maxContextElem: $('#max_context_block'),
amountGenElem: $('#amount_gen_block'),
},
@@ -6914,6 +6915,8 @@ export function changeMainAPI() {
apiConnector: $('#kobold_api'),
apiPresets: $('#kobold_api-presets'),
apiRanges: $('#range_block'),
apiUnlock: $('#max_context_unlocked_block'),
apiCost: $('#NULL_SELECTOR'),
maxContextElem: $('#max_context_block'),
amountGenElem: $('#amount_gen_block'),
},
@@ -6923,6 +6926,8 @@ export function changeMainAPI() {
apiConnector: $('#textgenerationwebui_api'),
apiPresets: $('#textgenerationwebui_api-presets'),
apiRanges: $('#range_block_textgenerationwebui'),
apiUnlock: $('#max_context_unlocked_block'),
apiCost: $('#prompt_cost_block_textgenerationwebui'),
maxContextElem: $('#max_context_block'),
amountGenElem: $('#amount_gen_block'),
},
@@ -6932,21 +6937,23 @@ export function changeMainAPI() {
apiConnector: $('#novel_api'),
apiPresets: $('#novel_api-presets'),
apiRanges: $('#range_block_novel'),
apiUnlock: $('#max_context_unlocked_block'),
apiCost: $('#NULL_SELECTOR'),
maxContextElem: $('#max_context_block'),
amountGenElem: $('#amount_gen_block'),
},
'openai': {
apiStreaming: $('#NULL_SELECTOR'),
apiStreaming: $('#streaming_openai_block'),
apiSettings: $('#openai_settings'),
apiConnector: $('#openai_api'),
apiPresets: $('#openai_api-presets'),
apiRanges: $('#range_block_openai'),
maxContextElem: $('#max_context_block'),
amountGenElem: $('#amount_gen_block'),
apiUnlock: $('#oai_max_context_unlocked_block'),
apiCost: $('#prompt_cost_block_openai'),
maxContextElem: $('#openai_max_context_block'),
amountGenElem: $('#max_response_openai'),
},
};
//console.log('--- apiElements--- ');
//console.log(apiElements);

//first, disable everything so the old elements stop showing
for (const apiName in apiElements) {
@@ -6960,6 +6967,10 @@ export function changeMainAPI() {
apiObj.apiRanges.css('display', 'none');
apiObj.apiPresets.css('display', 'none');
apiObj.apiStreaming.css('display', 'none');
apiObj.apiUnlock.css('display', 'none');
apiObj.apiCost.css('display', 'none');
apiObj.maxContextElem.css('display', 'none');
apiObj.amountGenElem.css('display', 'none');
}

//then, find and enable the active item.
@@ -6971,17 +6982,15 @@ export function changeMainAPI() {
activeItem.apiConnector.css('display', 'block');
activeItem.apiRanges.css('display', 'block');
activeItem.apiPresets.css('display', 'block');
activeItem.apiUnlock.css('display', 'block');
activeItem.apiCost.css('display', 'block');
activeItem.maxContextElem.css('display', 'block');
activeItem.amountGenElem.css('display', 'block');

if (selectedVal === 'openai') {
activeItem.apiPresets.css('display', 'flex');
}

if (selectedVal === 'textgenerationwebui' || selectedVal === 'novel') {
console.debug('enabling amount_gen for ooba/novel');
activeItem.amountGenElem.find('input').prop('disabled', false);
activeItem.amountGenElem.css('opacity', 1.0);
}

//custom because streaming has been moved up under response tokens, which exists inside common settings block
if (selectedVal === 'novel') {
$('#ai_module_block_novel').css('display', 'block');
@@ -6989,15 +6998,6 @@ export function changeMainAPI() {
$('#ai_module_block_novel').css('display', 'none');
}

// Hide common settings for OpenAI
console.debug('value?', selectedVal);
if (selectedVal == 'openai') {
console.debug('hiding settings?');
$('#common-gen-settings-block').css('display', 'none');
} else {
$('#common-gen-settings-block').css('display', 'block');
}

main_api = selectedVal;
setOnlineStatus('no_connection');
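
Note on the script.js hunks above: the reworked `apiElements` map gives every API the same set of jQuery handles (`apiStreaming`, `apiUnlock`, `apiCost`, `maxContextElem`, `amountGenElem`, ...), so `changeMainAPI()` can hide everything first and then show only the selected API's blocks. A minimal sketch of that pattern, with an abbreviated two-entry map and a made-up function name (the real code iterates the full map inline):

```js
// Simplified sketch of the show/hide pattern used in changeMainAPI().
// Selectors are copied from the hunks above; toggleApiBlocks() itself is illustrative.
const apiElements = {
    'openai': {
        apiUnlock: $('#oai_max_context_unlocked_block'),
        apiCost: $('#prompt_cost_block_openai'),
    },
    'novel': {
        apiUnlock: $('#max_context_unlocked_block'),
        apiCost: $('#NULL_SELECTOR'), // matches nothing, so .css() is a harmless no-op
    },
};

function toggleApiBlocks(selectedVal) {
    // Hide every API's blocks first so stale elements stop showing...
    for (const apiName in apiElements) {
        Object.values(apiElements[apiName]).forEach((el) => el.css('display', 'none'));
    }
    // ...then reveal only the blocks that belong to the selected API.
    Object.values(apiElements[selectedVal]).forEach((el) => el.css('display', 'block'));
}
```
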
@@ -7317,13 +7317,12 @@ export async function saveSettings(loopCounter = 0) {
* @param {{ genamt?: number, max_length?: number }} preset Preset object
*/
export function setGenerationParamsFromPreset(preset) {
const needsUnlock = (preset.max_length ?? max_context) > MAX_CONTEXT_DEFAULT || (preset.genamt ?? amount_gen) > MAX_RESPONSE_DEFAULT;
const needsUnlock = (preset.max_length ?? max_context) > MAX_CONTEXT_DEFAULT;
$('#max_context_unlocked').prop('checked', needsUnlock).trigger('change');

if (preset.genamt !== undefined) {
amount_gen = preset.genamt;
$('#amount_gen').val(amount_gen);
$('#amount_gen_counter').val(amount_gen);
}

if (preset.max_length !== undefined) {

@@ -6,7 +6,7 @@ import {
saveSettingsDebounced,
setGenerationParamsFromPreset,
} from '../script.js';
import { MAX_CONTEXT_DEFAULT, MAX_RESPONSE_DEFAULT, power_user } from './power-user.js';
import { MAX_CONTEXT_DEFAULT, power_user } from './power-user.js';
import { getTextTokens, tokenizers } from './tokenizers.js';
import { getEventSourceStream } from './sse-stream.js';
import {
@@ -166,7 +166,7 @@ export async function loadNovelSubscriptionData() {

export function loadNovelPreset(preset) {
if (preset.genamt === undefined) {
const needsUnlock = preset.max_context > MAX_CONTEXT_DEFAULT || preset.max_length > MAX_RESPONSE_DEFAULT;
const needsUnlock = preset.max_context > MAX_CONTEXT_DEFAULT;
$('#amount_gen').val(preset.max_length).trigger('input');
$('#max_context_unlocked').prop('checked', needsUnlock).trigger('change');
$('#max_context').val(preset.max_context).trigger('input');
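
Both preset loaders now decide the "Unlocked" checkbox from the context length alone; the comparison against MAX_RESPONSE_DEFAULT is dropped. A small sketch of the shared check, with an illustrative helper name (the diffs above inline the expression directly):

```js
// Illustrative helper only; MAX_CONTEXT_DEFAULT comes from power-user.js (8192 in this commit).
function presetNeedsUnlock(presetContextLength, currentMaxContext) {
    return (presetContextLength ?? currentMaxContext) > MAX_CONTEXT_DEFAULT;
}

// Usage mirroring setGenerationParamsFromPreset(), where preset.max_length and
// max_context are the module-level values seen in the hunk above.
$('#max_context_unlocked')
    .prop('checked', presetNeedsUnlock(preset.max_length, max_context))
    .trigger('change');
```
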
@@ -69,9 +69,7 @@ export {
};

export const MAX_CONTEXT_DEFAULT = 8192;
export const MAX_RESPONSE_DEFAULT = 2048;
const MAX_CONTEXT_UNLOCKED = 200 * 1024;
const MAX_RESPONSE_UNLOCKED = 16 * 1024;
const unlockedMaxContextStep = 512;
const maxContextMin = 512;
const maxContextStep = 64;
@@ -1711,14 +1709,6 @@ function switchMaxContextSize() {
}
}

const maxAmountGen = power_user.max_context_unlocked ? MAX_RESPONSE_UNLOCKED : MAX_RESPONSE_DEFAULT;
$('#amount_gen').attr('max', maxAmountGen);
$('#amount_gen_counter').attr('max', maxAmountGen);

if (Number($('#amount_gen').val()) >= maxAmountGen) {
$('#amount_gen').val(maxAmountGen).trigger('input');
}

if (power_user.enableZenSliders) {
$('#max_context_zenslider').remove();
CreateZenSliders($('#max_context'));
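
With the response-length handling removed from `switchMaxContextSize()`, only the context slider is reconfigured from `power_user.max_context_unlocked`. A rough sketch of that remaining logic using the constants declared above; the function name and the clamping step are assumptions, and the shipped function may differ in detail:

```js
// Rough sketch, not the literal function body: derive the context slider
// limits from the unlock flag and the constants exported by power-user.js.
function applyMaxContextLimits() {
    const maxContext = power_user.max_context_unlocked ? MAX_CONTEXT_UNLOCKED : MAX_CONTEXT_DEFAULT;
    const step = power_user.max_context_unlocked ? unlockedMaxContextStep : maxContextStep;

    $('#max_context').attr({ min: maxContextMin, max: maxContext, step: step });
    $('#max_context_counter').attr({ min: maxContextMin, max: maxContext, step: step });

    // Assumed clamp: pull an out-of-range value back to the new maximum.
    if (Number($('#max_context').val()) > maxContext) {
        $('#max_context').val(maxContext).trigger('input');
    }
}
```
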