Mirror of https://github.com/SillyTavern/SillyTavern.git (synced 2025-06-05 21:59:27 +02:00)

feature: allow auto-use of max context size given by backend
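
When the new toggle is enabled, checking the connection status also adopts the context size reported by the backend: the hunks below add the checkbox markup, extend getStatusTextgen() to read the value from the /props response for KoboldCpp and llama.cpp backends, and wire up the new power_user.context_size_derived setting. A condensed, standalone sketch of that flow, assuming a /props-style payload of which only default_generation_settings.n_ctx is actually read (this is a summary sketch, not the literal implementation):

    // Condensed sketch of the behavior added below; `props` stands in for the parsed /props response.
    function deriveMaxContext(props, currentMaxContext, wantsContextSize) {
        if (!wantsContextSize || !('default_generation_settings' in props)) {
            return currentMaxContext;
        }
        const backendMaxContext = props['default_generation_settings']['n_ctx'];
        if (typeof backendMaxContext === 'number' && backendMaxContext !== currentMaxContext) {
            console.log(`Auto-switching max context from ${currentMaxContext} to ${backendMaxContext}`);
            return backendMaxContext;
        }
        return currentMaxContext;
    }

    // Example: a backend serving an 8192-token context while the UI is set to 4096.
    console.log(deriveMaxContext({ default_generation_settings: { n_ctx: 8192 } }, 4096, true)); // -> 8192
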
@@ -220,6 +220,10 @@
             <div data-preset-manager-delete="textgenerationwebui" class="margin0 menu_button_icon menu_button" title="Delete the preset" data-i18n="[title]Delete the preset">
                 <i class="fa-fw fa-solid fa-trash-can"></i>
             </div>
+            <label for="context_size_derived" class="checkbox_label flex1" title="Use backend provided context size, when available." data-i18n="[title]context_size_derived">
+                <input id="context_size_derived" type="checkbox" style="display:none;" />
+                <small><i class="fa-solid fa-bolt menu_button margin0"></i></small>
+            </label>
         </div>
     </div>
     <div class="flex-container flexNoGap">

@@ -1238,8 +1238,9 @@ async function getStatusTextgen() {

     const wantsInstructDerivation = (power_user.instruct.enabled && power_user.instruct.derived);
     const wantsContextDerivation = power_user.context_derived;
+    const wantsContextSize = power_user.context_size_derived;
     const supportsChatTemplate = [textgen_types.KOBOLDCPP, textgen_types.LLAMACPP].includes(textgen_settings.type);
-    if (supportsChatTemplate && (wantsInstructDerivation || wantsContextDerivation)) {
+    if (supportsChatTemplate && (wantsInstructDerivation || wantsContextDerivation || wantsContextSize)) {
         const response = await fetch('/api/backends/text-completions/props', {
             method: 'POST',
             headers: getRequestHeaders(),

@@ -1253,6 +1254,16 @@ async function getStatusTextgen() {
             const data = await response.json();
             if (data) {
                 const { chat_template, chat_template_hash } = data;
+                if (wantsContextSize && "default_generation_settings" in data) {
+                    const backend_max_context = data["default_generation_settings"]["n_ctx"];
+                    if (max_context !== backend_max_context) {
+                        console.log(`Auto-switching max context from ${max_context} to ${backend_max_context}`);
+                        toastr.info(`Context Size Changed: ${max_context} ⇒ ${backend_max_context}`);
+                        max_context = backend_max_context;
+                        $('#max_context').val(max_context);
+                        $('#max_context_counter').val(max_context);
+                    }
+                }
                 console.log(`We have chat template ${chat_template.split('\n')[0]}...`);
                 const templates = await deriveTemplatesFromChatTemplate(chat_template, chat_template_hash);
                 if (templates) {

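For reference, the /api/backends/text-completions/props route queried above returns the backend's properties; the fields the hunk relies on look roughly like the sketch below. The concrete values, and anything beyond chat_template, chat_template_hash, and default_generation_settings.n_ctx, are illustrative assumptions rather than something taken from this commit:

    // Illustrative shape of the payload consumed above (values are made up).
    const exampleProps = {
        chat_template: '{% for message in messages %}...{% endfor %}', // truncated example template
        chat_template_hash: 'deadbeef',                                // hypothetical hash string
        default_generation_settings: {
            n_ctx: 8192, // context size the backend was started with
        },
    };

    // Same guarded access as the diff: only act when the key exists.
    if ('default_generation_settings' in exampleProps) {
        console.log(`Backend context size: ${exampleProps['default_generation_settings']['n_ctx']}`);
    }
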
@@ -245,6 +245,7 @@ let power_user = {
     },

     context_derived: false,
+    context_size_derived: false,

     sysprompt: {
         enabled: true,

@@ -1481,6 +1482,7 @@ async function loadPowerUserSettings(settings, data) {
     $('#example_messages_behavior').val(getExampleMessagesBehavior());
     $(`#example_messages_behavior option[value="${getExampleMessagesBehavior()}"]`).prop('selected', true);
     $('#context_derived').parent().find('i').toggleClass('toggleEnabled', !!power_user.context_derived);
+    $('#context_size_derived').parent().find('i').toggleClass('toggleEnabled', !!power_user.context_size_derived);

     $('#console_log_prompts').prop('checked', power_user.console_log_prompts);
     $('#request_token_probabilities').prop('checked', power_user.request_token_probabilities);

@@ -3076,6 +3078,16 @@ $(document).ready(() => {
         $('#context_derived').parent().find('i').toggleClass('toggleEnabled', !!power_user.context_derived);
     });

+    $('#context_size_derived').on('input', function () {
+        const value = !!$(this).prop('checked');
+        power_user.context_size_derived = value;
+        saveSettingsDebounced();
+    });
+
+    $('#context_size_derived').on('change', function () {
+        $('#context_size_derived').parent().find('i').toggleClass('toggleEnabled', !!power_user.context_size_derived);
+    });
+
     $('#always-force-name2-checkbox').change(function () {
         power_user.always_force_name2 = !!$(this).prop('checked');
         saveSettingsDebounced();

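The two added handlers split responsibilities: 'input' persists the new value and debounces a settings save, while 'change' refreshes the bolt icon's toggleEnabled class, mirroring the #context_derived 'change' handler just above it ('input' fires before 'change' for checkboxes, so the setting is already up to date when the icon is refreshed). A hypothetical helper capturing that pattern, assuming the same jQuery $ and saveSettingsDebounced that are already in scope in this file; the commit itself wires the handlers inline as shown above:

    // Hypothetical refactor sketch only; not part of the commit.
    function bindDerivedToggle(selector, apply) {
        $(selector).on('input', function () {
            apply(!!$(this).prop('checked')); // persist the checkbox state
            saveSettingsDebounced();
        });
        $(selector).on('change', function () {
            // Mirror the checkbox state onto the bolt icon inside the same label.
            $(this).parent().find('i').toggleClass('toggleEnabled', !!$(this).prop('checked'));
        });
    }

    // Equivalent to the added handlers above:
    // bindDerivedToggle('#context_size_derived', value => { power_user.context_size_derived = value; });
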