Mirror of https://github.com/SillyTavern/SillyTavern.git (synced 2025-06-05 21:59:27 +02:00)
Sampler priority support
@@ -47,6 +47,7 @@
 "ban_eos_token": false,
 "skip_special_tokens": true,
 "streaming": false,
+"sampler_priority": "temperature\ndynamic_temperature\nquadratic_sampling\ntop_k\ntop_p\ntypical_p\nepsilon_cutoff\neta_cutoff\ntfs\ntop_a\nmin_p\nmirostat",
 "mirostat_mode": 0,
 "mirostat_tau": 5,
 "mirostat_eta": 0.1,
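The preset stores sampler_priority as a single newline-separated string, one sampler name per line, matching text-generation-webui's default sampler order. A minimal sketch of turning that string into an array, in case a backend expects a list rather than raw text (the helper below is illustrative, not part of this commit):

    // Hypothetical helper: split the newline-separated preset value into a list.
    function parseSamplerPriority(priorityText) {
        return priorityText
            .split('\n')               // one sampler name per line
            .map(name => name.trim())  // tolerate stray whitespace
            .filter(Boolean);          // drop empty lines
    }

    parseSamplerPriority('temperature\ndynamic_temperature\ntop_k');
    // -> ['temperature', 'dynamic_temperature', 'top_k']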
@@ -1451,6 +1451,16 @@
 <textarea id="banned_tokens_textgenerationwebui" class="text_pole textarea_compact" name="banned_tokens_textgenerationwebui" rows="3" placeholder="Example: some text [42, 69, 1337]"></textarea>
 </div>
 </div>
+<div data-newbie-hidden class="wide100p">
+<hr data-newbie-hidden class="width100p">
+<h4 class="range-block-title justifyCenter">
+<span data-i18n="Sampler Priority">Sampler Priority</span>
+<div class="margin5 fa-solid fa-circle-info opacity50p " title="Ooba only. Determines the order of samplers."></div>
+</h4>
+<div class="wide100p">
+<textarea id="sampler_priority_textgenerationwebui" class="text_pole textarea_compact" name="sampler_priority_textgenerationwebui" rows="3" placeholder="temperature dynamic_temperature quadratic_sampling top_k top_p typical_p epsilon_cutoff eta_cutoff tfs top_a min_p mirostat"></textarea>
+</div>
+</div>
 <div class="range-block wide100p">
 <div class="range-block-title title_restorable">
 <span data-i18n="Logit Bias">Logit Bias</span>
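The new textarea follows the existing <name>_textgenerationwebui id convention, so it can be read and written like the other text inputs. A rough sketch of that round trip, assuming SillyTavern's usual jQuery-based handling (the binding code is not part of this diff):

    // Read the user's current priority list (raw string, newlines preserved).
    const priorityText = $('#sampler_priority_textgenerationwebui').val();

    // Push a preset value back into the UI and notify any input listeners.
    $('#sampler_priority_textgenerationwebui')
        .val(settings.sampler_priority)
        .trigger('input');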
@@ -96,6 +96,7 @@ const settings = {
 negative_prompt: '',
 grammar_string: '',
 banned_tokens: '',
+sampler_priority: '',
 //n_aphrodite: 1,
 //best_of_aphrodite: 1,
 ignore_eos_token_aphrodite: false,
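Defaulting sampler_priority to an empty string keeps older saved presets usable: the field is still defined after a pre-commit preset is applied. A sketch, assuming presets are merged over the defaults with an Object.assign/spread-style merge (the merge code is not in this diff):

    const defaults = { banned_tokens: '', sampler_priority: '' };
    const oldPreset = { banned_tokens: 'token' };     // saved before this commit
    const merged = { ...defaults, ...oldPreset };
    console.log(merged.sampler_priority);             // '' – defined, so the UI stays consistent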
@@ -170,6 +171,7 @@ const setting_names = [
 //'log_probs_aphrodite',
 //'prompt_log_probs_aphrodite'
 'sampler_order',
+'sampler_priority',
 'n',
 'logit_bias',
 'custom_model',
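Adding 'sampler_priority' to setting_names registers it with the generic settings plumbing: each listed name is expected to have a matching settings.<name> field and a #<name>_textgenerationwebui control. A simplified sketch of that convention (an assumption based on the naming pattern; the actual loader lives elsewhere in this file and also handles sliders and checkboxes):

    // For each registered name, sync the control and the settings object both ways.
    for (const name of setting_names) {
        const control = $(`#${name}_textgenerationwebui`);
        control.val(settings[name] ?? '');
        control.on('input', function () {
            settings[name] = $(this).val();
            saveSettingsDebounced(); // SillyTavern's debounced save helper
        });
    }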
@@ -827,6 +829,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
 'dynatemp_range': settings.dynatemp ? (settings.max_temp - settings.min_temp) / 2 : 0,
 'dynatemp_exponent': settings.dynatemp ? settings.dynatemp_exponent : 1,
 'smoothing_factor': settings.smoothing_factor,
+'sampler_priority': (settings.type === OOBA || settings.type === APHRODITE || settings.type === TABBY) ? settings.sampler_priority : undefined,
 'stopping_strings': getStoppingStrings(isImpersonate, isContinue),
 'stop': getStoppingStrings(isImpersonate, isContinue),
 'truncation_length': max_context,
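The ternary guard means sampler_priority is only sent for the backend types the commit allows (OOBA, APHRODITE, TABBY); for every other type the key resolves to undefined, and JSON.stringify drops undefined-valued keys, assuming the payload is serialized that way as is typical for fetch-based API calls:

    const body = {
        prompt: 'Hello',
        sampler_priority: undefined, // backend type doesn't support it
    };
    JSON.stringify(body); // '{"prompt":"Hello"}' – the field never reaches the API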
@@ -860,6 +863,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
 'guidance_scale': cfgValues?.guidanceScale?.value ?? settings.guidance_scale ?? 1,
 'negative_prompt': cfgValues?.negativePrompt ?? substituteParams(settings.negative_prompt) ?? '',
 'grammar_string': settings.grammar_string,
+'sampler_priority': (settings.type === OOBA || settings.type === APHRODITE || settings.type === TABBY) ? settings.sampler_priority : undefined,
 // llama.cpp aliases. In case someone wants to use LM Studio as Text Completion API
 'repeat_penalty': settings.rep_pen,
 'tfs_z': settings.tfs,
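The same guarded expression now appears in two payload builders. If it ever needs a third call site, a tiny helper would keep them in sync (hypothetical refactor, not part of the commit):

    // Return the configured priority only for backend types that accept it.
    function getSamplerPriority() {
        const supported = [OOBA, APHRODITE, TABBY].includes(settings.type);
        return supported ? settings.sampler_priority : undefined;
    }

    // Both call sites then become: 'sampler_priority': getSamplerPriority(),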