Merge pull request #2295 from bdashore3/new-samplers

Add new Tabby samplers
Cohee, 2024-05-22 23:45:14 +03:00 (committed by GitHub)
3 changed files with 38 additions and 2 deletions
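This merge adds three TabbyAPI-specific sampler controls to the Text Completion settings: Rep Pen Decay (rep_pen_decay), Skew (skew), and Speculative Ngram (speculative_ngram). Each one gets a slider or checkbox in the settings panel, a persisted default value, and a field in the outgoing generation request.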


@@ -1224,6 +1224,11 @@
<input class="neo-range-slider" type="range" id="rep_pen_range_textgenerationwebui" name="volume" min="-1" max="8192" step="1">
<input class="neo-range-input" type="number" min="-1" max="8192" step="1" data-for="rep_pen_range_textgenerationwebui" id="rep_pen_range_counter_textgenerationwebui">
</div>
<div data-tg-type="tabby" class="alignitemscenter flex-container flexFlowColumn flexBasis30p flexGrow flexShrink gap0">
<small data-i18n="rep.pen decay">Rep Pen Decay</small>
<input class="neo-range-slider" type="range" id="rep_pen_decay_textgenerationwebui" name="volume" min="-1" max="8192" step="1">
<input class="neo-range-input" type="number" min="-1" max="8192" step="1" data-for="rep_pen_decay_textgenerationwebui" id="rep_pen_decay_counter_textgenerationwebui">
</div>
<div data-tg-type="ooba" data-newbie-hidden class="alignitemscenter flex-container flexFlowColumn flexBasis30p flexGrow flexShrink gap0">
<small data-i18n="Encoder Rep. Pen.">Encoder Penalty</small>
<input class="neo-range-slider" type="range" id="encoder_rep_pen_textgenerationwebui" name="volume" min="0.8" max="1.5" step="0.01" />
@@ -1244,6 +1249,11 @@
<input class="neo-range-slider" type="range" id="no_repeat_ngram_size_textgenerationwebui" name="volume" min="0" max="20" step="1">
<input class="neo-range-input" type="number" min="0" max="20" step="1" data-for="no_repeat_ngram_size_textgenerationwebui" id="no_repeat_ngram_size_counter_textgenerationwebui">
</div>
<div data-newbie-hidden data-tg-type="tabby" class="alignitemscenter flex-container flexFlowColumn flexBasis30p flexGrow flexShrink gap0">
<small data-i18n="Skew">Skew</small>
<input class="neo-range-slider" type="range" id="skew_textgenerationwebui" name="volume" min="-5" max="5" step="0.01" />
<input class="neo-range-input" type="number" min="-5" max="5" step="0.01" data-for="skew_textgenerationwebui" id="skew_counter_textgenerationwebui">
</div>
<div data-newbie-hidden data-tg-type="mancer, ooba, tabby, dreamgen" class="alignitemscenter flex-container flexFlowColumn flexBasis30p flexGrow flexShrink gap0">
<small data-i18n="Min Length">Min Length</small>
<input class="neo-range-slider" type="range" id="min_length_textgenerationwebui" name="volume" min="0" max="2000" step="1" />
@@ -1444,6 +1454,13 @@
<div class="fa-solid fa-circle-info opacity50p " data-i18n="[title]Use the temperature sampler last" title="Use the temperature sampler last. This is almost always the sensible thing to do.&#13;When enabled: sample the set of plausible tokens first, then apply temperature to adjust their relative probabilities (technically, logits).&#13;When disabled: apply temperature to adjust the relative probabilities of ALL tokens first, then sample plausible tokens from that.&#13;Disabling Temperature Last boosts the probabilities in the tail of the distribution, which tends to amplify the chances of getting an incoherent response."></div>
</label>
</label>
<label data-tg-type="tabby" class="checkbox_label flexGrow flexShrink" for="speculative_ngram_textgenerationwebui">
<input type="checkbox" id="speculative_ngram_textgenerationwebui" />
<label>
<small data-i18n="Speculative Ngram">Speculative Ngram</small>
<div class="fa-solid fa-circle-info opacity50p " data-i18n="[title]Use a different speculative decoding method without a draft model" title="Use a different speculative decoding method without a draft model.&#13;Using a draft model is preferred. Speculative ngram is not as effective."></div>
</label>
</label>
<label data-tg-type="vllm, aphrodite" class="checkbox_label" for="spaces_between_special_tokens_textgenerationwebui">
<input type="checkbox" id="spaces_between_special_tokens_textgenerationwebui" />


@@ -679,6 +679,7 @@ async function CreateZenSliders(elmnt) {
sliderID == 'mirostat_mode_kobold' ||
sliderID == 'rep_pen_range' ||
sliderID == 'dry_allowed_length_textgenerationwebui' ||
sliderID == 'rep_pen_decay_textgenerationwebui' ||
sliderID == 'dry_penalty_last_n_textgenerationwebui' ||
sliderID == 'max_tokens_second_textgenerationwebui') {
decimals = 0;
@@ -750,6 +751,7 @@ async function CreateZenSliders(elmnt) {
sliderID == 'rep_pen_slope' ||
sliderID == 'smoothing_factor_textgenerationwebui' ||
sliderID == 'smoothing_curve_textgenerationwebui' ||
sliderID == 'skew_textgenerationwebui' ||
sliderID == 'dry_multiplier_textgenerationwebui' ||
sliderID == 'min_length_textgenerationwebui') {
offVal = 0;
@@ -1768,12 +1770,15 @@ function switchMaxContextSize() {
$('#rep_pen_range_counter_textgenerationwebui'),
$('#dry_penalty_last_n_textgenerationwebui'),
$('#dry_penalty_last_n_counter_textgenerationwebui'),
$('#rep_pen_decay_textgenerationwebui'),
$('#rep_pen_decay_counter_textgenerationwebui'),
];
const maxValue = power_user.max_context_unlocked ? MAX_CONTEXT_UNLOCKED : MAX_CONTEXT_DEFAULT;
const minValue = power_user.max_context_unlocked ? maxContextMin : maxContextMin;
const steps = power_user.max_context_unlocked ? unlockedMaxContextStep : maxContextStep;
$('#rep_pen_range_textgenerationwebui_zenslider').remove(); //unsure why, but this is necessary.
$('#dry_penalty_last_n_textgenerationwebui_zenslider').remove();
$('#rep_pen_decay_textgenerationwebui_zenslider').remove();
for (const element of elements) {
const id = element.attr('id');
element.attr('max', maxValue);
@@ -1804,6 +1809,8 @@ function switchMaxContextSize() {
CreateZenSliders($('#rep_pen_range_textgenerationwebui'));
$('#dry_penalty_last_n_textgenerationwebui_zenslider').remove();
CreateZenSliders($('#dry_penalty_last_n_textgenerationwebui'));
$('#rep_pen_decay_textgenerationwebui_zenslider').remove();
CreateZenSliders($('#rep_pen_decay_textgenerationwebui'));
}
}
@@ -3026,7 +3033,7 @@ $(document).ready(() => {
var coreTruthWinHeight = window.innerHeight;
$(window).on('resize', async () => {
- console.log(`Window resize: ${coreTruthWinWidth}x${coreTruthWinHeight} -> ${window.innerWidth}x${window.innerHeight}`)
+ console.log(`Window resize: ${coreTruthWinWidth}x${coreTruthWinHeight} -> ${window.innerWidth}x${window.innerHeight}`);
adjustAutocompleteDebounced();
setHotswapsDebounced();
@@ -3079,7 +3086,7 @@ $(document).ready(() => {
}
}
} else {
- console.log('aborting MUI reset', Object.keys(power_user.movingUIState).length)
+ console.log('aborting MUI reset', Object.keys(power_user.movingUIState).length);
}
saveSettingsDebounced();
coreTruthWinWidth = window.innerWidth;
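The script changes above fold the new controls into the existing zen-slider handling: rep_pen_decay_textgenerationwebui joins the whole-number sliders (decimals = 0) and the set of context-sized sliders whose maximum tracks MAX_CONTEXT_UNLOCKED / MAX_CONTEXT_DEFAULT in switchMaxContextSize(), while skew_textgenerationwebui joins the sliders whose "off" value is 0. As with rep_pen_range and dry_penalty_last_n, the existing zen slider is removed and recreated after the max changes. A hypothetical helper (not in the codebase) that captures that remove-and-recreate pattern:

// Hypothetical helper, shown only to illustrate the pattern used above;
// CreateZenSliders is the async factory from this file.
async function rebuildZenSlider(id) {
    $('#' + id + '_zenslider').remove();   // drop the stale widget
    await CreateZenSliders($('#' + id));   // rebuild it against the updated max/step
}
// e.g. await rebuildZenSlider('rep_pen_decay_textgenerationwebui');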


@@ -100,6 +100,7 @@ const settings = {
min_p: 0,
rep_pen: 1.2,
rep_pen_range: 0,
rep_pen_decay: 0,
no_repeat_ngram_size: 0,
penalty_alpha: 0,
num_beams: 1,
@@ -108,6 +109,7 @@ const settings = {
encoder_rep_pen: 1,
freq_pen: 0,
presence_pen: 0,
skew: 0,
do_sample: true,
early_stopping: false,
dynatemp: false,
@@ -144,6 +146,7 @@ const settings = {
//best_of_aphrodite: 1,
ignore_eos_token: false,
spaces_between_special_tokens: true,
speculative_ngram: false,
//logits_processors_aphrodite: [],
//log_probs_aphrodite: 0,
//prompt_log_probs_aphrodite: 0,
@@ -176,6 +179,7 @@ export const setting_names = [
'temperature_last',
'rep_pen',
'rep_pen_range',
'rep_pen_decay',
'no_repeat_ngram_size',
'top_k',
'top_p',
@@ -204,6 +208,7 @@ export const setting_names = [
'encoder_rep_pen',
'freq_pen',
'presence_pen',
'skew',
'do_sample',
'early_stopping',
'seed',
@@ -224,6 +229,7 @@ export const setting_names = [
//'best_of_aphrodite',
'ignore_eos_token',
'spaces_between_special_tokens',
'speculative_ngram',
//'logits_processors_aphrodite',
//'log_probs_aphrodite',
//'prompt_log_probs_aphrodite'
@@ -648,6 +654,7 @@ jQuery(function () {
'min_p_textgenerationwebui': 0,
'rep_pen_textgenerationwebui': 1,
'rep_pen_range_textgenerationwebui': 0,
'rep_pen_decay_textgenerationwebui': 0,
'dynatemp_textgenerationwebui': false,
'seed_textgenerationwebui': -1,
'ban_eos_token_textgenerationwebui': false,
@@ -666,7 +673,9 @@ jQuery(function () {
'encoder_rep_pen_textgenerationwebui': 1,
'freq_pen_textgenerationwebui': 0,
'presence_pen_textgenerationwebui': 0,
'skew_textgenerationwebui': 0,
'no_repeat_ngram_size_textgenerationwebui': 0,
'speculative_ngram_textgenerationwebui': false,
'min_length_textgenerationwebui': 0,
'num_beams_textgenerationwebui': 1,
'length_penalty_textgenerationwebui': 1,
@@ -1028,6 +1037,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
'frequency_penalty': settings.freq_pen,
'presence_penalty': settings.presence_pen,
'top_k': settings.top_k,
'skew': settings.skew,
'min_length': settings.type === OOBA ? settings.min_length : undefined,
'minimum_message_content_tokens': settings.type === DREAMGEN ? settings.min_length : undefined,
'min_tokens': settings.min_length,
@@ -1074,11 +1084,13 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
const nonAphroditeParams = {
'rep_pen': settings.rep_pen,
'rep_pen_range': settings.rep_pen_range,
'repetition_decay': settings.type === TABBY ? settings.rep_pen_decay : undefined,
'repetition_penalty_range': settings.rep_pen_range,
'encoder_repetition_penalty': settings.type === OOBA ? settings.encoder_rep_pen : undefined,
'no_repeat_ngram_size': settings.type === OOBA ? settings.no_repeat_ngram_size : undefined,
'penalty_alpha': settings.type === OOBA ? settings.penalty_alpha : undefined,
'temperature_last': (settings.type === OOBA || settings.type === APHRODITE || settings.type == TABBY) ? settings.temperature_last : undefined,
'speculative_ngram': settings.type === TABBY ? settings.speculative_ngram : undefined,
'do_sample': settings.type === OOBA ? settings.do_sample : undefined,
'seed': settings.seed,
'guidance_scale': cfgValues?.guidanceScale?.value ?? settings.guidance_scale ?? 1,
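In getTextGenGenerationData above, skew is included for every backend, while repetition_decay and speculative_ngram are only populated when settings.type === TABBY and stay undefined otherwise. Assuming the request body is serialized with JSON.stringify, undefined values are dropped, so other backends never receive the Tabby-only keys. A small self-contained sketch of that behavior (values are made up):

// Illustrative only: with a non-Tabby backend the Tabby-only fields remain
// undefined and JSON.stringify omits them from the request body.
const isTabby = false;                              // stand-in for settings.type === TABBY
const params = {
    skew: 0.2,                                      // settings.skew, sent for all backends
    repetition_decay: isTabby ? 512 : undefined,    // settings.rep_pen_decay, Tabby only
    speculative_ngram: isTabby ? true : undefined,  // settings.speculative_ngram, Tabby only
};
console.log(JSON.stringify(params));                // -> {"skew":0.2}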