Merge pull request #3017 from Beinsezii/lcpp-dry-fix

llama.cpp Enable dry w/ array convert
This commit is contained in:
Cohee 2024-10-28 10:16:34 +02:00 committed by GitHub
commit 9cc67ae43f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 2 additions and 2 deletions

View File

@ -1397,8 +1397,7 @@
</div>
</div>
- <!-- Enable for llama.cpp when the PR is merged: https://github.com/ggerganov/llama.cpp/pull/6839 -->
- <div data-tg-type="ooba, koboldcpp, tabby" id="dryBlock" class="wide100p">
+ <div data-tg-type="ooba, koboldcpp, tabby, llamacpp" id="dryBlock" class="wide100p">
<h4 class="wide100p textAlignCenter" title="DRY penalizes tokens that would extend the end of the input into a sequence that has previously occurred in the input. Set multiplier to 0 to disable." data-i18n="[title]DRY_Repetition_Penalty_desc">
<label data-i18n="DRY Repetition Penalty">DRY Repetition Penalty</label>
<a href="https://github.com/oobabooga/text-generation-webui/pull/5677" target="_blank">

View File

@ -1312,6 +1312,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
// Conflicts with ooba's grammar_string
'grammar': settings.grammar_string,
'cache_prompt': true,
'dry_sequence_breakers': JSON.parse(params.dry_sequence_breakers),
};
params = Object.assign(params, llamaCppParams);
}