llama.cpp Enable dry w/ array convert

The newly merged PR requires an array instead of a string.

https://github.com/ggerganov/llama.cpp/pull/9702
Beinsezii 2024-10-26 16:07:07 -07:00
parent 5848806e6a
commit ace2902cb8
2 changed files with 2 additions and 2 deletions


@@ -1397,8 +1397,7 @@
         </div>
     </div>
-    <!-- Enable for llama.cpp when the PR is merged: https://github.com/ggerganov/llama.cpp/pull/6839 -->
-    <div data-tg-type="ooba, koboldcpp, tabby" id="dryBlock" class="wide100p">
+    <div data-tg-type="ooba, koboldcpp, tabby, llamacpp" id="dryBlock" class="wide100p">
         <h4 class="wide100p textAlignCenter" title="DRY penalizes tokens that would extend the end of the input into a sequence that has previously occurred in the input. Set multiplier to 0 to disable." data-i18n="[title]DRY_Repetition_Penalty_desc">
             <label data-i18n="DRY Repetition Penalty">DRY Repetition Penalty</label>
             <a href="https://github.com/oobabooga/text-generation-webui/pull/5677" target="_blank">


@@ -1312,6 +1312,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
             // Conflicts with ooba's grammar_string
             'grammar': settings.grammar_string,
             'cache_prompt': true,
+            'dry_sequence_breakers': JSON.parse(params.dry_sequence_breakers),
         };
         params = Object.assign(params, llamaCppParams);
     }
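
For context, the added line converts the DRY sequence breakers from the string kept in the settings into the array that the merged llama.cpp PR expects. Below is a minimal standalone sketch of that conversion, assuming the setting holds a JSON-encoded array of strings; the helper name is hypothetical and not part of the actual change, which simply inlines JSON.parse as shown above.

// Hypothetical illustration of the string-to-array conversion.
// Assumption: dry_sequence_breakers is stored as a JSON-encoded array of strings,
// e.g. '["\n", ":", "\"", "*"]', while llama.cpp's completion endpoint (PR #9702)
// expects an actual array rather than a string.
function toDrySequenceBreakers(value) {
    const parsed = JSON.parse(value);
    if (!Array.isArray(parsed)) {
        throw new Error('dry_sequence_breakers must be a JSON array of strings');
    }
    return parsed;
}

// Usage sketch:
// toDrySequenceBreakers('["\\n", ":", "\\"", "*"]') -> ['\n', ':', '"', '*']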