Text Completion: Add global banned strings list
@@ -1625,6 +1625,15 @@
 <span data-i18n="Banned Tokens">Banned Tokens/Strings</span>
 <div class="margin5 fa-solid fa-circle-info opacity50p " data-i18n="[title]LLaMA / Mistral / Yi models only" title="Enter sequences you don't want to appear in the output. Unquoted text will be tokenized in the back end and banned as tokens. [token ids] will be banned as-is. Most tokens have a leading space. Use token counter (with the correct tokenizer selected first!) if you are unsure. Enclose text in double quotes to ban the entire string as a set. Quoted Strings and [Token ids] must be on their own line."></div>
 </h4>
+<div class="textAlignCenter">
+    <small data-i18n="Global list">Global list</small>
+</div>
+<div class="wide100p marginBot10">
+    <textarea id="global_banned_tokens_textgenerationwebui" class="text_pole textarea_compact" name="global_banned_tokens_textgenerationwebui" rows="3" data-i18n="[placeholder]Example: some text [42, 69, 1337]" placeholder='some text as tokens [420, 69, 1337] "Some verbatim string"'></textarea>
+</div>
+<div class="textAlignCenter">
+    <small data-i18n="Preset-specific list">Preset-specific list</small>
+</div>
 <div class="wide100p">
 <textarea id="banned_tokens_textgenerationwebui" class="text_pole textarea_compact" name="banned_tokens_textgenerationwebui" rows="3" data-i18n="[placeholder]Example: some text [42, 69, 1337]" placeholder='some text as tokens [420, 69, 1337] "Some verbatim string"'></textarea>
 </div>
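The tooltip above defines three line formats for these textareas: unquoted text (tokenized by the backend and banned token-by-token), [token ids] (banned as-is), and double-quoted strings (banned verbatim as a whole). A minimal sketch of how a single line could be classified under those rules; `classifyBannedLine` is a hypothetical helper for illustration, not code from this commit:

```js
// Hypothetical sketch based on the rules in the info tooltip above.
// This is not SillyTavern's actual parser; the name is illustrative.
function classifyBannedLine(line) {
    const trimmed = line.trim();
    if (trimmed.startsWith('[') && trimmed.endsWith(']')) {
        // [420, 69, 1337] -> explicit token ids, banned as-is
        return { kind: 'token_ids', value: JSON.parse(trimmed) };
    }
    if (trimmed.length > 1 && trimmed.startsWith('"') && trimmed.endsWith('"')) {
        // "Some verbatim string" -> banned as one whole string
        return { kind: 'string', value: trimmed.slice(1, -1) };
    }
    // Anything else is tokenized in the back end and banned as tokens
    return { kind: 'text', value: trimmed };
}

console.log(classifyBannedLine('[420, 69, 1337]'));        // token_ids
console.log(classifyBannedLine('"Some verbatim string"')); // string
console.log(classifyBannedLine('some text'));              // text
```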
@@ -587,6 +587,7 @@ class PresetManager {
         'derived',
         'generic_model',
         'include_reasoning',
+        'global_banned_tokens',
     ];
     const settings = Object.assign({}, getSettingsByApiId(this.apiId));
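Judging from the neighbouring keys ('derived', 'generic_model', 'include_reasoning'), this list appears to name settings that PresetManager keeps out of saved presets, which matches the new field's "global" semantics. A sketch of how such an exclusion list could be applied when snapshotting settings into a preset; `filterList` and `toPresetSnapshot` are assumed names, not code from this commit:

```js
// Keys assumed to be excluded from per-preset storage.
const filterList = ['derived', 'generic_model', 'include_reasoning', 'global_banned_tokens'];

// Copy everything except the excluded keys into the preset snapshot.
function toPresetSnapshot(settings) {
    return Object.fromEntries(
        Object.entries(settings).filter(([key]) => !filterList.includes(key)),
    );
}

const snapshot = toPresetSnapshot({
    banned_tokens: '"per-preset"',
    global_banned_tokens: '"shared"', // stays out of the preset
});
console.log(snapshot); // { banned_tokens: '"per-preset"' }
```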
@@ -182,6 +182,7 @@ const settings = {
     grammar_string: '',
     json_schema: {},
     banned_tokens: '',
+    global_banned_tokens: '',
     sampler_priority: OOBA_DEFAULT_ORDER,
     samplers: LLAMACPP_DEFAULT_ORDER,
     samplers_priorities: APHRODITE_DEFAULT_ORDER,
@@ -274,6 +275,7 @@ export const setting_names = [
     'grammar_string',
     'json_schema',
     'banned_tokens',
+    'global_banned_tokens',
     'ignore_eos_token',
     'spaces_between_special_tokens',
     'speculative_ngram',
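The two hunks above give the new field a default ('' in the settings object) and register it in setting_names, so generic load/save code can pick it up without field-specific handling. A sketch of that pattern; `restoreSettings` is an illustrative name under assumed mechanics, not from the codebase:

```js
// Defaults supply a value when nothing is stored; setting_names drives
// a generic restore loop, so new settings need both entries.
const defaults = { banned_tokens: '', global_banned_tokens: '' };
const setting_names = ['banned_tokens', 'global_banned_tokens'];

function restoreSettings(stored) {
    const settings = { ...defaults };
    for (const name of setting_names) {
        if (name in stored) settings[name] = stored[name];
    }
    return settings;
}

console.log(restoreSettings({ global_banned_tokens: '"ugh"' }));
// { banned_tokens: '', global_banned_tokens: '"ugh"' }
```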
@@ -404,8 +406,9 @@ function getCustomTokenBans() {
     const tokenizer = getTokenizerForTokenIds();
     const banned_tokens = [];
     const banned_strings = [];
-    const sequences = settings.banned_tokens
-        .split('\n')
+    const sequences = []
+        .concat(settings.banned_tokens.split('\n'))
+        .concat(settings.global_banned_tokens.split('\n'))
         .concat(textgenerationwebui_banned_in_macros)
         .filter(x => x.length > 0)
         .filter(onlyUnique);
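This is the behavioural core of the commit: instead of splitting only settings.banned_tokens, getCustomTokenBans() now concatenates the preset-specific list, the new global list, and any macro-banned sequences, then drops empty lines and duplicates. A self-contained sketch of the merged pipeline; `onlyUnique` is inlined here for runnability, while the codebase imports it from its utilities:

```js
// Standard keep-first-occurrence dedupe predicate.
const onlyUnique = (value, index, array) => array.indexOf(value) === index;

// Example inputs: one line appears in both lists.
const settings = {
    banned_tokens: 'some text\n[420, 69, 1337]',
    global_banned_tokens: '"Some verbatim string"\nsome text',
};
const textgenerationwebui_banned_in_macros = [];

// Same shape as the new code: split both lists into lines, merge,
// drop empty lines, drop duplicates.
const sequences = []
    .concat(settings.banned_tokens.split('\n'))
    .concat(settings.global_banned_tokens.split('\n'))
    .concat(textgenerationwebui_banned_in_macros)
    .filter(x => x.length > 0)
    .filter(onlyUnique);

console.log(sequences);
// [ 'some text', '[420, 69, 1337]', '"Some verbatim string"' ]
```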